
Searched refs:counter (Results 1 – 25 of 156) sorted by relevance


/arch/arm/include/asm/
atomic.h 25 #define arch_atomic_read(v) READ_ONCE((v)->counter)
26 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
42 prefetchw(&v->counter); \
49 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
50 : "r" (&v->counter), "Ir" (i) \
60 prefetchw(&v->counter); \
68 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
69 : "r" (&v->counter), "Ir" (i) \
81 prefetchw(&v->counter); \
89 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
[all …]
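
The arm results above show the two halves of a typical atomic_t implementation: arch_atomic_read/arch_atomic_set are plain READ_ONCE/WRITE_ONCE on v->counter, while the read-modify-write ops are ldrex/strex retry loops (prefetchw warms the cache line before the exclusive access). A minimal portable sketch of the same retry pattern, using C11 atomics instead of the raw exclusives; the my_atomic_* names are illustrative, not kernel API:

    #include <stdatomic.h>

    /* Mirrors the kernel's atomic_t: an int wrapped in a struct. */
    typedef struct { atomic_int counter; } my_atomic_t;

    static inline int my_atomic_read(my_atomic_t *v)
    {
            return atomic_load_explicit(&v->counter, memory_order_relaxed);
    }

    /* add_return as a CAS retry loop, the portable analogue of the
     * ldrex/strex loop in the arm assembly. */
    static inline int my_atomic_add_return(int i, my_atomic_t *v)
    {
            int old = atomic_load_explicit(&v->counter, memory_order_relaxed);

            while (!atomic_compare_exchange_weak(&v->counter, &old, old + i))
                    ;       /* lost the race; old now holds the fresh value */
            return old + i;
    }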
/arch/x86/kernel/cpu/
perfctr-watchdog.c 110 unsigned int counter; in reserve_perfctr_nmi() local
112 counter = nmi_perfctr_msr_to_bit(msr); in reserve_perfctr_nmi()
114 if (counter > NMI_MAX_COUNTER_BITS) in reserve_perfctr_nmi()
117 if (!test_and_set_bit(counter, perfctr_nmi_owner)) in reserve_perfctr_nmi()
125 unsigned int counter; in release_perfctr_nmi() local
127 counter = nmi_perfctr_msr_to_bit(msr); in release_perfctr_nmi()
129 if (counter > NMI_MAX_COUNTER_BITS) in release_perfctr_nmi()
132 clear_bit(counter, perfctr_nmi_owner); in release_perfctr_nmi()
138 unsigned int counter; in reserve_evntsel_nmi() local
140 counter = nmi_evntsel_msr_to_bit(msr); in reserve_evntsel_nmi()
[all …]
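
reserve_perfctr_nmi() and release_perfctr_nmi() implement a small ownership protocol: the MSR is mapped to a bit index, and test_and_set_bit()/clear_bit() on the perfctr_nmi_owner bitmap claim and release that hardware counter atomically, so the NMI watchdog and perf cannot program the same counter. A hedged user-space sketch of the same claim/release idiom (the names and single-word bitmap are invented; C11 fetch_or/fetch_and stand in for the kernel bitmap helpers):

    #include <stdatomic.h>
    #include <stdbool.h>

    #define MAX_COUNTER_BITS 32
    static atomic_uint counter_owner;       /* one ownership bit per counter */

    /* Claim a counter; true on success, false if someone owns it. */
    static bool reserve_counter(unsigned int counter)
    {
            unsigned int bit;

            if (counter >= MAX_COUNTER_BITS)
                    return false;
            bit = 1u << counter;
            /* fetch_or is an atomic test-and-set: the returned old
             * value tells us whether the bit was already taken. */
            return !(atomic_fetch_or(&counter_owner, bit) & bit);
    }

    static void release_counter(unsigned int counter)
    {
            if (counter < MAX_COUNTER_BITS)
                    atomic_fetch_and(&counter_owner, ~(1u << counter));
    }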
/arch/arm64/include/asm/
arm_dsu_pmu.h 53 static inline void __dsu_pmu_select_counter(int counter) in __dsu_pmu_select_counter() argument
55 write_sysreg_s(counter, CLUSTERPMSELR_EL1); in __dsu_pmu_select_counter()
59 static inline u64 __dsu_pmu_read_counter(int counter) in __dsu_pmu_read_counter() argument
61 __dsu_pmu_select_counter(counter); in __dsu_pmu_read_counter()
65 static inline void __dsu_pmu_write_counter(int counter, u64 val) in __dsu_pmu_write_counter() argument
67 __dsu_pmu_select_counter(counter); in __dsu_pmu_write_counter()
72 static inline void __dsu_pmu_set_event(int counter, u32 event) in __dsu_pmu_set_event() argument
74 __dsu_pmu_select_counter(counter); in __dsu_pmu_set_event()
90 static inline void __dsu_pmu_disable_counter(int counter) in __dsu_pmu_disable_counter() argument
92 write_sysreg_s(BIT(counter), CLUSTERPMCNTENCLR_EL1); in __dsu_pmu_disable_counter()
[all …]
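
Every accessor in arm_dsu_pmu.h funnels through __dsu_pmu_select_counter(): the cluster PMU exposes a single data window, and writing CLUSTERPMSELR_EL1 selects which counter that window addresses (the real driver also issues an isb() after the select so the two system-register accesses cannot be reordered). A small sketch of that select-then-access indirection against a hypothetical memory-mapped device; the register layout and names are invented for illustration:

    #include <stdint.h>

    /* Hypothetical banked-register device: SEL picks which counter the
     * single DATA window refers to, like CLUSTERPMSELR_EL1 above. */
    struct pmu_regs {
            volatile uint32_t sel;
            volatile uint64_t data;
    };

    static uint64_t pmu_read_counter(struct pmu_regs *pmu, int counter)
    {
            pmu->sel = counter;     /* select the bank... */
            return pmu->data;       /* ...then read through the window */
    }

    static void pmu_write_counter(struct pmu_regs *pmu, int counter, uint64_t val)
    {
            pmu->sel = counter;
            pmu->data = val;
    }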
/arch/x86/include/asm/
atomic64_64.h 15 return __READ_ONCE((v)->counter); in arch_atomic64_read()
20 __WRITE_ONCE(v->counter, i); in arch_atomic64_set()
26 : "=m" (v->counter) in arch_atomic64_add()
27 : "er" (i), "m" (v->counter) : "memory"); in arch_atomic64_add()
33 : "=m" (v->counter) in arch_atomic64_sub()
34 : "er" (i), "m" (v->counter) : "memory"); in arch_atomic64_sub()
39 return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i); in arch_atomic64_sub_and_test()
46 : "=m" (v->counter) in arch_atomic64_inc()
47 : "m" (v->counter) : "memory"); in arch_atomic64_inc()
54 : "=m" (v->counter) in arch_atomic64_dec()
[all …]
atomic.h 23 return __READ_ONCE((v)->counter); in arch_atomic_read()
28 __WRITE_ONCE(v->counter, i); in arch_atomic_set()
34 : "+m" (v->counter) in arch_atomic_add()
41 : "+m" (v->counter) in arch_atomic_sub()
47 return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i); in arch_atomic_sub_and_test()
54 : "+m" (v->counter) :: "memory"); in arch_atomic_inc()
61 : "+m" (v->counter) :: "memory"); in arch_atomic_dec()
67 return GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, e); in arch_atomic_dec_and_test()
73 return GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, e); in arch_atomic_inc_and_test()
79 return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i); in arch_atomic_add_negative()
[all …]
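
On x86 each RMW op is one lock-prefixed instruction, and the GEN_UNARY_RMWcc/GEN_BINARY_RMWcc macros hand back the resulting condition flag, which is how arch_atomic_dec_and_test() avoids a separate compare. The classic consumer of dec-and-test is reference counting; a minimal sketch with C11 atomics (struct object and its functions are hypothetical):

    #include <stdatomic.h>
    #include <stdlib.h>

    struct object {
            atomic_int refcount;    /* plays the role of atomic_t's counter */
            /* ... payload ... */
    };

    static void object_get(struct object *obj)
    {
            atomic_fetch_add(&obj->refcount, 1);
    }

    /* Drop a reference; free only when the count hits zero.
     * fetch_sub returns the old value, so old == 1 means we took
     * the counter to zero, like dec_and_test's condition flag. */
    static void object_put(struct object *obj)
    {
            if (atomic_fetch_sub(&obj->refcount, 1) == 1)
                    free(obj);
    }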
local.h 22 : "+m" (l->a.counter)); in local_inc()
28 : "+m" (l->a.counter)); in local_dec()
34 : "+m" (l->a.counter) in local_add()
41 : "+m" (l->a.counter) in local_sub()
56 return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i); in local_sub_and_test()
69 return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e); in local_dec_and_test()
82 return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e); in local_inc_and_test()
96 return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i); in local_add_negative()
110 : "+r" (i), "+m" (l->a.counter) in local_add_return()
125 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
[all …]
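
local_t wraps an atomic_t (hence the l->a.counter references) but guarantees atomicity only against the owning CPU, so x86 can use the same incl/decl/add instructions without the lock prefix: a single instruction remains indivisible with respect to interrupts on that CPU. The usual consumer is a per-CPU statistics counter. A loose sketch of the ownership idea with a per-thread counter; note that the plain C increment below compiles to load/modify/store and is not interrupt-safe the way the kernel's single-instruction version is:

    #include <stdint.h>

    /* local_t-style counter: written only by its owner, read
     * (approximately) by everyone else. */
    struct local_counter {
            int64_t counter;
    };

    static _Thread_local struct local_counter events;

    static inline void local_inc_sketch(struct local_counter *l)
    {
            l->counter++;   /* owner-only update: no atomic op needed */
    }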
/arch/powerpc/include/asm/
atomic.h 32 __asm__ __volatile__("lwz %0,0(%1)" : "=r"(t) : "b"(&v->counter)); in arch_atomic_read()
34 __asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m<>"(v->counter)); in arch_atomic_read()
43 __asm__ __volatile__("stw %1,0(%2)" : "=m"(v->counter) : "r"(i), "b"(&v->counter)); in arch_atomic_set()
45 __asm__ __volatile__("stw%U0%X0 %1,%0" : "=m<>"(v->counter) : "r"(i)); in arch_atomic_set()
58 : "=&r" (t), "+m" (v->counter) \
59 : "r"#sign (a), "r" (&v->counter) \
73 : "=&r" (t), "+m" (v->counter) \
74 : "r"#sign (a), "r" (&v->counter) \
90 : "=&r" (res), "=&r" (t), "+m" (v->counter) \
91 : "r"#sign (a), "r" (&v->counter) \
[all …]
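
The powerpc ops are all built from the same lwarx/stwcx. reservation loop: load-and-reserve, compute, store-conditional, and branch back if another CPU stole the reservation. A bare-bones version of that loop for a 32-bit add (a sketch only: the kernel variants add acquire/release barriers and the %U/%X addressing annotations seen above, and this compiles only on powerpc):

    /* Relaxed add_return via a lwarx/stwcx. reservation loop. */
    static inline int atomic_add_return_relaxed(int a, int *counter)
    {
            int t;

            __asm__ __volatile__(
    "1:     lwarx   %0,0,%2\n"      /* load word and reserve */
    "       add     %0,%0,%3\n"     /* t += a */
    "       stwcx.  %0,0,%2\n"      /* store iff reservation still held */
    "       bne-    1b\n"           /* reservation lost: retry */
            : "=&r" (t), "+m" (*counter)
            : "r" (counter), "r" (a)
            : "cc");
            return t;
    }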
/arch/mips/include/asm/
local.h 47 : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) in local_add_return()
48 : "Ir" (i), "m" (l->a.counter) in local_add_return()
54 result = l->a.counter; in local_add_return()
56 l->a.counter = result; in local_add_return()
81 : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) in local_sub_return()
82 : "Ir" (i), "m" (l->a.counter) in local_sub_return()
88 result = l->a.counter; in local_sub_return()
90 l->a.counter = result; in local_sub_return()
99 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
104 return try_cmpxchg_local(&l->a.counter, in local_try_cmpxchg()
[all …]
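
The two cmpxchg flavours at the end of the mips block differ only in calling convention: local_cmpxchg() returns the value it found, so the caller compares it against the expected one, while local_try_cmpxchg() returns a success flag and writes the observed value back through its old pointer, which makes retry loops one load shorter. A sketch of both shapes in C11 (names are illustrative):

    #include <stdatomic.h>
    #include <stdbool.h>

    /* cmpxchg style: caller inspects the returned old value. */
    static long cmpxchg_style(atomic_long *p, long old, long newval)
    {
            atomic_compare_exchange_strong(p, &old, newval);
            return old;             /* the value actually observed */
    }

    /* try_cmpxchg style: success flag out, observed value into *old. */
    static bool try_cmpxchg_style(atomic_long *p, long *old, long newval)
    {
            return atomic_compare_exchange_strong(p, old, newval);
    }

    /* Typical retry loop: a failed try_cmpxchg refreshes old itself. */
    static long add_return_sketch(atomic_long *p, long i)
    {
            long old = atomic_load(p);

            while (!try_cmpxchg_style(p, &old, old + i))
                    ;
            return old + i;
    }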
atomic.h 29 return READ_ONCE(v->counter); \
34 WRITE_ONCE(v->counter, i); \
53 v->counter c_op i; \
67 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
81 result = v->counter; \
83 v->counter = result; \
99 "+" GCC_OFF_SMALL_ASM() (v->counter) \
115 result = v->counter; \
116 v->counter c_op i; \
132 "+" GCC_OFF_SMALL_ASM() (v->counter) \
[all …]
/arch/s390/include/asm/
atomic.h 32 return __atomic_add_barrier(i, &v->counter) + i; in arch_atomic_add_return()
38 return __atomic_add_barrier(i, &v->counter); in arch_atomic_fetch_add()
44 __atomic_add(i, &v->counter); in arch_atomic_add()
55 __atomic_##op(i, &v->counter); \
59 return __atomic_##op##_barrier(i, &v->counter); \
75 #define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new)) in ATOMIC_OPS()
79 return __atomic_cmpxchg(&v->counter, old, new); in ATOMIC_OPS()
99 return __atomic64_add_barrier(i, (long *)&v->counter) + i; in arch_atomic64_add_return()
105 return __atomic64_add_barrier(i, (long *)&v->counter); in arch_atomic64_fetch_add()
111 __atomic64_add(i, (long *)&v->counter); in arch_atomic64_add()
[all …]
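
s390 splits every op into a plain and a _barrier flavour: __atomic_add() gives atomicity only, __atomic_add_barrier() adds full ordering, and arch_atomic_add_return() is literally the barrier fetch-add plus i. The same relaxed-versus-ordered split expressed with C11 memory orders (function names invented for the sketch):

    #include <stdatomic.h>

    /* Relaxed flavour: atomicity only, like __atomic_add(). */
    static inline void counter_add(int i, atomic_int *v)
    {
            atomic_fetch_add_explicit(v, i, memory_order_relaxed);
    }

    /* Ordered flavour, like __atomic_add_barrier(); add_return is
     * just the fetch_add result plus the addend, as in the s390
     * code above. */
    static inline int counter_add_return(int i, atomic_int *v)
    {
            return atomic_fetch_add_explicit(v, i, memory_order_seq_cst) + i;
    }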
/arch/alpha/include/asm/
local.h 33 :"=&r" (temp), "=m" (l->a.counter), "=&r" (result) in local_add_return()
34 :"Ir" (i), "m" (l->a.counter) : "memory"); in local_add_return()
50 :"=&r" (temp), "=m" (l->a.counter), "=&r" (result) in local_sub_return()
51 :"Ir" (i), "m" (l->a.counter) : "memory"); in local_sub_return()
57 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
62 return try_cmpxchg_local(&l->a.counter, (s64 *)old, new); in local_try_cmpxchg()
65 #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
105 #define __local_inc(l) ((l)->a.counter++)
106 #define __local_dec(l) ((l)->a.counter--)
107 #define __local_add(i,l) ((l)->a.counter+=(i))
[all …]
atomic.h 29 #define arch_atomic_read(v) READ_ONCE((v)->counter)
30 #define arch_atomic64_read(v) READ_ONCE((v)->counter)
32 #define arch_atomic_set(v,i) WRITE_ONCE((v)->counter, (i))
33 #define arch_atomic64_set(v,i) WRITE_ONCE((v)->counter, (i))
53 :"=&r" (temp), "=m" (v->counter) \
54 :"Ir" (i), "m" (v->counter)); \
70 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
71 :"Ir" (i), "m" (v->counter) : "memory"); \
88 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
89 :"Ir" (i), "m" (v->counter) : "memory"); \
[all …]
/arch/arc/include/asm/
spinlock.h 101 : [rwlock] "r" (&(rw->counter)), in arch_read_lock()
125 : [rwlock] "r" (&(rw->counter)), in arch_read_trylock()
158 : [rwlock] "r" (&(rw->counter)), in arch_write_lock()
183 : [rwlock] "r" (&(rw->counter)), in arch_write_trylock()
209 : [rwlock] "r" (&(rw->counter)) in arch_read_unlock()
217 WRITE_ONCE(rw->counter, __ARCH_RW_LOCK_UNLOCKED__); in arch_write_unlock()
310 if (rw->counter > 0) { in arch_read_trylock()
311 rw->counter--; in arch_read_trylock()
336 if (rw->counter == __ARCH_RW_LOCK_UNLOCKED__) { in arch_write_trylock()
337 rw->counter = 0; in arch_write_trylock()
[all …]
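
The arc rwlock encodes everything in one counter: it starts at __ARCH_RW_LOCK_UNLOCKED__, each reader decrements it while it is still positive, and a writer may take the lock only when the counter still holds the full unlocked value, dropping it to 0. A compact sketch of those trylock rules with a mutex standing in for the atomic access (the constant and field names follow the snippet, not a real API):

    #include <pthread.h>
    #include <stdbool.h>

    #define RW_LOCK_UNLOCKED 0x01000000  /* stand-in for __ARCH_RW_LOCK_UNLOCKED__ */

    struct rwlock {
            pthread_mutex_t guard;  /* replaces the ll/sc or irq-off section */
            int counter;            /* > 0: free or reader-held; 0: writer-held */
    };

    static bool read_trylock(struct rwlock *rw)
    {
            bool ok = false;

            pthread_mutex_lock(&rw->guard);
            if (rw->counter > 0) {          /* no writer: take a reader slot */
                    rw->counter--;
                    ok = true;
            }
            pthread_mutex_unlock(&rw->guard);
            return ok;
    }

    static bool write_trylock(struct rwlock *rw)
    {
            bool ok = false;

            pthread_mutex_lock(&rw->guard);
            if (rw->counter == RW_LOCK_UNLOCKED) {  /* no readers at all */
                    rw->counter = 0;
                    ok = true;
            }
            pthread_mutex_unlock(&rw->guard);
            return ok;
    }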
atomic64-arcv2.h 12 s64 __aligned(8) counter;
24 : "r"(&v->counter)); in arch_atomic64_read()
45 : "r"(a), "r"(&v->counter) in arch_atomic64_set()
62 : "r"(&v->counter), "ir"(a) \
79 : "r"(&v->counter), "ir"(a) \
101 : "r"(&v->counter), "ir"(a) \
200 : "r"(&v->counter) in arch_atomic64_dec_if_positive()
226 : "r"(&v->counter), "r"(a), "r"(u) in arch_atomic64_fetch_add_unless()
atomic-spinlock.h 25 WRITE_ONCE(v->counter, i); in arch_atomic_set()
37 v->counter c_op i; \
51 temp = v->counter; \
53 v->counter = temp; \
69 orig = v->counter; \
70 v->counter c_op i; \
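
atomic-spinlock.h is the fallback for arc configurations without LLOCK/SCOND: each op grabs a lock, performs the plain v->counter c_op i, and releases it; the parisc and sparc32 results further down use the same strategy. A sketch of that lock-backed atomic with a pthread mutex standing in for the kernel's irq-saving spinlock:

    #include <pthread.h>

    static pthread_mutex_t atomic_lock = PTHREAD_MUTEX_INITIALIZER;

    typedef struct { int counter; } fallback_atomic_t;

    /* fetch_add without hardware atomics: serialize through one lock,
     * as the ATOMIC_FETCH_OP macros above do. */
    static int fallback_fetch_add(int i, fallback_atomic_t *v)
    {
            int orig;

            pthread_mutex_lock(&atomic_lock);
            orig = v->counter;      /* orig = v->counter */
            v->counter += i;        /* v->counter c_op i, with c_op = += */
            pthread_mutex_unlock(&atomic_lock);
            return orig;
    }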
/arch/loongarch/include/asm/
atomic.h 32 #define arch_atomic_read(v) READ_ONCE((v)->counter)
33 #define arch_atomic_set(v, i) WRITE_ONCE((v)->counter, (i))
40 : "+ZB" (v->counter) \
52 : "+ZB" (v->counter), "=&r" (result) \
66 : "+ZB" (v->counter), "=&r" (result) \
120 [c]"=ZB" (v->counter) in ATOMIC_OPS()
143 : "=&r" (result), "=&r" (temp), "+ZC" (v->counter) in arch_atomic_sub_if_positive()
155 : "=&r" (result), "=&r" (temp), "+ZC" (v->counter) in arch_atomic_sub_if_positive()
168 #define arch_atomic64_read(v) READ_ONCE((v)->counter)
169 #define arch_atomic64_set(v, i) WRITE_ONCE((v)->counter, (i))
[all …]
local.h 36 : "+ZB" (l->a.counter), "=&r" (result) in local_add_return()
50 : "+ZB" (l->a.counter), "=&r" (result) in local_sub_return()
61 return cmpxchg_local(&l->a.counter, old, new); in local_cmpxchg()
66 return try_cmpxchg_local(&l->a.counter, in local_try_cmpxchg()
67 (typeof(l->a.counter) *) old, new); in local_try_cmpxchg()
141 #define __local_inc(l) ((l)->a.counter++)
142 #define __local_dec(l) ((l)->a.counter--)
143 #define __local_add(i, l) ((l)->a.counter += (i))
144 #define __local_sub(i, l) ((l)->a.counter -= (i))
/arch/parisc/include/asm/
atomic.h 64 v->counter = i; in arch_atomic_set()
73 return READ_ONCE((v)->counter); in arch_atomic_read()
82 v->counter c_op i; \
93 ret = (v->counter c_op i); \
106 ret = v->counter; \
107 v->counter c_op i; \
154 v->counter c_op i; \
165 ret = (v->counter c_op i); \
178 ret = v->counter; \
179 v->counter c_op i; \
[all …]
spinlock.h 91 if (rw->counter > 0) { in arch_read_trylock()
92 rw->counter--; in arch_read_trylock()
117 if (rw->counter == __ARCH_RW_LOCK_UNLOCKED__) { in arch_write_trylock()
118 rw->counter = 0; in arch_write_trylock()
145 rw->counter++; in arch_read_unlock()
156 rw->counter = __ARCH_RW_LOCK_UNLOCKED__; in arch_write_unlock()
/arch/arm64/geniezone/
hvc.c 26 static int gzvm_handle_ptp_time(struct gzvm_vcpu *vcpu, int counter) in gzvm_handle_ptp_time() argument
33 switch (counter) { in gzvm_handle_ptp_time()
62 int ret, counter; in gzvm_arch_handle_guest_hvc() local
66 counter = vcpu->run->hypercall.args[1]; in gzvm_arch_handle_guest_hvc()
67 ret = gzvm_handle_ptp_time(vcpu, counter); in gzvm_arch_handle_guest_hvc()
/arch/csky/include/asm/
atomic.h 18 return READ_ONCE(v->counter); in arch_atomic_read()
22 WRITE_ONCE(v->counter, i); in arch_atomic_set()
36 : "r" (i), "r" (&v->counter) \
60 : "r" (i), "r"(&v->counter) \ in ATOMIC_OP()
120 : "r" (a), "r" (&v->counter), "r" (u)
144 : "r" (&v->counter) in arch_atomic_inc_unless_negative()
169 : "r" (&v->counter) in arch_atomic_dec_unless_positive()
191 : "r" (&v->counter) in arch_atomic_dec_if_positive()
/arch/riscv/include/asm/
atomic.h 30 return READ_ONCE(v->counter); in arch_atomic_read()
34 WRITE_ONCE(v->counter, i); in arch_atomic_set()
41 return READ_ONCE(v->counter); in arch_atomic64_read()
45 WRITE_ONCE(v->counter, i); in arch_atomic64_set()
60 : "+A" (v->counter) \
96 : "+A" (v->counter), "=r" (ret) \ in ATOMIC_OPS()
107 : "+A" (v->counter), "=r" (ret) \
212 : [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
233 : [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter) in arch_atomic64_fetch_add_unless()
253 : [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter) in arch_atomic_inc_unless_negative()
[all …]
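
riscv keeps read/set as READ_ONCE/WRITE_ONCE and lowers the simple RMW ops to single AMO instructions (the "+A" constraint passes the counter's address), so only the conditional ops such as arch_atomic_fetch_add_unless() need the lr/sc loops seen at the bottom of the block. A portable CAS-loop equivalent of fetch_add_unless (a sketch, not the kernel function):

    #include <stdatomic.h>

    /* Add a to *v unless it currently equals u; return the old value. */
    static int fetch_add_unless(atomic_int *v, int a, int u)
    {
            int prev = atomic_load_explicit(v, memory_order_relaxed);

            while (prev != u &&
                   !atomic_compare_exchange_weak(v, &prev, prev + a))
                    ;       /* failed CAS refreshed prev; try again */
            return prev;
    }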
/arch/sparc/lib/
atomic32.c 38 ret = v->counter; \
39 v->counter c_op i; \
53 ret = (v->counter c_op i); \
76 ret = v->counter; in arch_atomic_xchg()
77 v->counter = new; in arch_atomic_xchg()
89 ret = v->counter; in arch_atomic_cmpxchg()
91 v->counter = new; in arch_atomic_cmpxchg()
104 ret = v->counter; in arch_atomic_fetch_add_unless()
106 v->counter += a; in arch_atomic_fetch_add_unless()
118 v->counter = i; in arch_atomic_set()
/arch/ia64/include/asm/
atomic.h 24 #define arch_atomic_read(v) READ_ONCE((v)->counter)
25 #define arch_atomic64_read(v) READ_ONCE((v)->counter)
27 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
28 #define arch_atomic64_set(v,i) WRITE_ONCE(((v)->counter), (i))
81 ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
89 ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
97 ? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq) \
105 ? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq) \
166 ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
174 ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
[all …]
/arch/openrisc/include/asm/
atomic.h 27 : "r"(&v->counter), "r"(i) \
44 : "r"(&v->counter), "r"(i) \
63 : "r"(&v->counter), "r"(i) \
121 : "r"(&v->counter), "r"(a), "r"(u) in ATOMIC_OP_RETURN()
128 #define arch_atomic_read(v) READ_ONCE((v)->counter)
129 #define arch_atomic_set(v,i) WRITE_ONCE((v)->counter, (i))
