Searched refs:counter (Results 1 – 25 of 153) sorted by relevance

/arch/x86/kernel/cpu/
perfctr-watchdog.c
98 int avail_to_resrv_perfctr_nmi_bit(unsigned int counter) in avail_to_resrv_perfctr_nmi_bit() argument
100 BUG_ON(counter > NMI_MAX_COUNTER_BITS); in avail_to_resrv_perfctr_nmi_bit()
102 return !test_bit(counter, perfctr_nmi_owner); in avail_to_resrv_perfctr_nmi_bit()
108 unsigned int counter; in reserve_perfctr_nmi() local
110 counter = nmi_perfctr_msr_to_bit(msr); in reserve_perfctr_nmi()
112 if (counter > NMI_MAX_COUNTER_BITS) in reserve_perfctr_nmi()
115 if (!test_and_set_bit(counter, perfctr_nmi_owner)) in reserve_perfctr_nmi()
123 unsigned int counter; in release_perfctr_nmi() local
125 counter = nmi_perfctr_msr_to_bit(msr); in release_perfctr_nmi()
127 if (counter > NMI_MAX_COUNTER_BITS) in release_perfctr_nmi()
[all …]
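
The watchdog code above treats the perf-counter MSRs as an ownership bitmap: each MSR maps to a bit, and an atomic test-and-set claims it. A minimal userspace sketch of the same reservation pattern, using C11 atomic_fetch_or in place of the kernel's test_and_set_bit (owner_bitmap, reserve_counter and release_counter are illustrative names, not kernel API):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define MAX_COUNTER_BITS 32

static atomic_uint owner_bitmap;                /* one bit per counter */

/* Atomically claim a counter bit; true means the caller now owns it. */
static bool reserve_counter(unsigned int bit)
{
    unsigned int mask;

    if (bit >= MAX_COUNTER_BITS)
        return false;
    mask = 1U << bit;
    /* fetch_or returns the old word: if the bit was already set,
       another user owns this counter. */
    return !(atomic_fetch_or(&owner_bitmap, mask) & mask);
}

static void release_counter(unsigned int bit)
{
    if (bit < MAX_COUNTER_BITS)
        atomic_fetch_and(&owner_bitmap, ~(1U << bit));
}

int main(void)
{
    printf("%d\n", reserve_counter(3));   /* 1: first claim succeeds */
    printf("%d\n", reserve_counter(3));   /* 0: already owned */
    release_counter(3);
    printf("%d\n", reserve_counter(3));   /* 1: free again */
    return 0;
}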
/arch/arm/include/asm/
atomic.h
30 #define atomic_read(v) ACCESS_ONCE((v)->counter)
31 #define atomic_set(v,i) (((v)->counter) = (i))
47 prefetchw(&v->counter); \
54 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
55 : "r" (&v->counter), "Ir" (i) \
66 prefetchw(&v->counter); \
74 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
75 : "r" (&v->counter), "Ir" (i) \
89 prefetchw(&ptr->counter); in atomic_cmpxchg()
97 : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter) in atomic_cmpxchg()
[all …]
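
The ARM routines above are built from a load-exclusive/store-exclusive (ldrex/strex) retry loop, with prefetchw pulling the line into the cache in a writable state first. The same retry structure in portable C11, where a weak compare-exchange plays the role of strex (a sketch, not the kernel's implementation):

#include <stdatomic.h>
#include <stdio.h>

/* ldrex/strex in portable form: read, compute, attempt to publish,
   and retry if another CPU (or a spurious failure) intervened. */
static int atomic_add_return_sketch(atomic_int *v, int i)
{
    int old = atomic_load_explicit(v, memory_order_relaxed);

    while (!atomic_compare_exchange_weak_explicit(
                v, &old, old + i,
                memory_order_seq_cst, memory_order_relaxed))
        ;   /* 'old' was refreshed by the failed exchange */
    return old + i;
}

int main(void)
{
    atomic_int v = 40;
    printf("%d\n", atomic_add_return_sketch(&v, 2));   /* 42 */
    return 0;
}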
/arch/x86/include/asm/
atomic64_64.h
21 return ACCESS_ONCE((v)->counter); in atomic64_read()
33 v->counter = i; in atomic64_set()
46 : "=m" (v->counter) in atomic64_add()
47 : "er" (i), "m" (v->counter)); in atomic64_add()
60 : "=m" (v->counter) in atomic64_sub()
61 : "er" (i), "m" (v->counter)); in atomic64_sub()
75 GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", "e"); in atomic64_sub_and_test()
87 : "=m" (v->counter) in atomic64_inc()
88 : "m" (v->counter)); in atomic64_inc()
100 : "=m" (v->counter) in atomic64_dec()
[all …]
atomic.h
27 return ACCESS_ONCE((v)->counter); in atomic_read()
39 v->counter = i; in atomic_set()
52 : "+m" (v->counter) in atomic_add()
66 : "+m" (v->counter) in atomic_sub()
81 GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e"); in atomic_sub_and_test()
93 : "+m" (v->counter)); in atomic_inc()
105 : "+m" (v->counter)); in atomic_dec()
118 GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e"); in atomic_dec_and_test()
131 GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e"); in atomic_inc_and_test()
145 GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s"); in atomic_add_negative()
[all …]
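
In contrast to the LL/SC architectures, each x86 operation above is a single lock-prefixed instruction, and the GEN_*_RMWcc macros additionally test the resulting condition code. A standalone sketch of both shapes in GCC/Clang inline asm for x86 (my_atomic_t and the function names are illustrative):

#include <stdio.h>

typedef struct { volatile int counter; } my_atomic_t;   /* illustrative */

/* The whole read-modify-write is one lock-prefixed instruction;
   no retry loop is needed. */
static inline void my_atomic_add(int i, my_atomic_t *v)
{
    asm volatile("lock addl %1,%0" : "+m" (v->counter) : "ir" (i));
}

/* lock decl, then materialize the Zero flag: the shape behind
   atomic_dec_and_test() before the RMWcc macros. */
static inline int my_atomic_dec_and_test(my_atomic_t *v)
{
    unsigned char c;

    asm volatile("lock decl %0\n\tsete %1"
                 : "+m" (v->counter), "=qm" (c) : : "memory");
    return c;
}

int main(void)
{
    my_atomic_t v = { 2 };

    my_atomic_add(-1, &v);                       /* 2 -> 1 */
    printf("%d\n", my_atomic_dec_and_test(&v));  /* 1: counter hit zero */
    return 0;
}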
local.h
21 : "+m" (l->a.counter)); in local_inc()
27 : "+m" (l->a.counter)); in local_dec()
33 : "+m" (l->a.counter) in local_add()
40 : "+m" (l->a.counter) in local_sub()
55 GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, "er", i, "%0", "e"); in local_sub_and_test()
68 GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, "%0", "e"); in local_dec_and_test()
81 GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, "%0", "e"); in local_inc_and_test()
95 GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, "er", i, "%0", "s"); in local_add_negative()
109 : "+r" (i), "+m" (l->a.counter) in local_add_return()
123 (cmpxchg_local(&((l)->a.counter), (o), (n)))
[all …]
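
local_t wraps an atomic_t, but its operations only need to be atomic with respect to the CPU they run on (interrupts and preemption), which is why the x86 versions above use the same instructions without the lock prefix. A sketch of that distinction for x86-64 (illustrative types; real local_t users also keep the task pinned to one CPU):

#include <stdio.h>

typedef struct { volatile long counter; } my_local_t;   /* illustrative */

/* Safe against interrupts on the same CPU: a single instruction is
   never torn by an interrupt, so no lock prefix is required. */
static inline void my_local_inc(my_local_t *l)
{
    asm volatile("incq %0" : "+m" (l->counter));
}

/* The SMP-safe counterpart pays for cross-CPU exclusivity. */
static inline void my_atomic_inc(volatile long *p)
{
    asm volatile("lock incq %0" : "+m" (*p));
}

int main(void)
{
    my_local_t l = { 0 };
    long g = 0;

    my_local_inc(&l);
    my_atomic_inc(&g);
    printf("%ld %ld\n", l.counter, g);   /* 1 1 */
    return 0;
}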
/arch/powerpc/include/asm/
atomic.h
19 __asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter)); in atomic_read()
26 __asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i)); in atomic_set()
40 : "=&r" (t), "+m" (v->counter) \
41 : "r" (a), "r" (&v->counter) \
59 : "r" (a), "r" (&v->counter) \
86 : "=&r" (t), "+m" (v->counter) in ATOMIC_OPS()
87 : "r" (&v->counter) in ATOMIC_OPS()
104 : "r" (&v->counter) in atomic_inc_return()
130 : "=&r" (t), "+m" (v->counter) in atomic_dec()
131 : "r" (&v->counter) in atomic_dec()
[all …]
local.h
33 : "r" (a), "r" (&(l->a.counter)) in local_add_return()
52 : "r" (a), "r" (&(l->a.counter)) in local_sub_return()
69 : "r" (&(l->a.counter)) in local_inc_return()
96 : "r" (&(l->a.counter)) in local_dec_return()
103 (cmpxchg_local(&((l)->a.counter), (o), (n)))
104 #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
130 : "r" (&(l->a.counter)), "r" (a), "r" (u) in local_add_unless()
159 : "r" (&(l->a.counter)) in local_dec_if_positive()
170 #define __local_inc(l) ((l)->a.counter++)
171 #define __local_dec(l) ((l)->a.counter--)
[all …]
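
local_add_unless and local_dec_if_positive above are conditional updates: the larx/stcx. loop only publishes a store when a predicate on the old value holds. The same control flow in portable C11 (a sketch; the kernel versions also issue the powerpc-specific barriers):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Add 'a' unless the counter currently equals 'u'; true if added. */
static bool add_unless_sketch(atomic_long *v, long a, long u)
{
    long old = atomic_load(v);

    do {
        if (old == u)
            return false;        /* predicate failed: store skipped */
    } while (!atomic_compare_exchange_weak(v, &old, old + a));
    return true;
}

/* Decrement only if the result stays non-negative; return the
   decremented value, which is negative when the store was refused. */
static long dec_if_positive_sketch(atomic_long *v)
{
    long old = atomic_load(v);

    do {
        if (old - 1 < 0)
            return old - 1;
    } while (!atomic_compare_exchange_weak(v, &old, old - 1));
    return old - 1;
}

int main(void)
{
    atomic_long v = 1;

    printf("%d\n", add_unless_sketch(&v, 5, 1));  /* 0: v == u */
    printf("%ld\n", dec_if_positive_sketch(&v));  /* 0 */
    printf("%ld\n", dec_if_positive_sketch(&v));  /* -1: refused */
    return 0;
}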
/arch/mips/include/asm/
atomic.h
33 #define atomic_read(v) ACCESS_ONCE((v)->counter)
42 #define atomic_set(v, i) ((v)->counter = (i))
57 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
69 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
76 v->counter c_op i; \
100 "+" GCC_OFF_SMALL_ASM() (v->counter) \
113 "+" GCC_OFF_SMALL_ASM() (v->counter) \
122 result = v->counter; \
124 v->counter = result; \
174 "+" GCC_OFF_SMALL_ASM() (v->counter) in atomic_sub_if_positive()
[all …]
local.h
44 : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) in local_add_return()
45 : "Ir" (i), "m" (l->a.counter) in local_add_return()
58 : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) in local_add_return()
59 : "Ir" (i), "m" (l->a.counter) in local_add_return()
65 result = l->a.counter; in local_add_return()
67 l->a.counter = result; in local_add_return()
89 : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) in local_sub_return()
90 : "Ir" (i), "m" (l->a.counter) in local_sub_return()
103 : "=&r" (result), "=&r" (temp), "=m" (l->a.counter) in local_sub_return()
104 : "Ir" (i), "m" (l->a.counter) in local_sub_return()
[all …]
/arch/metag/include/asm/
atomic_lock1.h
13 return (v)->counter; in atomic_read()
35 v->counter = i; in atomic_set()
47 v->counter c_op i; \
58 result = v->counter; \
61 v->counter = result; \
82 v->counter &= ~mask; in atomic_clear_mask()
92 v->counter |= mask; in atomic_set_mask()
102 ret = v->counter; in atomic_cmpxchg()
105 v->counter = new; in atomic_cmpxchg()
112 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
[all …]
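
atomic_lock1.h is metag's lock-based flavor: every operation is plain C bracketed by a spinlock, so the counter itself needs no special instructions. A userspace sketch of that scheme with a C11 atomic_flag standing in for the kernel's irq-safe lock (the my_atomic_* names are illustrative):

#include <stdatomic.h>
#include <stdio.h>

typedef struct { int counter; } my_atomic_t;   /* plain int suffices */

/* Stands in for the kernel's hashed, irq-safe lock. */
static atomic_flag lock = ATOMIC_FLAG_INIT;

static void spin_lock(void)
{
    while (atomic_flag_test_and_set_explicit(&lock, memory_order_acquire))
        ;                                      /* spin */
}

static void spin_unlock(void)
{
    atomic_flag_clear_explicit(&lock, memory_order_release);
}

/* Every operation is ordinary C under the lock. */
static int my_atomic_add_return(int i, my_atomic_t *v)
{
    int result;

    spin_lock();
    v->counter += i;
    result = v->counter;
    spin_unlock();
    return result;
}

int main(void)
{
    my_atomic_t v = { 40 };
    printf("%d\n", my_atomic_add_return(2, &v));   /* 42 */
    return 0;
}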
atomic_lnkget.h
6 #define atomic_set(v, i) ((v)->counter = (i))
25 : "da" (&v->counter)); in atomic_read()
44 : "da" (&v->counter), "bd" (i) \
64 : "da" (&v->counter), "bd" (i) \
94 : "da" (&v->counter), "bd" (~mask) in ATOMIC_OPS()
111 : "da" (&v->counter), "bd" (mask) in atomic_set_mask()
132 : "da" (&v->counter), "bd" (old), "da" (new) in atomic_cmpxchg()
152 : "da" (&v->counter), "da" (new) in atomic_xchg()
176 : "da" (&v->counter), "bd" (u), "bd" (a) in __atomic_add_unless()
199 : "da" (&v->counter), "bd" (i) in atomic_sub_if_positive()
/arch/cris/include/asm/
atomic.h
20 #define atomic_read(v) ACCESS_ONCE((v)->counter)
21 #define atomic_set(v,i) (((v)->counter) = (i))
30 v->counter c_op i; \
40 retval = (v->counter c_op i); \
61 retval = (v->counter -= i) == 0; in atomic_sub_and_test()
70 (v->counter)++; in atomic_inc()
78 (v->counter)--; in atomic_dec()
87 retval = ++(v->counter); in atomic_inc_return()
97 retval = --(v->counter); in atomic_dec_return()
106 retval = --(v->counter) == 0; in atomic_dec_and_test()
[all …]
/arch/tile/include/asm/
atomic_64.h
27 #define atomic_set(v, i) ((v)->counter = (i))
37 __insn_fetchadd4((void *)&v->counter, i); in atomic_add()
44 val = __insn_fetchadd4((void *)&v->counter, i) + i; in atomic_add_return()
51 int guess, oldval = v->counter; in __atomic_add_unless()
56 oldval = cmpxchg(&v->counter, guess, guess + a); in __atomic_add_unless()
65 #define atomic64_read(v) ((v)->counter)
66 #define atomic64_set(v, i) ((v)->counter = (i))
70 __insn_fetchadd((void *)&v->counter, i); in atomic64_add()
77 val = __insn_fetchadd((void *)&v->counter, i) + i; in atomic64_add_return()
84 long guess, oldval = v->counter; in atomic64_add_unless()
[all …]
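
tile has a real fetch-and-add instruction (__insn_fetchadd4), so atomic_add_return above is one operation, while __atomic_add_unless still needs a cmpxchg guess loop. C11's atomic_fetch_add exposes the same split, compiling to hardware fetch-and-add where the ISA provides one and to a CAS loop elsewhere (a sketch):

#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
    atomic_int v = 40;

    /* One hardware fetch-and-add where available (cf. tile's
       fetchadd4); a compare-exchange loop on other targets. */
    int old = atomic_fetch_add(&v, 2);

    printf("%d %d\n", old, atomic_load(&v));   /* 40 42 */
    return 0;
}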
atomic_32.h
34 _atomic_xchg_add(&v->counter, i); in atomic_add()
47 return _atomic_xchg_add(&v->counter, i) + i; in atomic_add_return()
62 return _atomic_xchg_add_unless(&v->counter, a, u); in __atomic_add_unless()
77 _atomic_xchg(&v->counter, n); in atomic_set()
83 long long counter; member
101 return _atomic64_xchg_add((long long *)&v->counter, 0); in atomic64_read()
113 _atomic64_xchg_add(&v->counter, i); in atomic64_add()
126 return _atomic64_xchg_add(&v->counter, i) + i; in atomic64_add_return()
142 return _atomic64_xchg_add_unless(&v->counter, a, u) != u; in atomic64_add_unless()
157 _atomic64_xchg(&v->counter, n); in atomic64_set()
/arch/parisc/include/asm/
spinlock.h
78 rw->counter++; in arch_read_lock()
90 rw->counter--; in arch_read_unlock()
103 rw->counter++; in arch_read_trylock()
111 if (rw->counter < 0) in arch_read_trylock()
115 while (arch_spin_is_locked(&rw->lock) && rw->counter >= 0) in arch_read_trylock()
130 if (rw->counter != 0) { in arch_write_lock()
134 while (rw->counter != 0) in arch_write_lock()
140 rw->counter = -1; /* mark as write-locked */ in arch_write_lock()
147 rw->counter = 0; in arch_write_unlock()
160 if (rw->counter == 0) { in arch_write_trylock()
[all …]
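
The parisc rwlock above encodes its state in one counter: positive counts readers, zero is unlocked, and -1 marks a writer, with an outer arch spinlock serializing updates. A compact all-atomic variant of the same counter protocol, using CAS instead of the outer lock (the *_trylock names are illustrative):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* counter > 0: that many readers; 0: unlocked; -1: write-locked. */
static atomic_int counter;

static bool read_trylock(void)
{
    int c = atomic_load(&counter);

    while (c >= 0)                       /* no writer present */
        if (atomic_compare_exchange_weak(&counter, &c, c + 1))
            return true;                 /* c is reloaded on failure */
    return false;
}

static void read_unlock(void)  { atomic_fetch_sub(&counter, 1); }

static bool write_trylock(void)
{
    int expected = 0;                    /* only an idle lock is takeable */
    return atomic_compare_exchange_strong(&counter, &expected, -1);
}

static void write_unlock(void) { atomic_store(&counter, 0); }

int main(void)
{
    printf("%d\n", read_trylock());    /* 1 */
    printf("%d\n", read_trylock());    /* 1: readers share */
    printf("%d\n", write_trylock());   /* 0: readers hold it */
    read_unlock();
    read_unlock();
    printf("%d\n", write_trylock());   /* 1 */
    write_unlock();
    return 0;
}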
/arch/frv/include/asm/
atomic.h
34 #define atomic_read(v) ACCESS_ONCE((v)->counter)
35 #define atomic_set(v, i) (((v)->counter) = (i))
51 : "+U"(v->counter), "=&r"(val) in atomic_add_return()
72 : "+U"(v->counter), "=&r"(val) in atomic_sub_return()
123 volatile long long counter; member
130 long long counter; in atomic64_read() local
133 : "=e"(counter) in atomic64_read()
134 : "m"(v->counter)); in atomic64_read()
135 return counter; in atomic64_read()
141 : "=m"(v->counter) in atomic64_set()
[all …]
/arch/avr32/include/asm/
atomic.h
22 #define atomic_read(v) ACCESS_ONCE((v)->counter)
23 #define atomic_set(v, i) (((v)->counter) = i)
37 : "=&r" (result), "=o" (v->counter) \
38 : "m" (v->counter), #asm_con (i) \
117 : "=&r"(tmp), "=o"(v->counter) in __atomic_add_unless()
118 : "m"(v->counter), "rKs21"(-a), "rKs21"(u) in __atomic_add_unless()
131 : "=&r"(tmp), "=o"(v->counter) in __atomic_add_unless()
132 : "m"(v->counter), "r"(a), "ir"(u) in __atomic_add_unless()
162 : "=&r"(result), "=o"(v->counter) in atomic_sub_if_positive()
163 : "m"(v->counter), "ir"(i) in atomic_sub_if_positive()
[all …]
/arch/m32r/include/asm/
local.h
27 typedef struct { volatile int counter; } local_t; member
37 #define local_read(l) ((l)->counter)
46 #define local_set(l, i) (((l)->counter) = (i))
68 : "r" (&l->counter), "r" (i) in local_add_return()
99 : "r" (&l->counter), "r" (i) in local_sub_return()
158 : "r" (&l->counter) in local_inc_return()
188 : "r" (&l->counter) in local_dec_return()
246 #define local_cmpxchg(l, o, n) (cmpxchg_local(&((l)->counter), (o), (n)))
247 #define local_xchg(l, new) (xchg_local(&((l)->counter), new))
330 #define __local_inc(l) ((l)->counter++)
[all …]
/arch/alpha/include/asm/
atomic.h
20 #define atomic_read(v) ACCESS_ONCE((v)->counter)
21 #define atomic64_read(v) ACCESS_ONCE((v)->counter)
23 #define atomic_set(v,i) ((v)->counter = (i))
24 #define atomic64_set(v,i) ((v)->counter = (i))
44 :"=&r" (temp), "=m" (v->counter) \
45 :"Ir" (i), "m" (v->counter)); \
62 :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
63 :"Ir" (i), "m" (v->counter) : "memory"); \
80 :"=&r" (temp), "=m" (v->counter) \
81 :"Ir" (i), "m" (v->counter)); \
[all …]
local.h
32 :"=&r" (temp), "=m" (l->a.counter), "=&r" (result) in local_add_return()
33 :"Ir" (i), "m" (l->a.counter) : "memory"); in local_add_return()
49 :"=&r" (temp), "=m" (l->a.counter), "=&r" (result) in local_sub_return()
50 :"Ir" (i), "m" (l->a.counter) : "memory"); in local_sub_return()
55 (cmpxchg_local(&((l)->a.counter), (o), (n)))
56 #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
96 #define __local_inc(l) ((l)->a.counter++)
97 #define __local_dec(l) ((l)->a.counter--)
98 #define __local_add(i,l) ((l)->a.counter+=(i))
99 #define __local_sub(i,l) ((l)->a.counter-=(i))
/arch/arm64/include/asm/
atomic.h
38 #define atomic_read(v) ACCESS_ONCE((v)->counter)
39 #define atomic_set(v,i) (((v)->counter) = (i))
58 : "=&r" (result), "=&r" (tmp), "+Q" (v->counter) \
73 : "=&r" (result), "=&r" (tmp), "+Q" (v->counter) \
106 : "=&r" (tmp), "=&r" (oldval), "+Q" (ptr->counter) in ATOMIC_OPS()
114 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
142 #define atomic64_read(v) ACCESS_ONCE((v)->counter)
143 #define atomic64_set(v,i) (((v)->counter) = (i))
156 : "=&r" (result), "=&r" (tmp), "+Q" (v->counter) \
171 : "=&r" (result), "=&r" (tmp), "+Q" (v->counter) \
[all …]
/arch/arc/include/asm/
spinlock.h
101 #define arch_read_can_lock(x) ((x)->counter > 0)
104 #define arch_write_can_lock(x) ((x)->counter == __ARCH_RW_LOCK_UNLOCKED__)
117 if (rw->counter > 0) { in arch_read_trylock()
118 rw->counter--; in arch_read_trylock()
141 if (rw->counter == __ARCH_RW_LOCK_UNLOCKED__) { in arch_write_trylock()
142 rw->counter = 0; in arch_write_trylock()
165 rw->counter++; in arch_read_unlock()
172 rw->counter = __ARCH_RW_LOCK_UNLOCKED__; in arch_write_unlock()
atomic.h
20 #define atomic_read(v) ((v)->counter)
24 #define atomic_set(v, i) (((v)->counter) = (i))
37 : "r"(&v->counter), "ir"(i) \
58 : "r"(&v->counter), "ir"(i) \
71 #define atomic_set(v, i) (((v)->counter) = (i))
89 v->counter = i; in atomic_set()
106 v->counter c_op i; \
120 temp = v->counter; \
122 v->counter = temp; \
/arch/blackfin/include/asm/
atomic.h
25 #define atomic_read(v) __raw_uncached_fetch_asm(&(v)->counter)
27 #define atomic_add_return(i, v) __raw_atomic_update_asm(&(v)->counter, i)
28 #define atomic_sub_return(i, v) __raw_atomic_update_asm(&(v)->counter, -(i))
30 #define atomic_clear_mask(m, v) __raw_atomic_clear_asm(&(v)->counter, m)
31 #define atomic_set_mask(m, v) __raw_atomic_set_asm(&(v)->counter, m)
/arch/s390/include/asm/
atomic.h
41 : "=d" (old_val), "+Q" ((ptr)->counter) \
65 : "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
79 : "=d" (c) : "Q" (v->counter)); in atomic_read()
87 : "=Q" (v->counter) : "d" (i)); in atomic_set()
101 : "+Q" (v->counter) in atomic_add()
131 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
137 : "+d" (old), "+Q" (v->counter) in atomic_cmpxchg()
183 : "=d" (old_val), "+Q" ((ptr)->counter) \
207 : "=&d" (old_val), "=&d" (new_val), "+Q" ((ptr)->counter)\
221 : "=d" (c) : "Q" (v->counter)); in atomic64_read()
[all …]
