Lines Matching full:v
/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
static inline long arch_atomic64_read(const atomic64_t *v)
{
	/* READ_ONCE prevents the compiler from tearing/caching the load. */
	return READ_ONCE((v)->counter);
}
/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, long i)
{
	/* WRITE_ONCE prevents the compiler from tearing the store. */
	WRITE_ONCE(v->counter, i);
}
40 * @v: pointer to type atomic64_t
42 * Atomically adds @i to @v.
44 static __always_inline void arch_atomic64_add(long i, atomic64_t *v) in arch_atomic64_add() argument
47 : "=m" (v->counter) in arch_atomic64_add()
48 : "er" (i), "m" (v->counter) : "memory"); in arch_atomic64_add()
54 * @v: pointer to type atomic64_t
56 * Atomically subtracts @i from @v.
58 static inline void arch_atomic64_sub(long i, atomic64_t *v) in arch_atomic64_sub() argument
61 : "=m" (v->counter) in arch_atomic64_sub()
62 : "er" (i), "m" (v->counter) : "memory"); in arch_atomic64_sub()
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_sub_and_test(long i, atomic64_t *v)
{
	/* "e" condition code: return ZF, i.e. result == 0. */
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}
82 * @v: pointer to type atomic64_t
84 * Atomically increments @v by 1.
86 static __always_inline void arch_atomic64_inc(atomic64_t *v) in arch_atomic64_inc() argument
89 : "=m" (v->counter) in arch_atomic64_inc()
90 : "m" (v->counter) : "memory"); in arch_atomic64_inc()
96 * @v: pointer to type atomic64_t
98 * Atomically decrements @v by 1.
100 static __always_inline void arch_atomic64_dec(atomic64_t *v) in arch_atomic64_dec() argument
103 : "=m" (v->counter) in arch_atomic64_dec()
104 : "m" (v->counter) : "memory"); in arch_atomic64_dec()
/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	/* "e" condition code: true when the decremented value is zero. */
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	/* "e" condition code: true when the incremented value is zero. */
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}
/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool arch_atomic64_add_negative(long i, atomic64_t *v)
{
	/* "s" condition code: return the sign flag of the result. */
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long arch_atomic64_add_return(long i, atomic64_t *v)
{
	/* xadd returns the old value; add @i to yield the new value. */
	return i + xadd(&v->counter, i);
}
163 static inline long arch_atomic64_sub_return(long i, atomic64_t *v) in arch_atomic64_sub_return() argument
165 return arch_atomic64_add_return(-i, v); in arch_atomic64_sub_return()
/*
 * arch_atomic64_fetch_add - atomically add @i to @v and return the
 * value @v held before the addition (xadd returns the old value).
 */
static inline long arch_atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}
173 static inline long arch_atomic64_fetch_sub(long i, atomic64_t *v) in arch_atomic64_fetch_sub() argument
175 return xadd(&v->counter, -i); in arch_atomic64_fetch_sub()
/*
 * arch_atomic64_cmpxchg - atomic compare-and-exchange on @v->counter;
 * delegates to arch_cmpxchg() and returns its result (by convention the
 * value observed in @v — equals @old on success).
 */
static inline long arch_atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
/*
 * arch_atomic64_try_cmpxchg - compare-and-exchange returning a success
 * flag; delegates to try_cmpxchg(). NOTE(review): by kernel convention
 * *@old is updated to the observed value on failure — confirm against
 * the try_cmpxchg() definition in cmpxchg.h.
 */
static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, long new)
{
	return try_cmpxchg(&v->counter, old, new);
}
/*
 * arch_atomic64_xchg - atomically exchange @v->counter with @new via
 * arch_xchg(), returning the previous value.
 */
static inline long arch_atomic64_xchg(atomic64_t *v, long new)
{
	return arch_xchg(&v->counter, new);
}
194 static inline void arch_atomic64_and(long i, atomic64_t *v) in arch_atomic64_and() argument
197 : "+m" (v->counter) in arch_atomic64_and()
202 static inline long arch_atomic64_fetch_and(long i, atomic64_t *v) in arch_atomic64_fetch_and() argument
204 s64 val = arch_atomic64_read(v); in arch_atomic64_fetch_and()
207 } while (!arch_atomic64_try_cmpxchg(v, &val, val & i)); in arch_atomic64_fetch_and()
211 static inline void arch_atomic64_or(long i, atomic64_t *v) in arch_atomic64_or() argument
214 : "+m" (v->counter) in arch_atomic64_or()
219 static inline long arch_atomic64_fetch_or(long i, atomic64_t *v) in arch_atomic64_fetch_or() argument
221 s64 val = arch_atomic64_read(v); in arch_atomic64_fetch_or()
224 } while (!arch_atomic64_try_cmpxchg(v, &val, val | i)); in arch_atomic64_fetch_or()
228 static inline void arch_atomic64_xor(long i, atomic64_t *v) in arch_atomic64_xor() argument
231 : "+m" (v->counter) in arch_atomic64_xor()
236 static inline long arch_atomic64_fetch_xor(long i, atomic64_t *v) in arch_atomic64_fetch_xor() argument
238 s64 val = arch_atomic64_read(v); in arch_atomic64_fetch_xor()
241 } while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i)); in arch_atomic64_fetch_xor()