References to v in the powerpc atomics header (arch/powerpc/include/asm/atomic.h), as reported by an identifier cross-reference. Each entry gives the line number in that file followed by the matching source line; in every case v is the atomic_t (or atomic64_t) operand being passed in or dereferenced.
atomic_read() / atomic_set():
  25  static __inline__ int atomic_read(const atomic_t *v)
  29          __asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));
  34  static __inline__ void atomic_set(atomic_t *v, int i)
  36          __asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
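
On powerpc a single aligned lwz/stw is inherently atomic, so atomic_read() and atomic_set() need no barrier or retry loop; the inline asm only stops the compiler from tearing, caching, or reordering the access, and the %U1%X1 / %U0%X0 modifiers let GCC pick update/indexed addressing forms. A minimal user-space re-creation, assuming (as in the kernel) that atomic_t is just a wrapped int; the my_ names are hypothetical:

    typedef struct { int counter; } atomic_t;       /* same layout as the kernel's */

    static inline int my_atomic_read(const atomic_t *v)
    {
            int t;

            /* one aligned load; the asm wrapper forbids compiler games */
            __asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));
            return t;
    }

    static inline void my_atomic_set(atomic_t *v, int i)
    {
            /* one aligned store */
            __asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
    }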
ATOMIC_OP() / ATOMIC_OP_RETURN_RELAXED() / ATOMIC_FETCH_OP_RELAXED() macro bodies:
  40  static __inline__ void atomic_##op(int a, atomic_t *v) \
  49          : "=&r" (t), "+m" (v->counter) \
  50          : "r" (a), "r" (&v->counter) \
  55  static inline int atomic_##op##_return_relaxed(int a, atomic_t *v) \
  64          : "=&r" (t), "+m" (v->counter) \
  65          : "r" (a), "r" (&v->counter) \
  72  static inline int atomic_fetch_##op##_relaxed(int a, atomic_t *v) \
  81          : "=&r" (res), "=&r" (t), "+m" (v->counter) \
  82          : "r" (a), "r" (&v->counter) \
atomic_inc() / atomic_inc_return_relaxed() / atomic_dec() / atomic_dec_return_relaxed():
 120  static __inline__ void atomic_inc(atomic_t *v)
 129          : "=&r" (t), "+m" (v->counter)
 130          : "r" (&v->counter)
 135  static __inline__ int atomic_inc_return_relaxed(atomic_t *v)
 144          : "=&r" (t), "+m" (v->counter)
 145          : "r" (&v->counter)
 151  static __inline__ void atomic_dec(atomic_t *v)
 160          : "=&r" (t), "+m" (v->counter)
 161          : "r" (&v->counter)
 166  static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
 175          : "=&r" (t), "+m" (v->counter)
 176          : "r" (&v->counter)
atomic_cmpxchg() / atomic_xchg() families (thin wrappers over the generic cmpxchg/xchg helpers):
 185  #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 186  #define atomic_cmpxchg_relaxed(v, o, n) \
 187          cmpxchg_relaxed(&((v)->counter), (o), (n))
 188  #define atomic_cmpxchg_acquire(v, o, n) \
 189          cmpxchg_acquire(&((v)->counter), (o), (n))
 191  #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 192  #define atomic_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
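
These simply apply the generic cmpxchg()/xchg() implementations to v->counter. A hypothetical kernel-style caller, to show the shape of a cmpxchg retry loop (illustrative code, not from this file):

    /* Lock-free "record the maximum value seen" using atomic_cmpxchg(). */
    static inline void atomic_max(atomic_t *v, int new)
    {
            int old = atomic_read(v);

            while (old < new) {
                    int prev = atomic_cmpxchg(v, old, new);
                    if (prev == old)
                            break;          /* our value was installed */
                    old = prev;             /* raced with another updater: re-check */
            }
    }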
atomic_try_cmpxchg_lock():
 201  atomic_try_cmpxchg_lock(atomic_t *v, int *old, int new)
 213          : "=&r" (r), "+m" (v->counter)
 214          : "r" (&v->counter), "r" (o), "r" (new)
atomic_fetch_add_unless():
 231  static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 247          : "r" (&v->counter), "r" (a), "r" (u)
atomic_inc_not_zero():
 261  static __inline__ int atomic_inc_not_zero(atomic_t *v)
 277          : "r" (&v->counter)
 282  #define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
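
atomic_inc_not_zero() is the classic "take a reference only while the object is still live" primitive; powerpc open-codes it in asm rather than relying on the generic fetch_add_unless(v, 1, 0) construction. A hypothetical RCU-style lookup path using it (struct obj and obj_get are illustrative):

    struct obj { atomic_t refs; /* ... payload ... */ };

    static inline struct obj *obj_get(struct obj *o)
    {
            /* If refs already hit zero the object is on its way to being
             * freed; refuse to resurrect it. */
            return atomic_inc_not_zero(&o->refs) ? o : NULL;
    }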
atomic_dec_if_positive():
 289  static __inline__ int atomic_dec_if_positive(atomic_t *v)
 304          : "r" (&v->counter)
atomic64_read() / atomic64_set():
 315  static __inline__ s64 atomic64_read(const atomic64_t *v)
 319          __asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));
 324  static __inline__ void atomic64_set(atomic64_t *v, s64 i)
 326          __asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
ATOMIC64_OP() / ATOMIC64_OP_RETURN_RELAXED() / ATOMIC64_FETCH_OP_RELAXED() macro bodies:
 330  static __inline__ void atomic64_##op(s64 a, atomic64_t *v) \
 339          : "=&r" (t), "+m" (v->counter) \
 340          : "r" (a), "r" (&v->counter) \
 346  atomic64_##op##_return_relaxed(s64 a, atomic64_t *v) \
 355          : "=&r" (t), "+m" (v->counter) \
 356          : "r" (a), "r" (&v->counter) \
 364  atomic64_fetch_##op##_relaxed(s64 a, atomic64_t *v) \
 373          : "=&r" (res), "=&r" (t), "+m" (v->counter) \
 374          : "r" (a), "r" (&v->counter) \
atomic64_inc() / atomic64_inc_return_relaxed() / atomic64_dec() / atomic64_dec_return_relaxed():
 412  static __inline__ void atomic64_inc(atomic64_t *v)
 421          : "=&r" (t), "+m" (v->counter)
 422          : "r" (&v->counter)
 427  static __inline__ s64 atomic64_inc_return_relaxed(atomic64_t *v)
 436          : "=&r" (t), "+m" (v->counter)
 437          : "r" (&v->counter)
 443  static __inline__ void atomic64_dec(atomic64_t *v)
 452          : "=&r" (t), "+m" (v->counter)
 453          : "r" (&v->counter)
 458  static __inline__ s64 atomic64_dec_return_relaxed(atomic64_t *v)
 467          : "=&r" (t), "+m" (v->counter)
 468          : "r" (&v->counter)
atomic64_dec_if_positive():
 481  static __inline__ s64 atomic64_dec_if_positive(atomic64_t *v)
 495          : "r" (&v->counter)
atomic64_cmpxchg() / atomic64_xchg() families:
 502  #define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 503  #define atomic64_cmpxchg_relaxed(v, o, n) \
 504          cmpxchg_relaxed(&((v)->counter), (o), (n))
 505  #define atomic64_cmpxchg_acquire(v, o, n) \
 506          cmpxchg_acquire(&((v)->counter), (o), (n))
 508  #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 509  #define atomic64_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
atomic64_fetch_add_unless():
 520  static __inline__ s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
 536          : "r" (&v->counter), "r" (a), "r" (u)
atomic64_inc_not_zero():
 550  static __inline__ int atomic64_inc_not_zero(atomic64_t *v)
 566          : "r" (&v->counter)
 571  #define atomic64_inc_not_zero(v) atomic64_inc_not_zero((v))
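
The self-referential #define at line 571 (like its 32-bit twin at line 282) is a feature-detection idiom, not a no-op: defining a macro that calls the inline function of the same name lets the generic atomic headers test with #ifdef whether the architecture supplies its own implementation. Conceptually, the consuming side looks like this (a sketch of the generic-header pattern, not this file's code):

    #ifndef atomic64_inc_not_zero
    /* No arch override visible: fall back to a generic construction
     * built on atomic64_fetch_add_unless(). */
    #define atomic64_inc_not_zero(v)  (atomic64_fetch_add_unless((v), 1, 0) != 0)
    #endif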