Lines Matching refs:s64
834 static __always_inline s64
843 static __always_inline s64
853 atomic64_set(atomic64_t *v, s64 i) in atomic64_set()
862 atomic64_set_release(atomic64_t *v, s64 i) in atomic64_set_release()
871 atomic64_add(s64 i, atomic64_t *v) in atomic64_add()
879 static __always_inline s64
880 atomic64_add_return(s64 i, atomic64_t *v) in atomic64_add_return()
889 static __always_inline s64
890 atomic64_add_return_acquire(s64 i, atomic64_t *v) in atomic64_add_return_acquire()
899 static __always_inline s64
900 atomic64_add_return_release(s64 i, atomic64_t *v) in atomic64_add_return_release()
909 static __always_inline s64
910 atomic64_add_return_relaxed(s64 i, atomic64_t *v) in atomic64_add_return_relaxed()
919 static __always_inline s64
920 atomic64_fetch_add(s64 i, atomic64_t *v) in atomic64_fetch_add()
929 static __always_inline s64
930 atomic64_fetch_add_acquire(s64 i, atomic64_t *v) in atomic64_fetch_add_acquire()
939 static __always_inline s64
940 atomic64_fetch_add_release(s64 i, atomic64_t *v) in atomic64_fetch_add_release()
949 static __always_inline s64
950 atomic64_fetch_add_relaxed(s64 i, atomic64_t *v) in atomic64_fetch_add_relaxed()
959 atomic64_sub(s64 i, atomic64_t *v) in atomic64_sub()
967 static __always_inline s64
968 atomic64_sub_return(s64 i, atomic64_t *v) in atomic64_sub_return()
977 static __always_inline s64
978 atomic64_sub_return_acquire(s64 i, atomic64_t *v) in atomic64_sub_return_acquire()
987 static __always_inline s64
988 atomic64_sub_return_release(s64 i, atomic64_t *v) in atomic64_sub_return_release()
997 static __always_inline s64
998 atomic64_sub_return_relaxed(s64 i, atomic64_t *v) in atomic64_sub_return_relaxed()
1007 static __always_inline s64
1008 atomic64_fetch_sub(s64 i, atomic64_t *v) in atomic64_fetch_sub()
1017 static __always_inline s64
1018 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) in atomic64_fetch_sub_acquire()
1027 static __always_inline s64
1028 atomic64_fetch_sub_release(s64 i, atomic64_t *v) in atomic64_fetch_sub_release()
1037 static __always_inline s64
1038 atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v) in atomic64_fetch_sub_relaxed()
1057 static __always_inline s64
1067 static __always_inline s64
1077 static __always_inline s64
1087 static __always_inline s64
1097 static __always_inline s64
1107 static __always_inline s64
1117 static __always_inline s64
1127 static __always_inline s64
1147 static __always_inline s64
1157 static __always_inline s64
1167 static __always_inline s64
1177 static __always_inline s64
1187 static __always_inline s64
1197 static __always_inline s64
1207 static __always_inline s64
1217 static __always_inline s64
1227 atomic64_and(s64 i, atomic64_t *v) in atomic64_and()
1235 static __always_inline s64
1236 atomic64_fetch_and(s64 i, atomic64_t *v) in atomic64_fetch_and()
1245 static __always_inline s64
1246 atomic64_fetch_and_acquire(s64 i, atomic64_t *v) in atomic64_fetch_and_acquire()
1255 static __always_inline s64
1256 atomic64_fetch_and_release(s64 i, atomic64_t *v) in atomic64_fetch_and_release()
1265 static __always_inline s64
1266 atomic64_fetch_and_relaxed(s64 i, atomic64_t *v) in atomic64_fetch_and_relaxed()
1276 atomic64_andnot(s64 i, atomic64_t *v) in atomic64_andnot()
1285 static __always_inline s64
1286 atomic64_fetch_andnot(s64 i, atomic64_t *v) in atomic64_fetch_andnot()
1295 static __always_inline s64
1296 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in atomic64_fetch_andnot_acquire()
1305 static __always_inline s64
1306 atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in atomic64_fetch_andnot_release()
1315 static __always_inline s64
1316 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) in atomic64_fetch_andnot_relaxed()
1325 atomic64_or(s64 i, atomic64_t *v) in atomic64_or()
1333 static __always_inline s64
1334 atomic64_fetch_or(s64 i, atomic64_t *v) in atomic64_fetch_or()
1343 static __always_inline s64
1344 atomic64_fetch_or_acquire(s64 i, atomic64_t *v) in atomic64_fetch_or_acquire()
1353 static __always_inline s64
1354 atomic64_fetch_or_release(s64 i, atomic64_t *v) in atomic64_fetch_or_release()
1363 static __always_inline s64
1364 atomic64_fetch_or_relaxed(s64 i, atomic64_t *v) in atomic64_fetch_or_relaxed()
1373 atomic64_xor(s64 i, atomic64_t *v) in atomic64_xor()
1381 static __always_inline s64
1382 atomic64_fetch_xor(s64 i, atomic64_t *v) in atomic64_fetch_xor()
1391 static __always_inline s64
1392 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) in atomic64_fetch_xor_acquire()
1401 static __always_inline s64
1402 atomic64_fetch_xor_release(s64 i, atomic64_t *v) in atomic64_fetch_xor_release()
1411 static __always_inline s64
1412 atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v) in atomic64_fetch_xor_relaxed()
1421 static __always_inline s64
1422 atomic64_xchg(atomic64_t *v, s64 i) in atomic64_xchg()
1431 static __always_inline s64
1432 atomic64_xchg_acquire(atomic64_t *v, s64 i) in atomic64_xchg_acquire()
1441 static __always_inline s64
1442 atomic64_xchg_release(atomic64_t *v, s64 i) in atomic64_xchg_release()
1451 static __always_inline s64
1452 atomic64_xchg_relaxed(atomic64_t *v, s64 i) in atomic64_xchg_relaxed()
1461 static __always_inline s64
1462 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in atomic64_cmpxchg()
1471 static __always_inline s64
1472 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) in atomic64_cmpxchg_acquire()
1481 static __always_inline s64
1482 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) in atomic64_cmpxchg_release()
1491 static __always_inline s64
1492 atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) in atomic64_cmpxchg_relaxed()
1502 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in atomic64_try_cmpxchg()
1513 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in atomic64_try_cmpxchg_acquire()
1524 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in atomic64_try_cmpxchg_release()
1535 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) in atomic64_try_cmpxchg_relaxed()
1546 atomic64_sub_and_test(s64 i, atomic64_t *v) in atomic64_sub_and_test()
1576 atomic64_add_negative(s64 i, atomic64_t *v) in atomic64_add_negative()
1585 static __always_inline s64
1586 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in atomic64_fetch_add_unless()
1596 atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in atomic64_add_unless()
1635 static __always_inline s64
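The matches above enumerate the generic kernel atomic64_* operations that take or return an s64: plain add/sub and their _return/fetch_ variants in full, _acquire, _release and _relaxed orderings, the bitwise and/andnot/or/xor families, xchg/cmpxchg/try_cmpxchg, and the conditional helpers (sub_and_test, add_negative, fetch_add_unless, add_unless). As a hedged illustration of how these calls fit together, here is a minimal sketch of a 64-bit reference-style counter. The struct and wrapper names (s64_counter, s64_counter_*) are hypothetical and chosen for this example only; the atomic64_* calls themselves are the generic API referenced in the listing, and the snippet is meant to compile inside a kernel tree, not as a standalone userspace program.

```c
#include <linux/atomic.h>
#include <linux/types.h>

/* Hypothetical example type wrapping one of the atomic64_t counters above. */
struct s64_counter {
	atomic64_t val;
};

static inline void s64_counter_init(struct s64_counter *c, s64 start)
{
	/* Plain store, no ordering implied (cf. atomic64_set at line 853). */
	atomic64_set(&c->val, start);
}

static inline s64 s64_counter_add(struct s64_counter *c, s64 delta)
{
	/* Fully ordered read-modify-write; returns the new value. */
	return atomic64_add_return(delta, &c->val);
}

static inline bool s64_counter_get_unless_zero(struct s64_counter *c)
{
	/* Take a reference only if the counter is not already zero. */
	return atomic64_add_unless(&c->val, 1, 0);
}

static inline bool s64_counter_put(struct s64_counter *c)
{
	/* Drop a reference; true when the counter reaches zero. */
	return atomic64_sub_and_test(1, &c->val);
}

static inline bool s64_counter_clamp(struct s64_counter *c, s64 cap)
{
	s64 old = atomic64_read(&c->val);

	/* try_cmpxchg loop: clamp the counter down to 'cap' if it overshot. */
	do {
		if (old <= cap)
			return false;
	} while (!atomic64_try_cmpxchg(&c->val, &old, cap));

	return true;
}
```

The _acquire/_release/_relaxed variants in the listing take the same arguments as the fully ordered forms shown here; they differ only in the memory ordering they guarantee around the operation.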