Searched refs: ATOMIC_HASH_SIZE (Results 1 – 5 of 5), sorted by relevance

/arch/tile/lib/
atomic_32.c:43   return p >= &atomic_locks[0] && p < &atomic_locks[ATOMIC_HASH_SIZE]; in is_atomic_lock()
atomic_32.c:184  BUILD_BUG_ON(ATOMIC_HASH_SIZE & (ATOMIC_HASH_SIZE-1)); in __init_atomic_per_cpu()
atomic_32.c:185  BUG_ON(ATOMIC_HASH_SIZE < nr_cpu_ids); in __init_atomic_per_cpu()
atomic_32.c:198  BUILD_BUG_ON(ATOMIC_HASH_SIZE * sizeof(int) > PAGE_SIZE); in __init_atomic_per_cpu()
atomic_32.c:205  BUILD_BUG_ON((PAGE_SIZE >> 3) > ATOMIC_HASH_SIZE); in __init_atomic_per_cpu()
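The tile hits are all sanity checks on the lock table: is_atomic_lock() is a simple bounds test, and __init_atomic_per_cpu() asserts at build time that ATOMIC_HASH_SIZE is a power of two (so it can be used as a mask) and that the int[] lock table fits in one page. Below is a minimal stand-alone sketch of those build-time checks, assuming only <linux/bug.h> and <linux/mm.h>; the value 256 and the helper name check_atomic_hash_size() are illustrative, not tile's actual definitions.

/*
 * Sketch of the build-time checks seen in __init_atomic_per_cpu().
 * BUILD_BUG_ON() is the real kernel macro; ATOMIC_HASH_SIZE is set to
 * an illustrative value here (tile derives it from ATOMIC_HASH_SHIFT).
 */
#include <linux/bug.h>
#include <linux/mm.h>			/* PAGE_SIZE */

#define ATOMIC_HASH_SIZE 256		/* illustrative; must be a power of two */

static void check_atomic_hash_size(void)
{
	/* x & (x - 1) == 0 only when x is a power of two (or zero). */
	BUILD_BUG_ON(ATOMIC_HASH_SIZE & (ATOMIC_HASH_SIZE - 1));
	/* The whole int[] lock table must fit in a single page. */
	BUILD_BUG_ON(ATOMIC_HASH_SIZE * sizeof(int) > PAGE_SIZE);
}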
/arch/sparc/lib/
atomic32.c:15  #define ATOMIC_HASH_SIZE 4 (macro definition)
atomic32.c:16  #define ATOMIC_HASH(a) (&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])
atomic32.c:18  spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
atomic32.c:19  [0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
atomic32.c:25  #define ATOMIC_HASH_SIZE 1 (macro definition)
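The sparc32 hits show the whole technique in one place: a small, power-of-two array of spinlocks is indexed by a hash of the atomic_t's address, and the "atomic" update is performed under that lock (on UP the table collapses to a single entry, ATOMIC_HASH_SIZE 1). A minimal sketch along those lines follows; the array and ATOMIC_HASH() macro mirror the lines above, while hashed_atomic_add_return() is a hypothetical helper showing how they would be used with the standard spin_lock_irqsave()/spin_unlock_irqrestore() API.

#include <linux/spinlock.h>
#include <linux/types.h>		/* atomic_t */

#define ATOMIC_HASH_SIZE 4
#define ATOMIC_HASH(a) \
	(&__atomic_hash[(((unsigned long)a) >> 8) & (ATOMIC_HASH_SIZE - 1)])

/* One lock per hash bucket, all starting out unlocked. */
spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE - 1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

/* Hypothetical helper: emulate an atomic add under the lock the address hashes to. */
static int hashed_atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = (v->counter += i);
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}

Hashing the address rather than using one global lock keeps unrelated atomics from serializing on the same spinlock.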
/arch/parisc/lib/
bitops.c:14  arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
bitops.c:15  [0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
/arch/parisc/include/asm/
atomic.h:28  # define ATOMIC_HASH_SIZE 4 (macro definition)
atomic.h:29  … ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) (a))/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE-1) ]))
atomic.h:31  extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
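parisc follows the same pattern but hashes on the cache line: dividing the address by L1_CACHE_BYTES before masking means every word within one L1 line maps to the same lock, and the table is a raw arch_spinlock_t array placed in a __lock_aligned section. The sketch below is hypothetical usage, not parisc's actual code: hashed_atomic_set() only illustrates taking the hashed lock with the low-level arch_spin_lock()/arch_spin_unlock() and local_irq_save()/local_irq_restore() primitives.

#include <linux/spinlock.h>
#include <linux/irqflags.h>
#include <linux/cache.h>		/* L1_CACHE_BYTES */
#include <linux/types.h>		/* atomic_t */

/* Declarations matching the results above (__lock_aligned omitted for the sketch). */
#define ATOMIC_HASH_SIZE 4
#define ATOMIC_HASH(a) \
	(&(__atomic_hash[(((unsigned long)(a)) / L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE - 1)]))

extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE];

/* Hypothetical helper: store to an atomic_t under the lock its cache line hashes to. */
static inline void hashed_atomic_set(atomic_t *v, int i)
{
	unsigned long flags;
	arch_spinlock_t *lock = ATOMIC_HASH(v);

	local_irq_save(flags);
	arch_spin_lock(lock);
	v->counter = i;
	arch_spin_unlock(lock);
	local_irq_restore(flags);
}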
/arch/tile/include/asm/
atomic_32.h:211  #define ATOMIC_HASH_SIZE (1 << ATOMIC_HASH_SHIFT) (macro definition)