/lib/ |
D | atomic64.c |
      28  raw_spinlock_t lock;                                    (member)
      32  .lock = __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
      42  return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;      (in lock_addr())
      48  raw_spinlock_t *lock = lock_addr(v);                    (in atomic64_read(), local)
      51  raw_spin_lock_irqsave(lock, flags);                     (in atomic64_read())
      53  raw_spin_unlock_irqrestore(lock, flags);                (in atomic64_read())
      61  raw_spinlock_t *lock = lock_addr(v);                    (in atomic64_set(), local)
      63  raw_spin_lock_irqsave(lock, flags);                     (in atomic64_set())
      65  raw_spin_unlock_irqrestore(lock, flags);                (in atomic64_set())
      73  raw_spinlock_t *lock = lock_addr(v); \
      [all …]
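These hits trace the generic 64-bit atomic fallback used on 32-bit targets: every atomic64_t hashes to one of NR_LOCKS cacheline-padded raw spinlocks, and each operation is a short irq-safe critical section under its bucket's lock. A condensed sketch of that scheme, kept faithful to the hit lines above with the remaining ops elided:

	#include <linux/types.h>
	#include <linux/cache.h>
	#include <linux/spinlock.h>

	#define NR_LOCKS	16

	/* One pre-initialized, cacheline-padded lock per hash bucket. */
	static union {
		raw_spinlock_t lock;
		char pad[L1_CACHE_BYTES];
	} atomic64_lock[NR_LOCKS] __cacheline_aligned_in_smp = {
		[0 ... (NR_LOCKS - 1)] = {
			.lock = __RAW_SPIN_LOCK_UNLOCKED(atomic64_lock.lock),
		},
	};

	/* Hash the counter's address down to one bucket lock. */
	static inline raw_spinlock_t *lock_addr(const atomic64_t *v)
	{
		unsigned long addr = (unsigned long)v;

		addr >>= L1_CACHE_SHIFT;
		return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
	}

	s64 atomic64_read(const atomic64_t *v)
	{
		unsigned long flags;
		raw_spinlock_t *lock = lock_addr(v);
		s64 val;

		raw_spin_lock_irqsave(lock, flags);
		val = v->counter;
		raw_spin_unlock_irqrestore(lock, flags);
		return val;
	}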
|
D | lockref.c |
      16  while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
      52  spin_lock(&lockref->lock);        (in lockref_get())
      54  spin_unlock(&lockref->lock);      (in lockref_get())
      75  spin_lock(&lockref->lock);        (in lockref_get_not_zero())
      81  spin_unlock(&lockref->lock);      (in lockref_get_not_zero())
     103  spin_lock(&lockref->lock);        (in lockref_put_not_zero())
     109  spin_unlock(&lockref->lock);      (in lockref_put_not_zero())
     130  spin_lock(&lockref->lock);        (in lockref_get_or_lock())
     134  spin_unlock(&lockref->lock);      (in lockref_get_or_lock())
     174  spin_lock(&lockref->lock);        (in lockref_put_or_lock())
      [all …]
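The hit at line 16 is the interesting one: each lockref operation first runs a cmpxchg loop on the combined {lock, count} word while the embedded spinlock is observed unlocked, and the spin_lock/spin_unlock pairs below it are only the fallback path. A hedged usage sketch; struct my_obj and my_obj_tryget() are hypothetical names, not part of lockref.c:

	#include <linux/lockref.h>

	struct my_obj {				/* hypothetical object */
		struct lockref ref;
		/* ... payload ... */
	};

	/*
	 * Take a reference only if the object is still live (count > 0).
	 * lockref_get_not_zero() tries the lockless cmpxchg fast path
	 * first and falls back to spin_lock(&ref->lock) under contention.
	 */
	static struct my_obj *my_obj_tryget(struct my_obj *obj)
	{
		if (!obj || !lockref_get_not_zero(&obj->ref))
			return NULL;
		return obj;
	}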
|
D | dec_and_lock.c |
      21  int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)    (in _atomic_dec_and_lock(), argument)
      28  spin_lock(lock);                         (in _atomic_dec_and_lock())
      31  spin_unlock(lock);                       (in _atomic_dec_and_lock())
      37  int _atomic_dec_and_lock_irqsave(atomic_t *atomic, spinlock_t *lock,    (in _atomic_dec_and_lock_irqsave(), argument)
      45  spin_lock_irqsave(lock, *flags);         (in _atomic_dec_and_lock_irqsave())
      48  spin_unlock_irqrestore(lock, *flags);    (in _atomic_dec_and_lock_irqsave())
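The shape of _atomic_dec_and_lock(), reconstructed from these hits and the standard pattern it implements: decrement locklessly while the counter cannot reach zero, and take the lock only for the final decrement so "hit zero" and "unpublish the object" happen atomically. A condensed sketch:

	#include <linux/atomic.h>
	#include <linux/spinlock.h>

	int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
	{
		/* Fast path: counter is above 1, decrement without the lock. */
		if (atomic_add_unless(atomic, -1, 1))
			return 0;

		/* Slow path: we may be the last reference; decide under the lock. */
		spin_lock(lock);
		if (atomic_dec_and_test(atomic))
			return 1;	/* counter hit zero: return with lock held */
		spin_unlock(lock);
		return 0;
	}

A nonzero return means the caller owns both the zero transition and the lock, and typically unlinks and frees the object before unlocking.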
|
D | refcount.c |
     314  bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock)    (in refcount_dec_and_mutex_lock(), argument)
     319  mutex_lock(lock);                        (in refcount_dec_and_mutex_lock())
     321  mutex_unlock(lock);                      (in refcount_dec_and_mutex_lock())
     345  bool refcount_dec_and_lock(refcount_t *r, spinlock_t *lock)    (in refcount_dec_and_lock(), argument)
     350  spin_lock(lock);                         (in refcount_dec_and_lock())
     352  spin_unlock(lock);                       (in refcount_dec_and_lock())
     373  bool refcount_dec_and_lock_irqsave(refcount_t *r, spinlock_t *lock,    (in refcount_dec_and_lock_irqsave(), argument)
     379  spin_lock_irqsave(lock, *flags);         (in refcount_dec_and_lock_irqsave())
     381  spin_unlock_irqrestore(lock, *flags);    (in refcount_dec_and_lock_irqsave())
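These are the saturating refcount_t counterparts of dec_and_lock.c above. A hedged caller sketch; the object, list, and lock names here are hypothetical:

	#include <linux/refcount.h>
	#include <linux/spinlock.h>
	#include <linux/list.h>
	#include <linux/slab.h>

	static DEFINE_SPINLOCK(my_table_lock);	/* hypothetical lookup-table lock */

	struct my_obj {				/* hypothetical refcounted object */
		refcount_t ref;
		struct list_head node;
	};

	static void my_obj_put(struct my_obj *obj)
	{
		/* Returns true with the lock held only on the 1 -> 0 transition. */
		if (refcount_dec_and_lock(&obj->ref, &my_table_lock)) {
			list_del(&obj->node);	/* unpublish while still locked */
			spin_unlock(&my_table_lock);
			kfree(obj);
		}
	}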
|
D | percpu_counter.c |
      65  raw_spin_lock_irqsave(&fbc->lock, flags);         (in percpu_counter_set())
      71  raw_spin_unlock_irqrestore(&fbc->lock, flags);    (in percpu_counter_set())
      90  raw_spin_lock_irqsave(&fbc->lock, flags);         (in percpu_counter_add_batch())
      93  raw_spin_unlock_irqrestore(&fbc->lock, flags);    (in percpu_counter_add_batch())
     111  raw_spin_lock_irqsave(&fbc->lock, flags);         (in __percpu_counter_sum())
     117  raw_spin_unlock_irqrestore(&fbc->lock, flags);    (in __percpu_counter_sum())
     127  raw_spin_lock_init(&fbc->lock);                   (in __percpu_counter_init())
     128  lockdep_set_class(&fbc->lock, key);               (in __percpu_counter_init())
     187  raw_spin_lock(&fbc->lock);                        (in percpu_counter_cpu_dead())
     191  raw_spin_unlock(&fbc->lock);                      (in percpu_counter_cpu_dead())
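fbc->lock guards only the shared s64 in struct percpu_counter; the common case touches a per-CPU slot and never contends. A condensed sketch of the add path the percpu_counter_add_batch() hits belong to (the exact this_cpu accessors vary by kernel version, so treat this as an approximation):

	#include <linux/percpu_counter.h>
	#include <linux/preempt.h>

	void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch)
	{
		s64 count;

		preempt_disable();
		count = __this_cpu_read(*fbc->counters) + amount;
		if (count >= batch || count <= -batch) {
			unsigned long flags;

			/* Per-CPU delta grew too large: fold it into fbc->count. */
			raw_spin_lock_irqsave(&fbc->lock, flags);
			fbc->count += count;
			__this_cpu_write(*fbc->counters, 0);
			raw_spin_unlock_irqrestore(&fbc->lock, flags);
		} else {
			__this_cpu_write(*fbc->counters, count);
		}
		preempt_enable();
	}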
|
D | debugobjects.c |
      45  raw_spinlock_t lock;                              (member)
     448  raw_spin_lock_irqsave(&db->lock, flags);          (in debug_objects_oom())
     450  raw_spin_unlock_irqrestore(&db->lock, flags);     (in debug_objects_oom())
     540  raw_spin_lock_irqsave(&db->lock, flags);          (in __debug_object_init())
     547  raw_spin_unlock_irqrestore(&db->lock, flags);     (in __debug_object_init())
     563  raw_spin_unlock_irqrestore(&db->lock, flags);     (in __debug_object_init())
     569  raw_spin_unlock_irqrestore(&db->lock, flags);     (in __debug_object_init())
     576  raw_spin_unlock_irqrestore(&db->lock, flags);     (in __debug_object_init())
     632  raw_spin_lock_irqsave(&db->lock, flags);          (in debug_object_activate())
     647  raw_spin_unlock_irqrestore(&db->lock, flags);     (in debug_object_activate())
      [all …]
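The lock member at line 45 lives in a per-bucket structure: tracked objects are hashed by address into debug buckets, and every state transition happens under that bucket's raw lock with interrupts off. A sketch of that layout under those assumptions, with constants abbreviated from the real file:

	#include <linux/spinlock.h>
	#include <linux/hash.h>
	#include <linux/mm.h>

	#define ODEBUG_HASH_BITS	14
	#define ODEBUG_HASH_SIZE	(1 << ODEBUG_HASH_BITS)
	#define ODEBUG_CHUNK_SHIFT	PAGE_SHIFT

	struct debug_bucket {
		struct hlist_head	list;	/* tracked objects in this bucket */
		raw_spinlock_t		lock;	/* the "member" hit at line 45 */
	};

	static struct debug_bucket obj_hash[ODEBUG_HASH_SIZE];

	/* Map a tracked object's address to its bucket (and thus its lock). */
	static struct debug_bucket *get_bucket(unsigned long addr)
	{
		unsigned long hash;

		hash = hash_long((addr >> ODEBUG_CHUNK_SHIFT), ODEBUG_HASH_BITS);
		return &obj_hash[hash];
	}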
|
D | flex_proportions.c |
      98  raw_spin_lock_init(&pl->lock);                    (in fprop_local_init_single())
     115  raw_spin_lock_irqsave(&pl->lock, flags);          (in fprop_reflect_period_single())
     118  raw_spin_unlock_irqrestore(&pl->lock, flags);     (in fprop_reflect_period_single())
     127  raw_spin_unlock_irqrestore(&pl->lock, flags);     (in fprop_reflect_period_single())
     180  raw_spin_lock_init(&pl->lock);                    (in fprop_local_init_percpu())
     198  raw_spin_lock_irqsave(&pl->lock, flags);          (in fprop_reflect_period_percpu())
     201  raw_spin_unlock_irqrestore(&pl->lock, flags);     (in fprop_reflect_period_percpu())
     216  raw_spin_unlock_irqrestore(&pl->lock, flags);     (in fprop_reflect_period_percpu())
|
D | ratelimit.c |
      41  if (!raw_spin_trylock_irqsave(&rs->lock, flags))    (in ___ratelimit())
      66  raw_spin_unlock_irqrestore(&rs->lock, flags);       (in ___ratelimit())
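Note the trylock at line 41: ___ratelimit() can be reached from inside printk paths, so blocking on rs->lock could deadlock; a busy lock simply suppresses the message. A condensed sketch of that shape, with the window accounting between lock and unlock elided:

	#include <linux/ratelimit.h>
	#include <linux/spinlock.h>

	int ___ratelimit(struct ratelimit_state *rs, const char *func)
	{
		unsigned long flags;
		int ret;

		if (!rs->interval)
			return 1;	/* rate limiting disabled */

		/*
		 * Trylock, never lock: this runs in printk-adjacent paths
		 * where spinning on a contended lock could deadlock. If
		 * someone else holds it, err on the side of suppression.
		 */
		if (!raw_spin_trylock_irqsave(&rs->lock, flags))
			return 0;

		/* ... interval window and burst accounting elided ... */
		ret = 1;

		raw_spin_unlock_irqrestore(&rs->lock, flags);
		return ret;
	}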
|
D | rhashtable.c |
     325  spin_lock(&ht->lock);                             (in rhashtable_rehash_table())
     337  spin_unlock(&ht->lock);                           (in rhashtable_rehash_table())
     666  spin_lock(&ht->lock);                             (in rhashtable_walk_enter())
     668  rcu_dereference_protected(ht->tbl, lockdep_is_held(&ht->lock));    (in rhashtable_walk_enter())
     670  spin_unlock(&ht->lock);                           (in rhashtable_walk_enter())
     682  spin_lock(&iter->ht->lock);                       (in rhashtable_walk_exit())
     685  spin_unlock(&iter->ht->lock);                     (in rhashtable_walk_exit())
     715  spin_lock(&ht->lock);                             (in rhashtable_walk_start_check())
     718  spin_unlock(&ht->lock);                           (in rhashtable_walk_start_check())
     934  spin_lock(&ht->lock);                             (in rhashtable_walk_stop())
      [all …]
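ht->lock is not a bucket lock; it serializes resizes against walkers, which is why every walk_enter/exit/start/stop hit takes it. A hedged sketch of the walker API these hits implement; my_dump() and the payload handling are hypothetical:

	#include <linux/rhashtable.h>
	#include <linux/err.h>

	/* Iterate a live table; a concurrent resize can force a restart. */
	static void my_dump(struct rhashtable *ht)
	{
		struct rhashtable_iter iter;
		void *obj;

		rhashtable_walk_enter(ht, &iter);	/* registers iter under ht->lock */
		rhashtable_walk_start(&iter);

		while ((obj = rhashtable_walk_next(&iter)) != NULL) {
			if (IS_ERR(obj)) {
				if (PTR_ERR(obj) == -EAGAIN)
					continue;	/* table resized underneath us */
				break;
			}
			/* ... inspect obj ... */
		}

		rhashtable_walk_stop(&iter);
		rhashtable_walk_exit(&iter);		/* unregisters under ht->lock */
	}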
|
D | random32.c |
     268  static DEFINE_SPINLOCK(lock);                     (in __prandom_reseed())
     280  if (!spin_trylock_irqsave(&lock, flags))          (in __prandom_reseed())
     289  spin_unlock_irqrestore(&lock, flags);             (in __prandom_reseed())
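Line 268 is a function-local static, so a single lock is shared by every caller of __prandom_reseed(), and the trylock makes reseeding best-effort rather than blocking. A minimal sketch of that shape; reseed_state() is a hypothetical stand-in for the real mixing code:

	#include <linux/spinlock.h>
	#include <linux/types.h>

	static void reseed_state(u32 entropy)	/* hypothetical stand-in */
	{
		/* Function-local static: one lock shared by all callers. */
		static DEFINE_SPINLOCK(lock);
		unsigned long flags;

		/* Best effort: if a reseed is already running, drop ours. */
		if (!spin_trylock_irqsave(&lock, flags))
			return;

		/* ... mix entropy into the generator state ... */

		spin_unlock_irqrestore(&lock, flags);
	}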
|
D | genalloc.c |
     157  spin_lock_init(&pool->lock);                      (in gen_pool_create())
     200  spin_lock(&pool->lock);                           (in gen_pool_add_owner())
     202  spin_unlock(&pool->lock);                         (in gen_pool_add_owner())
|
D | Kconfig.debug |
    1127  This feature enables tracking lock contention points
    1131  This also enables lock events required by "perf lock",
    1133  If you want to use "perf lock", you also need to turn on
    1136  CONFIG_LOCK_STAT defines "contended" and "acquired" lock events.
    1147  bool "Spinlock and rw-lock debugging: basic checks"
    1195  This feature will check whether any held lock (spinlock, rwlock,
    1198  vfree(), etc.), whether a live lock is incorrectly reinitialized via
    1199  spin_lock_init()/mutex_init()/etc., or whether there is any lock
    1217  If you say Y here, the lock dependency engine will do
    1239  lock debugging then those bugs won't be detected, of course.)
      [all …]
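For context, a hypothetical example of the bug class the DEBUG_LOCK_ALLOC help text above describes (freeing memory that still contains a held lock); the struct and function names are made up:

	#include <linux/slab.h>
	#include <linux/spinlock.h>

	struct my_item {		/* hypothetical */
		spinlock_t lock;
		int data;
	};

	static void broken_teardown(struct my_item *item)
	{
		spin_lock(&item->lock);
		/*
		 * Bug: kfree() of memory containing a held lock. With
		 * CONFIG_DEBUG_LOCK_ALLOC=y the lock debugging code reports
		 * this; without it, the use-after-free is silent.
		 */
		kfree(item);
	}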
|
D | locking-selftest.c |
     140  init_class_##class(raw_spinlock_t *lock, rwlock_t *rwlock, \
     143  raw_spin_lock_init(lock); \
|
/lib/math/ |
D | prime_numbers.c |
      64  static DEFINE_MUTEX(lock);
     133  mutex_lock(&lock);                                (in expand_to_next_prime())
     134  p = rcu_dereference_protected(primes, lockdep_is_held(&lock));    (in expand_to_next_prime())
     157  mutex_unlock(&lock);                              (in expand_to_next_prime())
     165  mutex_lock(&lock);                                (in free_primes())
     166  p = rcu_dereference_protected(primes, lockdep_is_held(&lock));    (in free_primes())
     171  mutex_unlock(&lock);                              (in free_primes())
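The pattern here is mutex-serialized RCU update: readers traverse primes under rcu_read_lock(), writers take the mutex, and rcu_dereference_protected(..., lockdep_is_held(&lock)) documents (and lets lockdep verify) that holding the mutex makes a plain dereference safe. A condensed sketch of the writer side; publish_new_primes() is a hypothetical name and the struct layout is abbreviated:

	#include <linux/mutex.h>
	#include <linux/rcupdate.h>
	#include <linux/slab.h>

	struct primes {
		struct rcu_head rcu;
		unsigned long last;
		/* ... bitmap of sieved primes ... */
	};

	static DEFINE_MUTEX(lock);
	static const struct primes __rcu *primes;

	static void publish_new_primes(struct primes *new)
	{
		const struct primes *old;

		mutex_lock(&lock);
		/* Safe plain load: the mutex serializes all updaters. */
		old = rcu_dereference_protected(primes, lockdep_is_held(&lock));
		/* ... extend "old" into "new" ... */
		rcu_assign_pointer(primes, new);
		mutex_unlock(&lock);

		if (old)
			kfree_rcu((struct primes *)old, rcu);	/* free after readers drain */
	}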
|
/lib/raid6/ |
D | altivec.uc |
      22  * bracketed this with preempt_disable/enable or in a lock)
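The comment refers to kernel-mode AltiVec use in the RAID-6 inner loop: the vector unit must be explicitly enabled and the whole sequence kept on one CPU, hence the preempt bracket. A hedged sketch of what a caller looks like on powerpc; raid6_vec_gen_syndrome() is a hypothetical name, and enable_kernel_altivec()/disable_kernel_altivec() are the powerpc helpers the real code uses:

	#include <linux/preempt.h>
	#include <linux/types.h>
	#include <asm/switch_to.h>	/* enable_kernel_altivec() on powerpc */

	static void raid6_vec_gen_syndrome(int disks, size_t bytes, void **ptrs)
	{
		/*
		 * Bracket the AltiVec sequence: with preemption off, the
		 * vector register state cannot be stolen or migrated mid-loop.
		 */
		preempt_disable();
		enable_kernel_altivec();

		/* ... unrolled P/Q syndrome computation over ptrs ... */

		disable_kernel_altivec();
		preempt_enable();
	}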
|