
Searched refs:lock (Results 1 – 25 of 25) sorted by relevance

/lib/
atomic64.c
28 arch_spinlock_t lock; member
32 .lock = __ARCH_SPIN_LOCK_UNLOCKED,
42 return &atomic64_lock[addr & (NR_LOCKS - 1)].lock; in lock_addr()
48 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_read() local
52 arch_spin_lock(lock); in generic_atomic64_read()
54 arch_spin_unlock(lock); in generic_atomic64_read()
63 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_set() local
66 arch_spin_lock(lock); in generic_atomic64_set()
68 arch_spin_unlock(lock); in generic_atomic64_set()
77 arch_spinlock_t *lock = lock_addr(v); \
[all …]
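
The matches above are lib/atomic64.c, the generic fallback for architectures without native 64-bit atomics: every atomic64_t hashes by address onto one spinlock in a small static array, so independent atomics rarely share a lock. A condensed sketch of the scheme (names follow the file; the cacheline padding of the real array is omitted):

    #include <linux/atomic.h>
    #include <linux/cache.h>

    #define NR_LOCKS 16

    static struct {
        arch_spinlock_t lock;
    } atomic64_lock[NR_LOCKS] = {
        [0 ... NR_LOCKS - 1] = { .lock = __ARCH_SPIN_LOCK_UNLOCKED },
    };

    /* Hash the atomic's address onto one of NR_LOCKS spinlocks. */
    static inline arch_spinlock_t *lock_addr(const atomic64_t *v)
    {
        unsigned long addr = (unsigned long)v;

        addr >>= L1_CACHE_SHIFT;
        addr ^= (addr >> 8) ^ (addr >> 16);
        return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
    }

    s64 generic_atomic64_read(const atomic64_t *v)
    {
        unsigned long flags;
        arch_spinlock_t *lock = lock_addr(v);
        s64 val;

        local_irq_save(flags);      /* arch_spin_lock() leaves IRQs alone */
        arch_spin_lock(lock);
        val = v->counter;
        arch_spin_unlock(lock);
        local_irq_restore(flags);
        return val;
    }
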
dec_and_lock.c
21 int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock) in _atomic_dec_and_lock() argument
28 spin_lock(lock); in _atomic_dec_and_lock()
31 spin_unlock(lock); in _atomic_dec_and_lock()
37 int _atomic_dec_and_lock_irqsave(atomic_t *atomic, spinlock_t *lock, in _atomic_dec_and_lock_irqsave() argument
45 spin_lock_irqsave(lock, *flags); in _atomic_dec_and_lock_irqsave()
48 spin_unlock_irqrestore(lock, *flags); in _atomic_dec_and_lock_irqsave()
53 int _atomic_dec_and_raw_lock(atomic_t *atomic, raw_spinlock_t *lock) in _atomic_dec_and_raw_lock() argument
60 raw_spin_lock(lock); in _atomic_dec_and_raw_lock()
63 raw_spin_unlock(lock); in _atomic_dec_and_raw_lock()
68 int _atomic_dec_and_raw_lock_irqsave(atomic_t *atomic, raw_spinlock_t *lock, in _atomic_dec_and_raw_lock_irqsave() argument
[all …]
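
atomic_dec_and_lock() is the classic refcount-plus-list-lock teardown helper: it decrements lock-free while the count stays above one, and only the final put takes the lock, returning with it held so the caller can unlink and free the object race-free. A minimal caller sketch (struct cache_entry and cache_lock are hypothetical):

    #include <linux/spinlock.h>
    #include <linux/list.h>
    #include <linux/slab.h>

    struct cache_entry {                 /* hypothetical */
        atomic_t refcnt;
        struct list_head node;
    };

    static DEFINE_SPINLOCK(cache_lock);  /* hypothetical list lock */

    void cache_entry_put(struct cache_entry *e)
    {
        /* Lock-free decrement unless the count would hit zero; only
         * the final put takes cache_lock and returns holding it. */
        if (atomic_dec_and_lock(&e->refcnt, &cache_lock)) {
            list_del(&e->node);
            spin_unlock(&cache_lock);
            kfree(e);
        }
    }
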
lockref.c
16 while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
50 spin_lock(&lockref->lock); in lockref_get()
52 spin_unlock(&lockref->lock); in lockref_get()
73 spin_lock(&lockref->lock); in lockref_get_not_zero()
79 spin_unlock(&lockref->lock); in lockref_get_not_zero()
101 spin_lock(&lockref->lock); in lockref_put_not_zero()
107 spin_unlock(&lockref->lock); in lockref_put_not_zero()
147 spin_lock(&lockref->lock); in lockref_put_or_lock()
151 spin_unlock(&lockref->lock); in lockref_put_or_lock()
162 assert_spin_locked(&lockref->lock); in lockref_mark_dead()
[all …]
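
lockref packs a spinlock and a reference count into a single 64-bit unit so that, while the lock is observed unlocked (the arch_spin_value_unlocked() test on line 16), gets and puts are a single cmpxchg rather than a lock round-trip. A hedged put-path sketch (struct cached_obj is hypothetical):

    #include <linux/lockref.h>
    #include <linux/slab.h>

    struct cached_obj {                 /* hypothetical */
        struct lockref ref;
        /* ... payload ... */
    };

    void cached_obj_put(struct cached_obj *obj)
    {
        /* Fast path: lockless cmpxchg decrement while count > 1. */
        if (lockref_put_or_lock(&obj->ref))
            return;

        /* Count was 1 and obj->ref.lock is now held: tear down. */
        lockref_mark_dead(&obj->ref);   /* asserts the lock is held */
        spin_unlock(&obj->ref.lock);
        kfree(obj);
    }
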
refcount.c
113 bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock) in refcount_dec_and_mutex_lock() argument
118 mutex_lock(lock); in refcount_dec_and_mutex_lock()
120 mutex_unlock(lock); in refcount_dec_and_mutex_lock()
144 bool refcount_dec_and_lock(refcount_t *r, spinlock_t *lock) in refcount_dec_and_lock() argument
149 spin_lock(lock); in refcount_dec_and_lock()
151 spin_unlock(lock); in refcount_dec_and_lock()
172 bool refcount_dec_and_lock_irqsave(refcount_t *r, spinlock_t *lock, in refcount_dec_and_lock_irqsave() argument
178 spin_lock_irqsave(lock, *flags); in refcount_dec_and_lock_irqsave()
180 spin_unlock_irqrestore(lock, *flags); in refcount_dec_and_lock_irqsave()
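
refcount_dec_and_lock() and refcount_dec_and_mutex_lock() mirror atomic_dec_and_lock() but operate on refcount_t, which saturates and warns on overflow or underflow instead of silently wrapping. A hedged sketch using the irqsave variant (struct conn and conn_list_lock are hypothetical):

    #include <linux/refcount.h>
    #include <linux/list.h>
    #include <linux/slab.h>

    struct conn {                            /* hypothetical */
        refcount_t ref;
        struct list_head node;
    };

    static DEFINE_SPINLOCK(conn_list_lock);  /* hypothetical, IRQ-shared */

    void conn_put(struct conn *c)
    {
        unsigned long flags;

        /* True only for the final put; returns with the lock held
         * and interrupts disabled. */
        if (refcount_dec_and_lock_irqsave(&c->ref, &conn_list_lock, &flags)) {
            list_del(&c->node);
            spin_unlock_irqrestore(&conn_list_lock, flags);
            kfree(c);
        }
    }
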
percpu_counter.c
65 raw_spin_lock_irqsave(&fbc->lock, flags); in percpu_counter_set()
71 raw_spin_unlock_irqrestore(&fbc->lock, flags); in percpu_counter_set()
101 raw_spin_lock_irqsave(&fbc->lock, flags); in percpu_counter_add_batch()
109 raw_spin_unlock_irqrestore(&fbc->lock, flags); in percpu_counter_add_batch()
128 raw_spin_lock(&fbc->lock); in percpu_counter_add_batch()
131 raw_spin_unlock(&fbc->lock); in percpu_counter_add_batch()
151 raw_spin_lock_irqsave(&fbc->lock, flags); in percpu_counter_sync()
155 raw_spin_unlock_irqrestore(&fbc->lock, flags); in percpu_counter_sync()
177 raw_spin_lock_irqsave(&fbc->lock, flags); in __percpu_counter_sum()
183 raw_spin_unlock_irqrestore(&fbc->lock, flags); in __percpu_counter_sum()
[all …]
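
percpu_counter keeps a per-CPU delta next to a shared s64: hot-path adds stay CPU-local and fold into the shared count, under fbc->lock, only when the local delta exceeds the batch. A hedged usage sketch (nr_items is hypothetical):

    #include <linux/percpu_counter.h>

    static struct percpu_counter nr_items;

    int counters_init(void)
    {
        return percpu_counter_init(&nr_items, 0, GFP_KERNEL);
    }

    void hot_path(void)
    {
        /* Stays per-CPU until the local delta reaches 32. */
        percpu_counter_add_batch(&nr_items, 1, 32);
    }

    s64 report(void)
    {
        return percpu_counter_sum(&nr_items);  /* exact; walks all CPUs */
    }

    void counters_exit(void)
    {
        percpu_counter_destroy(&nr_items);
    }
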
ref_tracker.c
92 lockdep_assert_held(&dir->lock); in __ref_tracker_dir_pr_ostream()
138 spin_lock_irqsave(&dir->lock, flags); in ref_tracker_dir_print()
140 spin_unlock_irqrestore(&dir->lock, flags); in ref_tracker_dir_print()
149 spin_lock_irqsave(&dir->lock, flags); in ref_tracker_dir_snprint()
151 spin_unlock_irqrestore(&dir->lock, flags); in ref_tracker_dir_snprint()
164 spin_lock_irqsave(&dir->lock, flags); in ref_tracker_dir_exit()
178 spin_unlock_irqrestore(&dir->lock, flags); in ref_tracker_dir_exit()
212 spin_lock_irqsave(&dir->lock, flags); in ref_tracker_alloc()
214 spin_unlock_irqrestore(&dir->lock, flags); in ref_tracker_alloc()
243 spin_lock_irqsave(&dir->lock, flags); in ref_tracker_free()
[all …]
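
ref_tracker is a leak-hunting aid: each acquire and release allocates a small tracker recording a stack trace, with all bookkeeping under dir->lock as the matches show. A rough sketch; ref_tracker_dir_init()'s signature has changed across kernel versions, so treat the exact arguments as an assumption:

    #include <linux/ref_tracker.h>

    static struct ref_tracker_dir dir;      /* one dir per object class */

    void demo(void)
    {
        struct ref_tracker *tracker = NULL;

        ref_tracker_dir_init(&dir, 16, "demo");        /* quarantine 16 freed trackers */
        ref_tracker_alloc(&dir, &tracker, GFP_KERNEL); /* take ref, record stack */
        ref_tracker_free(&dir, &tracker);              /* drop ref, record stack */
        ref_tracker_dir_print(&dir, 16);               /* dump still-outstanding refs */
        ref_tracker_dir_exit(&dir);                    /* complains about leaks */
    }
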
debugobjects.c
43 raw_spinlock_t lock; member
472 raw_spin_lock_irqsave(&db->lock, flags); in debug_objects_oom()
474 raw_spin_unlock_irqrestore(&db->lock, flags); in debug_objects_oom()
631 raw_spin_lock_irqsave(&db->lock, flags); in __debug_object_init()
635 raw_spin_unlock_irqrestore(&db->lock, flags); in __debug_object_init()
645 raw_spin_unlock_irqrestore(&db->lock, flags); in __debug_object_init()
652 raw_spin_unlock_irqrestore(&db->lock, flags); in __debug_object_init()
708 raw_spin_lock_irqsave(&db->lock, flags); in debug_object_activate()
712 raw_spin_unlock_irqrestore(&db->lock, flags); in debug_object_activate()
726 raw_spin_unlock_irqrestore(&db->lock, flags); in debug_object_activate()
[all …]
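
debugobjects tracks the lifecycle state (initialized, active, destroyed) of objects such as timers and work items in hash buckets, each guarded by the raw spinlock declared on line 43. Annotation calls are woven into an object's own API; a hedged sketch (struct mytimer and its descriptor are hypothetical):

    #include <linux/debugobjects.h>

    struct mytimer { int armed; };          /* hypothetical */

    static const struct debug_obj_descr mytimer_debug_descr = {
        .name = "mytimer",                  /* shown in warnings */
    };

    void mytimer_init(struct mytimer *t)
    {
        debug_object_init(t, &mytimer_debug_descr);
        t->armed = 0;
    }

    void mytimer_start(struct mytimer *t)
    {
        /* Warns if t was never initialized or is already active. */
        debug_object_activate(t, &mytimer_debug_descr);
        t->armed = 1;
    }
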
flex_proportions.c
99 raw_spin_lock_init(&pl->lock); in fprop_local_init_percpu()
117 raw_spin_lock_irqsave(&pl->lock, flags); in fprop_reflect_period_percpu()
120 raw_spin_unlock_irqrestore(&pl->lock, flags); in fprop_reflect_period_percpu()
135 raw_spin_unlock_irqrestore(&pl->lock, flags); in fprop_reflect_period_percpu()
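
flex_proportions answers "what fraction of recent events came from this contributor?", aging per-period counts under pl->lock as fprop_reflect_period_percpu() does above. A rough sketch; these entry points follow include/linux/flex_proportions.h but have shifted across kernel versions, so treat the increment call in particular as an assumption:

    #include <linux/flex_proportions.h>

    static struct fprop_global all_events;       /* everyone's events */
    static struct fprop_local_percpu my_events;  /* this contributor's */

    int demo(void)
    {
        unsigned long num, den;

        fprop_global_init(&all_events, GFP_KERNEL);
        fprop_local_init_percpu(&my_events, GFP_KERNEL); /* inits pl->lock */

        /* Record one event against both counters (assumed entry point). */
        __fprop_add_percpu(&all_events, &my_events, 1);

        /* This contributor's recent share, as num/den. */
        fprop_fraction_percpu(&all_events, &my_events, &num, &den);
        return 0;
    }
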
lwq.c
25 spin_lock(&q->lock); in __lwq_dequeue()
36 spin_unlock(&q->lock); in __lwq_dequeue()
56 spin_lock(&q->lock); in lwq_dequeue_all()
60 spin_unlock(&q->lock); in lwq_dequeue_all()
ratelimit.c
47 if (!raw_spin_trylock_irqsave(&rs->lock, flags)) in ___ratelimit()
72 raw_spin_unlock_irqrestore(&rs->lock, flags); in ___ratelimit()
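
___ratelimit() deliberately uses raw_spin_trylock_irqsave (line 47): if another CPU already holds rs->lock, the caller skips the accounting instead of spinning, which is the right trade-off in a warning path. Typical usage:

    #include <linux/ratelimit.h>

    /* At most 10 messages per 5-second window. */
    static DEFINE_RATELIMIT_STATE(drop_rs, 5 * HZ, 10);

    void on_packet_drop(void)
    {
        if (__ratelimit(&drop_rs))
            pr_warn("dropping packet\n");
    }
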
rhashtable.c
341 spin_lock(&ht->lock); in rhashtable_rehash_table()
353 spin_unlock(&ht->lock); in rhashtable_rehash_table()
685 spin_lock(&ht->lock); in rhashtable_walk_enter()
687 rcu_dereference_protected(ht->tbl, lockdep_is_held(&ht->lock)); in rhashtable_walk_enter()
689 spin_unlock(&ht->lock); in rhashtable_walk_enter()
701 spin_lock(&iter->ht->lock); in rhashtable_walk_exit()
704 spin_unlock(&iter->ht->lock); in rhashtable_walk_exit()
734 spin_lock(&ht->lock); in rhashtable_walk_start_check()
737 spin_unlock(&ht->lock); in rhashtable_walk_start_check()
953 spin_lock(&ht->lock); in rhashtable_walk_stop()
[all …]
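
In rhashtable, ht->lock serializes resizes against walker registration; the walk API itself tolerates concurrent rehashing and reports -EAGAIN when the table moves underneath the iterator. A hedged iteration sketch (struct object is hypothetical):

    #include <linux/rhashtable.h>

    struct object {                        /* hypothetical */
        struct rhash_head node;
        u32 key;
    };

    static void dump_all(struct rhashtable *ht)
    {
        struct rhashtable_iter iter;
        struct object *obj;

        rhashtable_walk_enter(ht, &iter);  /* registers iter under ht->lock */
        rhashtable_walk_start(&iter);

        while ((obj = rhashtable_walk_next(&iter)) != NULL) {
            if (IS_ERR(obj)) {
                if (PTR_ERR(obj) == -EAGAIN)
                    continue;              /* table resized; keep walking */
                break;
            }
            pr_info("key %u\n", obj->key);
        }

        rhashtable_walk_stop(&iter);
        rhashtable_walk_exit(&iter);       /* unregisters under ht->lock */
    }
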
test_hmm.c
122 spinlock_t lock; /* protects the above */ member
573 spin_lock(&mdevice->lock); in dmirror_allocate_chunk()
585 spin_unlock(&mdevice->lock); in dmirror_allocate_chunk()
616 spin_lock(&mdevice->lock); in dmirror_devmem_alloc_page()
622 spin_unlock(&mdevice->lock); in dmirror_devmem_alloc_page()
624 spin_unlock(&mdevice->lock); in dmirror_devmem_alloc_page()
1275 spin_lock(&mdevice->lock); in dmirror_device_remove_chunks()
1278 spin_unlock(&mdevice->lock); in dmirror_device_remove_chunks()
1408 spin_lock(&mdevice->lock); in dmirror_devmem_free()
1416 spin_unlock(&mdevice->lock); in dmirror_devmem_free()
[all …]
radix-tree.c
63 .lock = INIT_LOCAL_LOCK(lock),
334 local_lock(&radix_tree_preloads.lock); in __radix_tree_preload()
337 local_unlock(&radix_tree_preloads.lock); in __radix_tree_preload()
341 local_lock(&radix_tree_preloads.lock); in __radix_tree_preload()
383 local_lock(&radix_tree_preloads.lock); in radix_tree_maybe_preload()
1472 local_lock(&radix_tree_preloads.lock); in idr_preload()
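
radix_tree_preloads is per-CPU state guarded by a local_lock (line 63): radix_tree_preload() fills a per-CPU node reserve while sleeping is still allowed and returns with that lock held, so the reserve cannot migrate away before the insert. The canonical calling pattern:

    #include <linux/radix-tree.h>

    static RADIX_TREE(my_tree, GFP_ATOMIC);   /* hypothetical tree */
    static DEFINE_SPINLOCK(my_tree_lock);     /* hypothetical tree lock */

    int stash(unsigned long index, void *item)
    {
        int err;

        err = radix_tree_preload(GFP_KERNEL); /* may sleep; takes the local_lock */
        if (err)
            return err;

        spin_lock(&my_tree_lock);
        err = radix_tree_insert(&my_tree, index, item);
        spin_unlock(&my_tree_lock);

        radix_tree_preload_end();             /* releases the local_lock */
        return err;
    }
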
codetag.c
31 void codetag_lock_module_list(struct codetag_type *cttype, bool lock) in codetag_lock_module_list() argument
33 if (lock) in codetag_lock_module_list()
genalloc.c
159 spin_lock_init(&pool->lock); in gen_pool_create()
202 spin_lock(&pool->lock); in gen_pool_add_owner()
204 spin_unlock(&pool->lock); in gen_pool_add_owner()
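
gen_pool is a general-purpose allocator for special-purpose memory such as SRAM or DMA regions; pool->lock, set up in gen_pool_create() above, protects the pool's chunk list. A hedged setup sketch (buf and size stand in for a caller-owned region):

    #include <linux/genalloc.h>

    static int demo(void *buf, size_t size)
    {
        struct gen_pool *pool;
        unsigned long chunk;

        pool = gen_pool_create(5, NUMA_NO_NODE);  /* 2^5 = 32-byte granularity */
        if (!pool)
            return -ENOMEM;

        /* Hand the region to the pool; the chunk is linked under pool->lock. */
        if (gen_pool_add(pool, (unsigned long)buf, size, NUMA_NO_NODE)) {
            gen_pool_destroy(pool);
            return -ENOMEM;
        }

        chunk = gen_pool_alloc(pool, 256);        /* returns 0 on failure */
        if (chunk)
            gen_pool_free(pool, chunk, 256);

        gen_pool_destroy(pool);                   /* all allocations must be freed */
        return 0;
    }
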
Kconfig.debug
1276 the mutex lock on which "hung tasks" are waiting.
1312 lockup, "hung task", or locking an arbitrary lock for a long time.
1434 that the lock nesting rules for PREEMPT_RT enabled kernels are
1455 This feature enables tracking lock contention points
1459 This also enables lock events required by "perf lock",
1461 If you want to use "perf lock", you also need to turn on
1464 CONFIG_LOCK_STAT defines "contended" and "acquired" lock events.
1475 bool "Spinlock and rw-lock debugging: basic checks"
1524 This feature will check whether any held lock (spinlock, rwlock,
1527 vfree(), etc.), whether a live lock is incorrectly reinitialized via
[all …]
test_lockup.c
490 offsetof(spinlock_t, lock.wait_lock.magic), in test_lockup_init()
locking-selftest.c
156 init_class_##class(spinlock_t *lock, rwlock_t *rwlock, \
159 spin_lock_init(lock); \
/lib/kunit/
string-stream.c
82 spin_lock(&stream->lock); in string_stream_vadd()
85 spin_unlock(&stream->lock); in string_stream_vadd()
106 spin_lock(&stream->lock); in string_stream_clear()
114 spin_unlock(&stream->lock); in string_stream_clear()
127 spin_lock(&stream->lock); in string_stream_get_string()
130 spin_unlock(&stream->lock); in string_stream_get_string()
167 spin_lock_init(&stream->lock); in alloc_string_stream()
resource.c
39 spin_lock_irqsave(&test->lock, flags); in __kunit_add_resource()
42 spin_unlock_irqrestore(&test->lock, flags); in __kunit_add_resource()
53 spin_lock_irqsave(&test->lock, flags); in kunit_remove_resource()
56 spin_unlock_irqrestore(&test->lock, flags); in kunit_remove_resource()
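
In KUnit, test->lock guards the per-test resource list; everything registered there is released automatically by kunit_cleanup() (see test.c below). Most tests reach it indirectly through the managed allocators, e.g.:

    #include <kunit/test.h>

    static void example_test(struct kunit *test)
    {
        /* kunit_kzalloc() registers the buffer as a test resource
         * (on the list test->lock protects), so it is freed for us
         * when the test ends. */
        void *buf = kunit_kzalloc(test, 128, GFP_KERNEL);

        KUNIT_ASSERT_NOT_ERR_OR_NULL(test, buf);
    }
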
debugfs.c
55 spin_lock(&log->lock); in debugfs_print_result()
58 spin_unlock(&log->lock); in debugfs_print_result()
string-stream.h
25 spinlock_t lock; member
test.c
335 spin_lock_init(&test->lock); in kunit_init_test()
911 spin_lock_irqsave(&test->lock, flags); in kunit_cleanup()
913 spin_unlock_irqrestore(&test->lock, flags); in kunit_cleanup()
924 spin_unlock_irqrestore(&test->lock, flags); in kunit_cleanup()
/lib/math/
prime_numbers.c
62 static DEFINE_MUTEX(lock);
131 mutex_lock(&lock); in expand_to_next_prime()
132 p = rcu_dereference_protected(primes, lockdep_is_held(&lock)); in expand_to_next_prime()
155 mutex_unlock(&lock); in expand_to_next_prime()
163 mutex_lock(&lock); in free_primes()
164 p = rcu_dereference_protected(primes, lockdep_is_held(&lock)); in free_primes()
169 mutex_unlock(&lock); in free_primes()
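
prime_numbers.c pairs a static DEFINE_MUTEX(lock) with an RCU-published pointer: readers run locklessly under RCU, while writers take the mutex and document that with rcu_dereference_protected(..., lockdep_is_held(&lock)), letting lockdep verify the claim. The same pattern in generic, hedged form (struct table is hypothetical):

    #include <linux/mutex.h>
    #include <linux/rcupdate.h>
    #include <linux/slab.h>

    struct table {                  /* hypothetical */
        struct rcu_head rcu;
        /* ... payload ... */
    };

    static DEFINE_MUTEX(lock);
    static struct table __rcu *table;

    static void replace_table(struct table *new)
    {
        struct table *old;

        mutex_lock(&lock);
        /* No rcu_read_lock() needed: we are the writer, and lockdep
         * can check that the mutex is held. */
        old = rcu_dereference_protected(table, lockdep_is_held(&lock));
        rcu_assign_pointer(table, new);
        mutex_unlock(&lock);

        if (old)
            kfree_rcu(old, rcu);    /* free only after all readers are done */
    }
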
/lib/raid6/
altivec.uc
22 * bracketed this with preempt_disable/enable or in a lock)
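
The comment warns that kernel code may not touch the PowerPC vector unit unless the task is pinned, because the vector registers are not saved or restored across preemption. The raid6 code brackets its vector loops accordingly; in outline (powerpc-only helpers):

    #include <linux/preempt.h>
    #include <asm/switch_to.h>      /* enable_kernel_altivec() on powerpc */

    static void raid6_vector_work(void)
    {
        preempt_disable();          /* no context switch while VMX is live */
        enable_kernel_altivec();    /* allow kernel use of the vector unit */

        /* ... Altivec gen_syndrome inner loop ... */

        disable_kernel_altivec();
        preempt_enable();
    }
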