
Searched refs:lock (Results 1 – 25 of 117) sorted by relevance


/tools/virtio/linux/
spinlock.h
8 static inline void spin_lock_init(spinlock_t *lock) in spin_lock_init() argument
10 int r = pthread_spin_init(lock, 0); in spin_lock_init()
14 static inline void spin_lock(spinlock_t *lock) in spin_lock() argument
16 int ret = pthread_spin_lock(lock); in spin_lock()
20 static inline void spin_unlock(spinlock_t *lock) in spin_unlock() argument
22 int ret = pthread_spin_unlock(lock); in spin_unlock()
26 static inline void spin_lock_bh(spinlock_t *lock) in spin_lock_bh() argument
28 spin_lock(lock); in spin_lock_bh()
31 static inline void spin_unlock_bh(spinlock_t *lock) in spin_unlock_bh() argument
33 spin_unlock(lock); in spin_unlock_bh()
[all …]
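
The spinlock.h hits above are the userspace shim the virtio test harness uses so kernel-style code can be built as an ordinary process: each kernel spin_lock()/spin_unlock() call is forwarded to a POSIX spinlock, and the _bh variants collapse to the plain ones because there is no bottom-half context in userspace. A minimal self-contained sketch of that mapping (the spinlock_t typedef and the assert-on-error policy are assumptions made for this example):

	#include <assert.h>
	#include <pthread.h>

	typedef pthread_spinlock_t spinlock_t;	/* assumed typedef for this sketch */

	static inline void spin_lock_init(spinlock_t *lock)
	{
		/* 0 requests a process-private lock (PTHREAD_PROCESS_PRIVATE on glibc) */
		int r = pthread_spin_init(lock, 0);
		assert(!r);
	}

	static inline void spin_lock(spinlock_t *lock)
	{
		int ret = pthread_spin_lock(lock);
		assert(!ret);
	}

	static inline void spin_unlock(spinlock_t *lock)
	{
		int ret = pthread_spin_unlock(lock);
		assert(!ret);
	}

	/* There is no bottom-half context in userspace, so the _bh variants
	 * simply fall through to the plain lock/unlock. */
	static inline void spin_lock_bh(spinlock_t *lock)   { spin_lock(lock); }
	static inline void spin_unlock_bh(spinlock_t *lock) { spin_unlock(lock); }
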
/tools/testing/selftests/bpf/progs/
linked_list.c
12 struct bpf_spin_lock lock; member
28 int list_push_pop(struct bpf_spin_lock *lock, struct bpf_list_head *head, bool leave_in_map) in list_push_pop() argument
37 bpf_spin_lock(lock); in list_push_pop()
39 bpf_spin_unlock(lock); in list_push_pop()
46 bpf_spin_lock(lock); in list_push_pop()
48 bpf_spin_unlock(lock); in list_push_pop()
56 bpf_spin_lock(lock); in list_push_pop()
59 bpf_spin_unlock(lock); in list_push_pop()
62 bpf_spin_lock(lock); in list_push_pop()
64 bpf_spin_unlock(lock); in list_push_pop()
[all …]
test_spin_lock_fail.c
8 struct bpf_spin_lock lock; member
113 CHECK(kptr_kptr, &f1->lock, &f2->lock);
114 CHECK(kptr_global, &f1->lock, &lockA);
115 CHECK(kptr_mapval, &f1->lock, &v->lock);
116 CHECK(kptr_innermapval, &f1->lock, &iv->lock);
119 CHECK(global_kptr, &lockA, &f1->lock);
120 CHECK(global_mapval, &lockA, &v->lock);
121 CHECK(global_innermapval, &lockA, &iv->lock);
136 bpf_spin_lock(&f1->lock); in lock_id_mismatch_mapval_mapval()
138 bpf_spin_unlock(&f2->lock); in lock_id_mismatch_mapval_mapval()
[all …]
refcounted_kptr.c
41 private(A) struct bpf_spin_lock lock; variable
75 struct bpf_spin_lock *lock) in __insert_in_tree_and_list() argument
87 bpf_spin_lock(lock); in __insert_in_tree_and_list()
90 bpf_spin_unlock(lock); in __insert_in_tree_and_list()
94 bpf_spin_unlock(lock); in __insert_in_tree_and_list()
96 bpf_spin_lock(lock); in __insert_in_tree_and_list()
99 bpf_spin_unlock(lock); in __insert_in_tree_and_list()
102 bpf_spin_unlock(lock); in __insert_in_tree_and_list()
107 struct bpf_spin_lock *lock) in __stash_map_insert_tree() argument
130 bpf_spin_lock(lock); in __stash_map_insert_tree()
[all …]
test_helper_restricted.c
10 struct lock { struct
25 __type(value, struct lock);
50 struct lock *lock; in spin_lock_work() local
52 lock = bpf_map_lookup_elem(&locks, &key); in spin_lock_work()
53 if (lock) { in spin_lock_work()
54 bpf_spin_lock(&lock->l); in spin_lock_work()
55 bpf_spin_unlock(&lock->l); in spin_lock_work()
test_map_lock.c
10 struct bpf_spin_lock lock; member
22 struct bpf_spin_lock lock; member
45 bpf_spin_lock(&val->lock); in bpf_map_lock_test()
48 bpf_spin_unlock(&val->lock); in bpf_map_lock_test()
54 bpf_spin_lock(&q->lock); in bpf_map_lock_test()
57 bpf_spin_unlock(&q->lock); in bpf_map_lock_test()
linked_list_fail.c
106 CHECK(kptr_kptr, op, &f1->lock, &f2->head); \
107 CHECK(kptr_global, op, &f1->lock, &ghead); \
108 CHECK(kptr_map, op, &f1->lock, &v->head); \
109 CHECK(kptr_inner_map, op, &f1->lock, &iv->head); \
116 CHECK(map_map, op, &v->lock, &v2->head); \
117 CHECK(map_kptr, op, &v->lock, &f2->head); \
118 CHECK(map_global, op, &v->lock, &ghead); \
119 CHECK(map_inner_map, op, &v->lock, &iv->head); \
121 CHECK(inner_map_inner_map, op, &iv->lock, &iv2->head); \
122 CHECK(inner_map_kptr, op, &iv->lock, &f2->head); \
[all …]
test_spin_lock.c
10 struct bpf_spin_lock lock; member
22 struct bpf_spin_lock lock; member
33 struct bpf_spin_lock lock; member
70 bpf_spin_lock(&val->lock); in bpf_spin_lock_test()
77 bpf_spin_unlock(&val->lock); in bpf_spin_lock_test()
84 bpf_spin_lock(&q->lock); in bpf_spin_lock_test()
91 bpf_spin_unlock(&q->lock); in bpf_spin_lock_test()
97 bpf_spin_lock(&cls->lock); in bpf_spin_lock_test()
99 bpf_spin_unlock(&cls->lock); in bpf_spin_lock_test()
freplace_attach_probe.c
12 struct bpf_spin_lock lock; member
33 bpf_spin_lock(&val->lock); in new_handle_kprobe()
35 bpf_spin_unlock(&val->lock); in new_handle_kprobe()
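
All of the BPF selftest hits above revolve around one verifier rule: a struct bpf_spin_lock must live inside a map value or an allocated object, and any access to the fields it protects has to happen between bpf_spin_lock() and bpf_spin_unlock() on that exact lock; pairing a lock from one map or object with data from another, as test_spin_lock_fail.c and linked_list_fail.c do on purpose, is rejected at load time. A minimal sketch of the legal pattern, assuming libbpf BTF-defined maps (the map name, value layout, and program section are invented for illustration):

	/* Hedged sketch of the bpf_spin_lock pattern, not one of the selftests. */
	#include <linux/bpf.h>
	#include <bpf/bpf_helpers.h>

	struct val_elem {
		struct bpf_spin_lock lock;	/* must be embedded in the map value */
		int counter;			/* protected by 'lock' */
	};

	struct {
		__uint(type, BPF_MAP_TYPE_HASH);
		__uint(max_entries, 1);
		__type(key, int);
		__type(value, struct val_elem);
	} vals SEC(".maps");

	SEC("tc")
	int lock_demo(struct __sk_buff *skb)
	{
		int key = 0;
		struct val_elem *val;

		val = bpf_map_lookup_elem(&vals, &key);
		if (!val)
			return 0;

		bpf_spin_lock(&val->lock);
		val->counter++;		/* critical section: short, bounded,
					 * and no helper calls allowed inside */
		bpf_spin_unlock(&val->lock);
		return 0;
	}

	char _license[] SEC("license") = "GPL";
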
/tools/perf/tests/shell/
lock_contention.sh
30 if ! perf list | grep -q lock:contention_begin; then
47 perf lock record -o ${perfdata} -- perf bench sched messaging > /dev/null 2>&1
49 perf lock contention -i ${perfdata} -E 1 -q 2> ${result}
61 if ! perf lock con -b true > /dev/null 2>&1 ; then
67 perf lock con -a -b -E 1 -q -- perf bench sched messaging > /dev/null 2> ${result}
78 perf lock record -o- -- perf bench sched messaging 2> /dev/null | \
79 perf lock contention -i- -E 1 -q 2> ${result}
90 perf lock contention -i ${perfdata} -t -E 1 -q 2> ${result}
97 if ! perf lock con -b true > /dev/null 2>&1 ; then
102 perf lock con -a -b -t -E 1 -q -- perf bench sched messaging > /dev/null 2> ${result}
[all …]
/tools/virtio/ringtest/
ptr_ring.c
64 static void spin_lock_init(spinlock_t *lock) in spin_lock_init() argument
66 int r = pthread_spin_init(lock, 0); in spin_lock_init()
70 static void spin_lock(spinlock_t *lock) in spin_lock() argument
72 int ret = pthread_spin_lock(lock); in spin_lock()
76 static void spin_unlock(spinlock_t *lock) in spin_unlock() argument
78 int ret = pthread_spin_unlock(lock); in spin_unlock()
82 static void spin_lock_bh(spinlock_t *lock) in spin_lock_bh() argument
84 spin_lock(lock); in spin_lock_bh()
87 static void spin_unlock_bh(spinlock_t *lock) in spin_unlock_bh() argument
89 spin_unlock(lock); in spin_unlock_bh()
[all …]
/tools/perf/Documentation/
perf-lock.txt
1 perf-lock(1)
6 perf-lock - Analyze lock events
11 'perf lock' {record|report|script|info|contention}
15 You can analyze various lock behaviours
16 and statistics with this 'perf lock' command.
18 'perf lock record <command>' records lock events
21 results of lock events.
23 'perf lock report' reports statistical data.
25 'perf lock script' shows raw lock events.
27 'perf lock info' shows metadata like threads or addresses
[all …]
/tools/include/linux/
rwsem.h
8 pthread_rwlock_t lock; member
13 return pthread_rwlock_init(&sem->lock, NULL); in init_rwsem()
18 return pthread_rwlock_destroy(&sem->lock); in exit_rwsem()
23 return pthread_rwlock_rdlock(&sem->lock); in down_read()
28 return pthread_rwlock_unlock(&sem->lock); in up_read()
33 return pthread_rwlock_wrlock(&sem->lock); in down_write()
38 return pthread_rwlock_unlock(&sem->lock); in up_write()
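
The rwsem.h entry applies the same shim idea to reader/writer semaphores: tools code keeps the kernel spelling (down_read(), up_read(), down_write(), up_write()) while each call is a thin wrapper around a pthread_rwlock_t. A self-contained sketch with a trivial usage (the demo in main() is invented for the example):

	#include <pthread.h>
	#include <stdio.h>

	struct rw_semaphore {
		pthread_rwlock_t lock;
	};

	static int init_rwsem(struct rw_semaphore *sem) { return pthread_rwlock_init(&sem->lock, NULL); }
	static int exit_rwsem(struct rw_semaphore *sem) { return pthread_rwlock_destroy(&sem->lock); }
	static int down_read(struct rw_semaphore *sem)  { return pthread_rwlock_rdlock(&sem->lock); }
	static int up_read(struct rw_semaphore *sem)    { return pthread_rwlock_unlock(&sem->lock); }
	static int down_write(struct rw_semaphore *sem) { return pthread_rwlock_wrlock(&sem->lock); }
	static int up_write(struct rw_semaphore *sem)   { return pthread_rwlock_unlock(&sem->lock); }

	int main(void)
	{
		struct rw_semaphore sem;
		int count = 0;

		init_rwsem(&sem);

		down_write(&sem);	/* writers get exclusive access */
		count++;
		up_write(&sem);

		down_read(&sem);	/* readers may run concurrently with each other */
		printf("count=%d\n", count);
		up_read(&sem);

		exit_rwsem(&sem);
		return 0;
	}
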
/tools/perf/util/
threads.c
29 init_rwsem(&table->lock); in threads__init()
41 exit_rwsem(&table->lock); in threads__exit()
52 down_read(&table->lock); in threads__nr()
54 up_read(&table->lock); in threads__nr()
87 down_write(&table->lock); in threads_table_entry__set_last_match()
89 up_write(&table->lock); in threads_table_entry__set_last_match()
97 down_read(&table->lock); in threads__find()
103 up_read(&table->lock); in threads__find()
115 down_write(&table->lock); in threads__findnew()
131 up_write(&table->lock); in threads__findnew()
[all …]
rwsem.c
15 return pthread_rwlock_init(&sem->lock, NULL); in init_rwsem()
25 return pthread_rwlock_destroy(&sem->lock); in exit_rwsem()
35 return perf_singlethreaded ? 0 : pthread_rwlock_rdlock(&sem->lock); in down_read()
45 return perf_singlethreaded ? 0 : pthread_rwlock_unlock(&sem->lock); in up_read()
55 return perf_singlethreaded ? 0 : pthread_rwlock_wrlock(&sem->lock); in down_write()
65 return perf_singlethreaded ? 0 : pthread_rwlock_unlock(&sem->lock); in up_write()
mutex.c
33 CHECK_ERR(pthread_mutex_init(&mtx->lock, &attr)); in __mutex_init()
49 CHECK_ERR(pthread_mutex_destroy(&mtx->lock)); in mutex_destroy()
55 CHECK_ERR(pthread_mutex_lock(&mtx->lock)); in mutex_lock()
61 CHECK_ERR(pthread_mutex_unlock(&mtx->lock)); in mutex_unlock()
66 int ret = pthread_mutex_trylock(&mtx->lock); in mutex_trylock()
108 CHECK_ERR(pthread_cond_wait(&cnd->cond, &mtx->lock)); in cond_wait()
dsos.c
17 init_rwsem(&dsos->lock); in dsos__init()
27 down_write(&dsos->lock); in dsos__purge()
41 up_write(&dsos->lock); in dsos__purge()
47 exit_rwsem(&dsos->lock); in dsos__exit()
174 up_read(&dsos->lock); in __dsos__find_by_longname_id()
175 down_write(&dsos->lock); in __dsos__find_by_longname_id()
178 up_write(&dsos->lock); in __dsos__find_by_longname_id()
179 down_read(&dsos->lock); in __dsos__find_by_longname_id()
239 down_write(&dsos->lock); in dsos__add()
241 up_write(&dsos->lock); in dsos__add()
[all …]
comm.c
18 struct rw_semaphore lock; member
28 init_rwsem(&_comm_strs.lock); in comm_strs__init()
103 down_write(&comm_strs->lock); in comm_strs__remove_if_last()
118 up_write(&comm_strs->lock); in comm_strs__remove_if_last()
142 down_read(&comm_strs->lock); in comm_strs__findnew()
144 up_read(&comm_strs->lock); in comm_strs__findnew()
148 down_write(&comm_strs->lock); in comm_strs__findnew()
158 up_write(&comm_strs->lock); in comm_strs__findnew()
186 up_write(&comm_strs->lock); in comm_strs__findnew()
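
Within /tools/perf/util/, the rwsem.c hits add one twist on top of the plain wrapper: while perf is still known to be single-threaded (perf_singlethreaded), every down/up call returns 0 without touching the pthread rwlock at all. A hedged sketch of that fast path, borrowing only the flag name from the hits above and assuming the rest:

	#include <pthread.h>
	#include <stdbool.h>

	/* Assumed global: set to false before worker threads are spawned. */
	static bool perf_singlethreaded = true;

	struct rw_semaphore {
		pthread_rwlock_t lock;
	};

	static inline int down_read(struct rw_semaphore *sem)
	{
		/* With a single thread no contention is possible, so skip the
		 * comparatively expensive rwlock call entirely. */
		return perf_singlethreaded ? 0 : pthread_rwlock_rdlock(&sem->lock);
	}

	static inline int up_read(struct rw_semaphore *sem)
	{
		return perf_singlethreaded ? 0 : pthread_rwlock_unlock(&sem->lock);
	}

	static inline int down_write(struct rw_semaphore *sem)
	{
		return perf_singlethreaded ? 0 : pthread_rwlock_wrlock(&sem->lock);
	}

	static inline int up_write(struct rw_semaphore *sem)
	{
		return perf_singlethreaded ? 0 : pthread_rwlock_unlock(&sem->lock);
	}
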
/tools/testing/selftests/kvm/lib/aarch64/
spinlock.c
9 void spin_lock(struct spinlock *lock) in spin_lock() argument
20 : "r" (&lock->v) in spin_lock()
24 void spin_unlock(struct spinlock *lock) in spin_unlock() argument
26 asm volatile("stlr wzr, [%0]\n" : : "r" (&lock->v) : "memory"); in spin_unlock()
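
The aarch64 selftest spinlock above is written directly as load-acquire/store-release assembly (the unlock path is a single stlr of zero). As an illustrative analogue rather than the selftest's implementation, the same acquire/release discipline looks like this with C11 atomics:

	#include <stdatomic.h>
	#include <stdio.h>

	struct spinlock { atomic_int v; };

	static void spin_lock(struct spinlock *lock)
	{
		int expected = 0;

		/* Acquire on success so accesses inside the critical section
		 * cannot be reordered before the lock is taken. */
		while (!atomic_compare_exchange_weak_explicit(&lock->v, &expected, 1,
							      memory_order_acquire,
							      memory_order_relaxed))
			expected = 0;	/* a failed CAS overwrites 'expected' */
	}

	static void spin_unlock(struct spinlock *lock)
	{
		/* Store-release: the portable counterpart of the stlr above. */
		atomic_store_explicit(&lock->v, 0, memory_order_release);
	}

	int main(void)
	{
		struct spinlock lock = { .v = 0 };

		spin_lock(&lock);
		puts("in critical section");
		spin_unlock(&lock);
		return 0;
	}
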
/tools/testing/selftests/timens/
clock_nanosleep.c
27 pthread_mutex_t *lock; member
37 pthread_mutex_unlock(args->lock); in call_nanosleep()
46 pthread_mutex_t lock; in run_test() local
53 pthread_mutex_init(&lock, NULL); in run_test()
54 pthread_mutex_lock(&lock); in run_test()
70 args.lock = &lock; in run_test()
88 if (pthread_mutex_trylock(&lock) == 0) { in run_test()
97 pthread_mutex_destroy(&lock); in run_test()
/tools/testing/radix-tree/
regression1.c
49 pthread_mutex_t lock; member
61 pthread_mutex_init(&p->lock, NULL); in page_alloc()
70 pthread_mutex_destroy(&p->lock); in page_rcu_free()
91 pthread_mutex_lock(&page->lock); in find_get_pages()
96 pthread_mutex_unlock(&page->lock); in find_get_pages()
106 pthread_mutex_unlock(&page->lock); in find_get_pages()
139 pthread_mutex_lock(&p->lock); in regression1_fn()
141 pthread_mutex_unlock(&p->lock); in regression1_fn()
147 pthread_mutex_lock(&p->lock); in regression1_fn()
149 pthread_mutex_unlock(&p->lock); in regression1_fn()
/tools/arch/x86/include/asm/
cmpxchg.h
35 #define __raw_cmpxchg(ptr, old, new, size, lock) \ argument
44 asm volatile(lock "cmpxchgb %2,%1" \
53 asm volatile(lock "cmpxchgw %2,%1" \
62 asm volatile(lock "cmpxchgl %2,%1" \
71 asm volatile(lock "cmpxchgq %2,%1" \
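
In the cmpxchg.h hit, __raw_cmpxchg() switches on the operand size to emit a lock cmpxchg{b,w,l,q} instruction of matching width; the lock parameter carries the instruction prefix (the lock prefix, or nothing for a non-atomic variant) that is pasted into the asm template. As a rough, compiler-portable illustration of the same semantics (atomically replace *ptr with new only if it still equals old, and return the prior value), here is a sketch built on the GCC/Clang __atomic builtin rather than inline asm; the macro name is invented for this example:

	#include <stdint.h>
	#include <stdio.h>

	/* Returns the value that was in *ptr before the operation, like cmpxchg(). */
	#define cmpxchg_like(ptr, old, new) ({					\
		__typeof__(*(ptr)) __old = (old);				\
		__atomic_compare_exchange_n((ptr), &__old, (new), 0,		\
					    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);\
		__old;								\
	})

	int main(void)
	{
		uint32_t v = 5;

		/* Succeeds: v was 5, becomes 7; the old value 5 is returned. */
		printf("%u\n", cmpxchg_like(&v, 5u, 7u));
		/* Fails: v is now 7, not 5; v is unchanged and 7 is returned. */
		printf("%u\n", cmpxchg_like(&v, 5u, 9u));
		return 0;
	}
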
/tools/testing/shared/
linux.c
20 pthread_mutex_t lock; member
89 pthread_mutex_lock(&cachep->lock); in kmem_cache_alloc_lru()
94 pthread_mutex_unlock(&cachep->lock); in kmem_cache_alloc_lru()
98 pthread_mutex_unlock(&cachep->lock); in kmem_cache_alloc_lru()
142 pthread_mutex_lock(&cachep->lock); in kmem_cache_free()
144 pthread_mutex_unlock(&cachep->lock); in kmem_cache_free()
152 pthread_mutex_lock(&cachep->lock); in kmem_cache_free_bulk()
155 pthread_mutex_unlock(&cachep->lock); in kmem_cache_free_bulk()
170 pthread_mutex_lock(&cachep->lock); in kmem_cache_alloc_bulk()
187 pthread_mutex_unlock(&cachep->lock); in kmem_cache_alloc_bulk()
[all …]
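
The linux.c hits come from the shared test harness's userspace stand-in for kmem_cache, where a pthread mutex serializes allocation and free against a small per-cache free list. A stripped-down sketch of that shape (the structure layout, the names, and the malloc() fallback are assumptions, not the harness's exact code; it also assumes the object size is at least sizeof(void *)):

	#include <pthread.h>
	#include <stdlib.h>

	struct obj { struct obj *next; };

	struct kmem_cache {
		pthread_mutex_t lock;	/* protects free_list and nr_free */
		size_t size;
		struct obj *free_list;
		int nr_free;
	};

	static void *cache_alloc(struct kmem_cache *cachep)
	{
		struct obj *p;

		pthread_mutex_lock(&cachep->lock);
		p = cachep->free_list;
		if (p) {
			cachep->free_list = p->next;
			cachep->nr_free--;
		}
		pthread_mutex_unlock(&cachep->lock);

		/* Fall back to the system allocator when the free list is empty. */
		return p ? (void *)p : malloc(cachep->size);
	}

	static void cache_free(struct kmem_cache *cachep, void *objp)
	{
		struct obj *p = objp;

		pthread_mutex_lock(&cachep->lock);
		p->next = cachep->free_list;
		cachep->free_list = p;
		cachep->nr_free++;
		pthread_mutex_unlock(&cachep->lock);
	}

	int main(void)
	{
		struct kmem_cache cache = {
			.lock = PTHREAD_MUTEX_INITIALIZER,
			.size = 64,
		};
		void *obj = cache_alloc(&cache);

		cache_free(&cache, obj);	/* object goes onto the free list */
		return 0;
	}
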
/tools/memory-model/
linux-kernel.bell
28 'rcu-lock (*rcu_read_lock*) ||
34 'after-unlock-lock (*smp_mb__after_unlock_lock*) ||
39 enum SRCU = 'srcu-lock || 'srcu-unlock || 'sync-srcu
42 let Srcu = Srcu-lock | Srcu-unlock | Sync-srcu
44 (* Compute matching pairs of nested Rcu-lock and Rcu-unlock *)
46 unmatched-locks = Rcu-lock \ domain(matched)
57 flag ~empty Rcu-lock \ domain(rcu-rscs) as unmatched-rcu-lock
60 (* Compute matching pairs of nested Srcu-lock and Srcu-unlock *)
62 let srcu-rscs = ([Srcu-lock] ; carry-srcu-data ; data ; [Srcu-unlock]) & loc
65 flag ~empty Srcu-lock \ domain(srcu-rscs) as unmatched-srcu-lock
[all …]
/tools/perf/util/bpf_skel/
lock_contention.bpf.c
246 static inline struct task_struct *get_lock_owner(__u64 lock, __u32 flags) in get_lock_owner() argument
252 struct mutex *mutex = (void *)lock; in get_lock_owner()
263 struct rw_semaphore___old *rwsem = (void *)lock; in get_lock_owner()
266 struct rw_semaphore___new *rwsem = (void *)lock; in get_lock_owner()
271 struct rw_semaphore *rwsem = (void *)lock; in get_lock_owner()
283 static inline __u32 check_lock_type(__u64 lock, __u32 flags) in check_lock_type() argument
298 if (&mm_new->mmap_lock == (void *)lock) in check_lock_type()
304 if (&mm_old->mmap_sem == (void *)lock) in check_lock_type()
312 if (sighand && &sighand->siglock == (void *)lock) in check_lock_type()
332 if (pelem && pelem->lock) in get_tstamp_elem()
[all …]
