Searched refs:wq (Results 1 – 6 of 6) sorted by relevance

/kernel/
workqueue.c
207 struct workqueue_struct *wq; /* I: the owning workqueue */ member
381 static void workqueue_sysfs_unregister(struct workqueue_struct *wq);
396 #define assert_rcu_or_wq_mutex_or_pool_mutex(wq) \ argument
398 !lockdep_is_held(&wq->mutex) && \
451 #define for_each_pwq(pwq, wq) \ argument
452 list_for_each_entry_rcu((pwq), &(wq)->pwqs, pwqs_node, \
453 lockdep_is_held(&(wq->mutex)))
586 static struct pool_workqueue *unbound_pwq_by_node(struct workqueue_struct *wq, in unbound_pwq_by_node() argument
589 assert_rcu_or_wq_mutex_or_pool_mutex(wq); in unbound_pwq_by_node()
598 return wq->dfl_pwq; in unbound_pwq_by_node()
[all …]
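
The workqueue.c hits above revolve around walking wq->pwqs either under RCU or with wq->mutex held, with lockdep verifying whichever protection applies (see for_each_pwq() and assert_rcu_or_wq_mutex_or_pool_mutex()). A minimal sketch of that pattern follows; struct item, struct owner and sum_items() are hypothetical names, not workqueue internals.

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/rculist.h>

struct item {
	int val;
	struct list_head node;		/* linked into owner->items */
};

struct owner {
	struct mutex lock;		/* writers hold this */
	struct list_head items;		/* RCU-protected list of struct item */
};

/* Caller must hold rcu_read_lock() or o->lock (cf. for_each_pwq()). */
static int sum_items(struct owner *o)
{
	struct item *it;
	int sum = 0;

	/* the lockdep_is_held() condition silences the RCU-lockdep splat
	 * when the traversal runs under the mutex instead of RCU */
	list_for_each_entry_rcu(it, &o->items, node,
				lockdep_is_held(&o->lock))
		sum += it->val;

	return sum;
}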
watch_queue.c
461 struct watch_queue *wq = rcu_access_pointer(w->queue); in add_one_watch() local
462 if (wqueue == wq && watch->id == w->id) in add_one_watch()
524 int remove_watch_from_object(struct watch_list *wlist, struct watch_queue *wq, in remove_watch_from_object() argument
538 (watch->id == id && rcu_access_pointer(watch->queue) == wq)) in remove_watch_from_object()
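
The watch_queue.c hits compare an RCU-managed pointer without ever dereferencing it, which is exactly what rcu_access_pointer() permits. A minimal sketch with hypothetical names:

#include <linux/rcupdate.h>
#include <linux/types.h>

struct my_queue;

struct watcher {
	struct my_queue __rcu *queue;	/* published with rcu_assign_pointer() */
};

/* Compare the pointer value only; no dereference, so no rcu_read_lock(). */
static bool watcher_uses_queue(struct watcher *w, struct my_queue *q)
{
	return rcu_access_pointer(w->queue) == q;
}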
/kernel/sched/
wait.c
352 int do_wait_intr(wait_queue_head_t *wq, wait_queue_entry_t *wait) in do_wait_intr() argument
355 __add_wait_queue_entry_tail(wq, wait); in do_wait_intr()
361 spin_unlock(&wq->lock); in do_wait_intr()
363 spin_lock(&wq->lock); in do_wait_intr()
369 int do_wait_intr_irq(wait_queue_head_t *wq, wait_queue_entry_t *wait) in do_wait_intr_irq() argument
372 __add_wait_queue_entry_tail(wq, wait); in do_wait_intr_irq()
378 spin_unlock_irq(&wq->lock); in do_wait_intr_irq()
380 spin_lock_irq(&wq->lock); in do_wait_intr_irq()
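
do_wait_intr() and do_wait_intr_irq() package the classic "enqueue, drop the lock protecting the condition, sleep, retake the lock" step of a wait loop for callers that already hold the wait queue's own lock. A minimal open-coded sketch of that pattern; my_dev, its spinlock and its ready flag are hypothetical.

#include <linux/errno.h>
#include <linux/sched/signal.h>
#include <linux/spinlock.h>
#include <linux/wait.h>

struct my_dev {
	spinlock_t lock;		/* protects @ready */
	bool ready;
	wait_queue_head_t waitq;	/* waiters for @ready */
};

static int my_dev_wait_ready(struct my_dev *dev)
{
	DEFINE_WAIT(wait);
	int ret = 0;

	spin_lock(&dev->lock);
	while (!dev->ready) {
		prepare_to_wait(&dev->waitq, &wait, TASK_INTERRUPTIBLE);
		if (signal_pending(current)) {
			ret = -ERESTARTSYS;
			break;
		}
		spin_unlock(&dev->lock);	/* drop the condition lock */
		schedule();			/* sleep until woken */
		spin_lock(&dev->lock);		/* retake it and retest */
	}
	finish_wait(&dev->waitq, &wait);
	spin_unlock(&dev->lock);
	return ret;
}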
/kernel/locking/
test-ww_mutex.c
17 struct workqueue_struct *wq; variable
345 queue_work(wq, &cycles[n].work); in __test_cycle()
347 flush_workqueue(wq); in __test_cycle()
611 queue_work(wq, &stress->work); in stress()
615 flush_workqueue(wq); in stress()
632 wq = alloc_workqueue("test-ww_mutex", WQ_UNBOUND, 0); in test_ww_mutex_init()
633 if (!wq) in test_ww_mutex_init()
676 destroy_workqueue(wq); in test_ww_mutex_exit()
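
test-ww_mutex.c follows the usual workqueue lifecycle: alloc_workqueue(), queue_work(), flush_workqueue(), destroy_workqueue(). A minimal module-style sketch of the same sequence; the queue name, work handler and module hooks are hypothetical.

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/workqueue.h>

static struct workqueue_struct *my_wq;

static void my_work_fn(struct work_struct *work)
{
	/* runs in process context on a kworker thread */
}

static DECLARE_WORK(my_work, my_work_fn);

static int __init my_init(void)
{
	my_wq = alloc_workqueue("my-test", WQ_UNBOUND, 0);
	if (!my_wq)
		return -ENOMEM;

	queue_work(my_wq, &my_work);	/* run my_work_fn() asynchronously */
	flush_workqueue(my_wq);		/* wait until it has completed */
	return 0;
}

static void __exit my_exit(void)
{
	destroy_workqueue(my_wq);
}

module_init(my_init);
module_exit(my_exit);
MODULE_LICENSE("GPL");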
/kernel/rcu/
refscale.c
100 wait_queue_head_t wq; member
555 wait_event(rt->wq, (atomic_read(&nreaders_exp) && smp_load_acquire(&rt->start_reader)) || in ref_scale_reader()
702 wake_up(&reader_tasks[r].wq); in main_func()
870 init_waitqueue_head(&reader_tasks[i].wq); in ref_scale_init()
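
refscale.c gives each reader task its own wait_queue_head_t and starts readers with an acquire/release flag plus wake_up(). A minimal sketch of that handshake; struct reader, its start flag and these helpers are hypothetical stand-ins.

#include <linux/atomic.h>
#include <linux/wait.h>

struct reader {
	wait_queue_head_t wq;	/* reader sleeps here */
	int start;		/* set by the controller */
};

static void reader_init(struct reader *rt)
{
	init_waitqueue_head(&rt->wq);
	rt->start = 0;
}

/* Reader side: sleep until the controller releases us. */
static void reader_wait(struct reader *rt)
{
	wait_event(rt->wq, smp_load_acquire(&rt->start));
}

/* Controller side: publish the flag, then wake the sleeping reader. */
static void controller_start(struct reader *rt)
{
	smp_store_release(&rt->start, 1);
	wake_up(&rt->wq);
}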
/kernel/events/
uprobes.c
99 wait_queue_head_t wq; /* if all slots are busy */ member
1504 init_waitqueue_head(&area->wq); in __create_xol_area()
1596 wait_event(area->wq, (atomic_read(&area->slot_count) < UINSNS_PER_PAGE)); in xol_take_insn_slot()
1660 if (waitqueue_active(&area->wq)) in xol_free_insn_slot()
1661 wake_up(&area->wq); in xol_free_insn_slot()
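
uprobes.c pairs wait_event() on a slot counter with a waitqueue_active() check before wake_up() on the release path. A minimal sketch of that shape; struct slot_pool and POOL_SLOTS are hypothetical stand-ins for the xol_area slot accounting.

#include <linux/atomic.h>
#include <linux/wait.h>

#define POOL_SLOTS	32

struct slot_pool {
	atomic_t used;		/* slots currently taken */
	wait_queue_head_t wq;	/* waiters for a free slot */
};

static void pool_init(struct slot_pool *p)
{
	atomic_set(&p->used, 0);
	init_waitqueue_head(&p->wq);
}

/* Sleep until a slot can be reserved without exceeding POOL_SLOTS. */
static void pool_take_slot(struct slot_pool *p)
{
	while (!atomic_add_unless(&p->used, 1, POOL_SLOTS))
		wait_event(p->wq, atomic_read(&p->used) < POOL_SLOTS);
}

static void pool_free_slot(struct slot_pool *p)
{
	atomic_dec(&p->used);
	/*
	 * waitqueue_active() skips the wake_up() when nobody is sleeping;
	 * see the comment above waitqueue_active() in <linux/wait.h> for
	 * the memory-ordering care this optimization requires.
	 */
	if (waitqueue_active(&p->wq))
		wake_up(&p->wq);
}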