
Searched refs:wq (Results 1 – 6 of 6) sorted by relevance

/kernel/
workqueue.c
206 struct workqueue_struct *wq; /* I: the owning workqueue */ member
363 static void workqueue_sysfs_unregister(struct workqueue_struct *wq);
377 #define assert_rcu_or_wq_mutex_or_pool_mutex(wq) \ argument
379 !lockdep_is_held(&wq->mutex) && \
432 #define for_each_pwq(pwq, wq) \ argument
433 list_for_each_entry_rcu((pwq), &(wq)->pwqs, pwqs_node, \
434 lockdep_is_held(&(wq->mutex)))
567 static struct pool_workqueue *unbound_pwq_by_node(struct workqueue_struct *wq, in unbound_pwq_by_node() argument
570 assert_rcu_or_wq_mutex_or_pool_mutex(wq); in unbound_pwq_by_node()
579 return wq->dfl_pwq; in unbound_pwq_by_node()
[all …]
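The hits above sit inside the workqueue implementation itself: the pool_workqueue back-pointer to its owning workqueue, the RCU/wq->mutex assertions used when walking a workqueue's pwqs, and the per-NUMA-node pwq lookup that falls back to wq->dfl_pwq. As a minimal caller-side sketch of how that machinery gets exercised, assuming only the public API in <linux/workqueue.h> (hello_fn and hello_work are illustrative names, not from workqueue.c):

#include <linux/workqueue.h>
#include <linux/printk.h>

static void hello_fn(struct work_struct *work)
{
	pr_info("ran on a kernel worker thread\n");
}
static DECLARE_WORK(hello_work, hello_fn);

static void kick_off(void)
{
	/* Queue onto the system workqueue; internally the work lands on a
	 * pool_workqueue whose ->wq back-pointer is the member hit above. */
	schedule_work(&hello_work);
}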
watch_queue.c
467 struct watch_queue *wq = rcu_access_pointer(w->queue); in add_one_watch() local
468 if (wqueue == wq && watch->id == w->id) in add_one_watch()
530 int remove_watch_from_object(struct watch_list *wlist, struct watch_queue *wq, in remove_watch_from_object() argument
544 (watch->id == id && rcu_access_pointer(watch->queue) == wq)) in remove_watch_from_object()
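remove_watch_from_object() above detaches watches from a watched object's watch_list, optionally filtered by the watch_queue they were added from. A hedged in-kernel sketch of the teardown side, assuming CONFIG_WATCH_QUEUE and assuming the parameters the listing truncates are a watch id and an all-watches flag; struct my_object and its helpers are made-up stand-ins for a real watched object such as a key:

#include <linux/watch_queue.h>

struct my_object {
	struct watch_list watchers;	/* everyone watching this object */
};

static void my_object_init(struct my_object *obj)
{
	init_watch_list(&obj->watchers, NULL);
}

static void my_object_destroy(struct my_object *obj)
{
	/* wq == NULL plus the all flag: detach every watch, whichever
	 * watch_queue it belongs to. */
	remove_watch_from_object(&obj->watchers, NULL, 0, true);
}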
/kernel/sched/
wait.c
334 int do_wait_intr(wait_queue_head_t *wq, wait_queue_entry_t *wait) in do_wait_intr() argument
337 __add_wait_queue_entry_tail(wq, wait); in do_wait_intr()
343 spin_unlock(&wq->lock); in do_wait_intr()
345 spin_lock(&wq->lock); in do_wait_intr()
351 int do_wait_intr_irq(wait_queue_head_t *wq, wait_queue_entry_t *wait) in do_wait_intr_irq() argument
354 __add_wait_queue_entry_tail(wq, wait); in do_wait_intr_irq()
360 spin_unlock_irq(&wq->lock); in do_wait_intr_irq()
362 spin_lock_irq(&wq->lock); in do_wait_intr_irq()
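do_wait_intr() and do_wait_intr_irq() above drop and retake wq->lock around the sleep; they back the wait_event_interruptible_locked() family, which the caller enters with the lock already held. A minimal sketch of that usage, assuming only <linux/wait.h>; my_wq, my_cond, and the two functions are illustrative:

#include <linux/wait.h>
#include <linux/spinlock.h>

static DECLARE_WAIT_QUEUE_HEAD(my_wq);
static bool my_cond;

static int wait_for_cond(void)
{
	int ret;

	spin_lock(&my_wq.lock);
	/* Expands to a loop around do_wait_intr(), which unlocks
	 * my_wq.lock while sleeping and re-locks it before returning. */
	ret = wait_event_interruptible_locked(my_wq, my_cond);
	spin_unlock(&my_wq.lock);
	return ret;
}

static void signal_cond(void)
{
	spin_lock(&my_wq.lock);
	my_cond = true;
	wake_up_locked(&my_wq);
	spin_unlock(&my_wq.lock);
}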
/kernel/locking/
test-ww_mutex.c
17 struct workqueue_struct *wq; variable
305 queue_work(wq, &cycles[n].work); in __test_cycle()
307 flush_workqueue(wq); in __test_cycle()
571 queue_work(wq, &stress->work); in stress()
575 flush_workqueue(wq); in stress()
590 wq = alloc_workqueue("test-ww_mutex", WQ_UNBOUND, 0); in test_ww_mutex_init()
591 if (!wq) in test_ww_mutex_init()
631 destroy_workqueue(wq); in test_ww_mutex_exit()
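test-ww_mutex.c above shows the full lifecycle of a private workqueue: allocate an unbound queue in the init path, queue and flush work while running, destroy the queue on exit. A hedged module-shaped sketch of the same pattern; the "wq_demo" name and demo_* identifiers are illustrative, not from the test:

#include <linux/module.h>
#include <linux/workqueue.h>

static struct workqueue_struct *demo_wq;

static void demo_fn(struct work_struct *work)
{
	/* work body */
}
static DECLARE_WORK(demo_work, demo_fn);

static int __init demo_init(void)
{
	demo_wq = alloc_workqueue("wq_demo", WQ_UNBOUND, 0);
	if (!demo_wq)
		return -ENOMEM;
	queue_work(demo_wq, &demo_work);
	flush_workqueue(demo_wq);	/* wait for demo_fn to finish */
	return 0;
}

static void __exit demo_exit(void)
{
	destroy_workqueue(demo_wq);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");

The explicit flush_workqueue() mirrors the test; destroy_workqueue() also drains any work still queued.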
/kernel/rcu/
refscale.c
85 wait_queue_head_t wq; member
381 wait_event(rt->wq, (atomic_read(&nreaders_exp) && smp_load_acquire(&rt->start_reader)) || in ref_scale_reader()
529 wake_up(&reader_tasks[r].wq); in main_func()
695 init_waitqueue_head(&reader_tasks[i].wq); in ref_scale_init()
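refscale.c above gives each reader task its own wait_queue_head_t: the reader sleeps in wait_event() until the controller flips a start flag and calls wake_up(). A simplified sketch of that hand-off, using a plain flag with READ_ONCE/WRITE_ONCE rather than the acquire/release pair refscale uses; struct reader and its helpers are illustrative:

#include <linux/wait.h>
#include <linux/compiler.h>

struct reader {
	wait_queue_head_t wq;
	int start;
};

static void reader_init(struct reader *r)
{
	init_waitqueue_head(&r->wq);
	r->start = 0;
}

static void reader_wait(struct reader *r)
{
	/* Re-checks the condition on every wake-up, so spurious wake-ups
	 * are harmless. */
	wait_event(r->wq, READ_ONCE(r->start));
}

static void reader_kick(struct reader *r)
{
	WRITE_ONCE(r->start, 1);
	wake_up(&r->wq);
}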
/kernel/events/
uprobes.c
99 wait_queue_head_t wq; /* if all slots are busy */ member
1506 init_waitqueue_head(&area->wq); in __create_xol_area()
1598 wait_event(area->wq, (atomic_read(&area->slot_count) < UINSNS_PER_PAGE)); in xol_take_insn_slot()
1662 if (waitqueue_active(&area->wq)) in xol_free_insn_slot()
1663 wake_up(&area->wq); in xol_free_insn_slot()
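uprobes.c above throttles instruction-slot consumers: a taker sleeps in wait_event() until a slot frees up, and the release path calls wake_up() only when waitqueue_active() reports someone sleeping. A hedged sketch of the same pattern with a simple counter; MAX_SLOTS and struct slot_pool are illustrative, and atomic_add_unless() keeps the check and the claim atomic:

#include <linux/wait.h>
#include <linux/atomic.h>

#define MAX_SLOTS 32

struct slot_pool {
	wait_queue_head_t wq;
	atomic_t used;
};

static void slot_pool_init(struct slot_pool *p)
{
	init_waitqueue_head(&p->wq);
	atomic_set(&p->used, 0);
}

static void slot_take(struct slot_pool *p)
{
	/* Sleep until a slot can be claimed; the claim itself is atomic. */
	wait_event(p->wq, atomic_add_unless(&p->used, 1, MAX_SLOTS));
}

static void slot_put(struct slot_pool *p)
{
	atomic_dec(&p->used);
	smp_mb__after_atomic();	/* order the decrement before the waiter check */
	if (waitqueue_active(&p->wq))
		wake_up(&p->wq);
}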