
Searched refs:wait_list (Results 1 – 25 of 26) sorted by relevance

/kernel/linux/linux-5.10/include/linux/
semaphore.h:18 struct list_head wait_list; member
25 .wait_list = LIST_HEAD_INIT((name).wait_list), \
rwsem.h:47 struct list_head wait_list; member
94 .wait_list = LIST_HEAD_INIT((name).wait_list), \
119 return !list_empty(&sem->wait_list); in rwsem_is_contended()
mutex.h:59 struct list_head wait_list; member
135 , .wait_list = LIST_HEAD_INIT(lockname.wait_list) \
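
The three locking headers above share one layout: each lock embeds the head of its waiter queue as a struct list_head named wait_list, set up either statically via LIST_HEAD_INIT or at runtime via INIT_LIST_HEAD. Below is a minimal sketch of that pattern using a hypothetical struct my_lock; only the list and spinlock helpers are real kernel APIs.

    #include <linux/list.h>
    #include <linux/spinlock.h>

    /* Hypothetical lock mirroring the layout in semaphore.h, rwsem.h and
     * mutex.h: the lock owns the head of its waiter list. */
    struct my_lock {
            raw_spinlock_t   lock;       /* protects wait_list */
            struct list_head wait_list;  /* waiters, oldest first */
    };

    /* Static initializer, as in .wait_list = LIST_HEAD_INIT((name).wait_list) */
    #define MY_LOCK_INIT(name) {                                \
            .lock      = __RAW_SPIN_LOCK_UNLOCKED((name).lock), \
            .wait_list = LIST_HEAD_INIT((name).wait_list),      \
    }

    /* Runtime initializer, as in __init_rwsem() / __mutex_init() below. */
    static inline void my_lock_init(struct my_lock *l)
    {
            raw_spin_lock_init(&l->lock);
            INIT_LIST_HEAD(&l->wait_list);
    }
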
/kernel/linux/linux-5.10/kernel/locking/
mutex-debug.c:40 DEBUG_LOCKS_WARN_ON(list_empty(&lock->wait_list)); in debug_mutex_wake_waiter()
76 DEBUG_LOCKS_WARN_ON(!lock->wait_list.prev && !lock->wait_list.next); in debug_mutex_unlock()
rwsem.c:111 list_empty(&(sem)->wait_list) ? "" : "not ")) \
338 INIT_LIST_HEAD(&sem->wait_list); in __init_rwsem()
359 list_first_entry(&sem->wait_list, struct rwsem_waiter, list)
501 list_for_each_entry_safe(waiter, tmp, &sem->wait_list, list) { in rwsem_mark_wake()
517 if (list_empty(&sem->wait_list)) { in rwsem_mark_wake()
587 if (list_is_singular(&sem->wait_list)) in rwsem_try_write_lock()
1023 if (!list_empty(&sem->wait_list)) in rwsem_down_read_slowpath()
1041 if (list_empty(&sem->wait_list)) { in rwsem_down_read_slowpath()
1059 list_add_tail(&waiter.list, &sem->wait_list); in rwsem_down_read_slowpath()
1109 if (list_empty(&sem->wait_list)) { in rwsem_down_read_slowpath()
[all …]
mutex.c:44 INIT_LIST_HEAD(&lock->wait_list); in __mutex_init()
200 return list_first_entry(&lock->wait_list, struct mutex_waiter, list) == waiter; in __mutex_waiter_is_first()
222 if (likely(list_empty(&lock->wait_list))) in __mutex_remove_waiter()
460 list_for_each_entry(cur, &lock->wait_list, list) { in __ww_mutex_check_waiters()
840 list_for_each_entry_continue_reverse(cur, &lock->wait_list, list) { in __ww_mutex_check_kill()
871 __mutex_add_waiter(lock, waiter, &lock->wait_list); in __ww_mutex_add_waiter()
884 pos = &lock->wait_list; in __ww_mutex_add_waiter()
885 list_for_each_entry_reverse(cur, &lock->wait_list, list) { in __ww_mutex_add_waiter()
997 __mutex_add_waiter(lock, &waiter, &lock->wait_list); in __mutex_lock_common()
1269 if (!list_empty(&lock->wait_list)) { in __mutex_unlock_slowpath()
[all …]
semaphore.c:183 if (likely(list_empty(&sem->wait_list))) in up()
209 list_add_tail(&waiter.list, &sem->wait_list); in __down_common()
257 struct semaphore_waiter *waiter = list_first_entry(&sem->wait_list, in __up()
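
Read together, the semaphore.c hits show the usual sleeper/waker hand-off: the slow path of down() links a waiter living on the sleeper's stack onto sem->wait_list, and __up() pops the oldest entry and wakes it. The following is a condensed sketch of that hand-off with hypothetical my_sem/my_waiter types; signal and timeout handling are omitted, and only the list, spinlock and scheduler calls are real kernel APIs.

    #include <linux/list.h>
    #include <linux/sched.h>
    #include <linux/spinlock.h>

    struct my_sem {
            raw_spinlock_t   lock;
            struct list_head wait_list;
    };

    struct my_waiter {
            struct list_head    list;
            struct task_struct *task;
            bool                woken;
    };

    /* Slow path of down(): caller holds sem->lock with IRQs disabled. */
    static void my_down_slow(struct my_sem *sem)
    {
            struct my_waiter waiter;

            list_add_tail(&waiter.list, &sem->wait_list);   /* FIFO enqueue */
            waiter.task  = current;
            waiter.woken = false;

            for (;;) {
                    __set_current_state(TASK_UNINTERRUPTIBLE);
                    raw_spin_unlock_irq(&sem->lock);
                    schedule();                     /* sleep until woken */
                    raw_spin_lock_irq(&sem->lock);
                    if (waiter.woken)
                            return;                 /* count was handed to us */
            }
    }

    /* Slow path of up(): hand the semaphore to the oldest waiter. */
    static void my_up_slow(struct my_sem *sem)
    {
            struct my_waiter *waiter =
                    list_first_entry(&sem->wait_list, struct my_waiter, list);

            list_del(&waiter->list);
            waiter->woken = true;
            wake_up_process(waiter->task);
    }
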
/kernel/linux/linux-5.10/drivers/gpu/drm/omapdrm/
omap_irq.c:27 list_for_each_entry(wait, &priv->wait_list, node) in omap_irq_update()
53 list_add(&wait->node, &priv->wait_list); in omap_irq_wait_init()
242 list_for_each_entry_safe(wait, n, &priv->wait_list, node) { in omap_irq_handler()
274 INIT_LIST_HEAD(&priv->wait_list); in omap_drm_irq_install()
omap_drv.h:77 struct list_head wait_list; /* list of omap_irq_wait */ member
/kernel/linux/linux-5.10/drivers/md/bcache/
closure.c:60 void __closure_wake_up(struct closure_waitlist *wait_list) in __closure_wake_up() argument
66 list = llist_del_all(&wait_list->list); in __closure_wake_up()
/kernel/linux/linux-5.10/fs/xfs/
xfs_buf.c:2143 struct list_head *wait_list) in xfs_buf_delwri_submit_buffers() argument
2153 if (!wait_list) { in xfs_buf_delwri_submit_buffers()
2186 if (wait_list) { in xfs_buf_delwri_submit_buffers()
2188 list_move_tail(&bp->b_list, wait_list); in xfs_buf_delwri_submit_buffers()
2235 LIST_HEAD (wait_list); in xfs_buf_delwri_submit()
2239 xfs_buf_delwri_submit_buffers(buffer_list, &wait_list); in xfs_buf_delwri_submit()
2242 while (!list_empty(&wait_list)) { in xfs_buf_delwri_submit()
2243 bp = list_first_entry(&wait_list, struct xfs_buf, b_list); in xfs_buf_delwri_submit()
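
The xfs_buf.c hits use the name differently: here wait_list is a caller-local list head. xfs_buf_delwri_submit() moves the buffers it must wait on to an on-stack wait_list and then drains it one entry at a time. Below is a reduced sketch of that submit-then-drain idiom, with a hypothetical my_item type and my_item_wait() standing in for the buffer I/O wait.

    #include <linux/list.h>

    struct my_item {
            struct list_head link;
            /* ... payload ... */
    };

    /* Hypothetical per-item wait, standing in for xfs_buf_iowait(). */
    int my_item_wait(struct my_item *item);

    /* Move every pending item onto a caller-local wait list, then drain it. */
    static int my_submit_and_wait(struct list_head *pending)
    {
            LIST_HEAD(wait_list);           /* on-stack list head */
            struct my_item *item, *tmp;
            int error = 0, error2;

            /* "Submit" each item and park it on the local wait_list. */
            list_for_each_entry_safe(item, tmp, pending, link)
                    list_move_tail(&item->link, &wait_list);

            /* Drain: wait for each item in submission order. */
            while (!list_empty(&wait_list)) {
                    item = list_first_entry(&wait_list, struct my_item, link);
                    list_del_init(&item->link);

                    error2 = my_item_wait(item);
                    if (!error)
                            error = error2;
            }

            return error;
    }
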
/kernel/linux/linux-5.10/drivers/nvme/target/
rdma.c:75 struct list_head wait_list; member
528 struct nvmet_rdma_rsp, wait_list); in nvmet_rdma_process_wr_wait_list()
529 list_del(&rsp->wait_list); in nvmet_rdma_process_wr_wait_list()
536 list_add(&rsp->wait_list, &queue->rsp_wr_wait_list); in nvmet_rdma_process_wr_wait_list()
997 list_add_tail(&cmd->wait_list, &queue->rsp_wr_wait_list); in nvmet_rdma_handle_command()
1053 list_add_tail(&rsp->wait_list, &queue->rsp_wait_list); in nvmet_rdma_recv_done()
1629 struct nvmet_rdma_rsp, wait_list); in nvmet_rdma_queue_established()
1630 list_del(&cmd->wait_list); in nvmet_rdma_queue_established()
1656 wait_list); in __nvmet_rdma_queue_disconnect()
1657 list_del(&rsp->wait_list); in __nvmet_rdma_queue_disconnect()
/kernel/linux/linux-5.10/drivers/infiniband/ulp/rtrs/
rtrs-srv.h:59 struct list_head wait_list; member
rtrs-srv.c:548 list_add_tail(&id->wait_list, &con->rsp_wr_wait_list); in rtrs_srv_resp_rdma()
1166 struct rtrs_srv_op, wait_list); in rtrs_rdma_process_wr_wait_list()
1167 list_del(&id->wait_list); in rtrs_rdma_process_wr_wait_list()
1174 list_add(&id->wait_list, &con->rsp_wr_wait_list); in rtrs_rdma_process_wr_wait_list()
/kernel/linux/linux-5.10/fs/hmdfs/
main.c:432 list_add_tail(&item.list, &sbi->hsi.wait_list); in hmdfs_sync_fs()
544 if (list_empty(&sbi->hsi.wait_list)) { in hmdfs_sync_fs()
547 entry = list_last_entry(&sbi->hsi.wait_list, struct syncfs_item, in hmdfs_sync_fs()
550 list_splice_init(&sbi->hsi.wait_list, &sbi->hsi.pending_list); in hmdfs_sync_fs()
943 INIT_LIST_HEAD(&sbi->hsi.wait_list); in hmdfs_fill_super()
hmdfs.h:96 struct list_head wait_list; member
/kernel/linux/linux-5.10/drivers/infiniband/core/
mad_priv.h:98 struct list_head wait_list; member
mad.c:399 INIT_LIST_HEAD(&mad_agent_priv->wait_list); in ib_register_mad_agent()
1750 list_for_each_entry(wr, &mad_agent_priv->wait_list, agent_list) { in ib_find_send_mad()
2164 if (list_empty(&mad_agent_priv->wait_list)) { in adjust_timeout()
2167 mad_send_wr = list_entry(mad_agent_priv->wait_list.next, in adjust_timeout()
2197 list_for_each_prev(list_item, &mad_agent_priv->wait_list) { in wait_for_response()
2207 list_item = &mad_agent_priv->wait_list; in wait_for_response()
2211 if (mad_agent_priv->wait_list.next == &mad_send_wr->agent_list) in wait_for_response()
2428 list_splice_init(&mad_agent_priv->wait_list, &cancel_list); in cancel_mads()
2451 list_for_each_entry(mad_send_wr, &mad_agent_priv->wait_list, in find_send_wr()
2644 while (!list_empty(&mad_agent_priv->wait_list)) { in timeout_sends()
[all …]
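
The MAD layer keeps its wait_list sorted by deadline: adjust_timeout() only inspects the first entry, and wait_for_response() walks the list backwards (list_for_each_prev) to find the insertion point. A sketch of that ordered insert follows, under hypothetical my_agent/my_send_wr types; only the list helpers and time_after() are real kernel APIs.

    #include <linux/jiffies.h>
    #include <linux/list.h>

    struct my_agent {
            struct list_head wait_list;     /* sorted by ->timeout, soonest first */
    };

    struct my_send_wr {
            struct list_head agent_list;
            unsigned long    timeout;       /* absolute deadline in jiffies */
    };

    /* Keep wait_list ordered by deadline, in the spirit of wait_for_response():
     * walk backwards to the last entry whose deadline is not later than ours. */
    static void my_queue_for_response(struct my_agent *agent, struct my_send_wr *wr)
    {
            struct list_head *pos;
            struct my_send_wr *tmp;

            list_for_each_prev(pos, &agent->wait_list) {
                    tmp = list_entry(pos, struct my_send_wr, agent_list);
                    if (!time_after(tmp->timeout, wr->timeout))
                            break;
            }
            /* If no such entry exists, pos is the list head: insert at the front. */
            list_add(&wr->agent_list, pos);
    }
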
/kernel/linux/linux-5.10/drivers/infiniband/ulp/srpt/
ib_srpt.h:185 struct list_head wait_list; member
ib_srpt.c:1638 if (!list_empty(&recv_ioctx->wait_list)) { in srpt_handle_new_iu()
1640 list_del_init(&recv_ioctx->wait_list); in srpt_handle_new_iu()
1675 if (list_empty(&recv_ioctx->wait_list)) { in srpt_handle_new_iu()
1677 list_add_tail(&recv_ioctx->wait_list, &ch->cmd_wait_list); in srpt_handle_new_iu()
1719 wait_list) { in srpt_process_wait_list()
2289 INIT_LIST_HEAD(&ch->ioctx_recv_ring[i]->wait_list); in srpt_cm_req_recv()
3090 INIT_LIST_HEAD(&sdev->ioctx_ring[i]->wait_list); in srpt_alloc_srq()
3349 WARN_ON_ONCE(!list_empty(&recv_ioctx->wait_list)); in srpt_release_cmd()
/kernel/linux/linux-5.10/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_sriov_common.c:195 INIT_LIST_HEAD(&vf->rcv_act.wait_list); in qlcnic_sriov_init()
196 INIT_LIST_HEAD(&vf->rcv_pend.wait_list); in qlcnic_sriov_init()
247 while (!list_empty(&t_list->wait_list)) { in qlcnic_sriov_cleanup_list()
248 trans = list_first_entry(&t_list->wait_list, in qlcnic_sriov_cleanup_list()
1066 trans = list_first_entry(&vf->rcv_act.wait_list, in qlcnic_sriov_process_bc_cmd()
1126 list_add_tail(&trans->list, &t_list->wait_list); in __qlcnic_sriov_add_act_list()
1158 list_for_each(node, &vf->rcv_pend.wait_list) { in qlcnic_sriov_handle_pending_trans()
1259 list_add_tail(&trans->list, &vf->rcv_pend.wait_list); in qlcnic_sriov_handle_bc_cmd()
qlcnic_sriov.h:61 struct list_head wait_list; member
/kernel/linux/linux-5.10/drivers/md/
dm-integrity.c:216 struct list_head wait_list; member
1124 list_for_each_entry(range, &ic->wait_list, wait_entry) { in add_new_range()
1154 while (unlikely(!list_empty(&ic->wait_list))) { in remove_range_unlocked()
1156 list_first_entry(&ic->wait_list, struct dm_integrity_range, wait_entry); in remove_range_unlocked()
1162 list_add(&last_range->wait_entry, &ic->wait_list); in remove_range_unlocked()
1182 list_add_tail(&new_range->wait_entry, &ic->wait_list); in wait_and_add_new_range()
3845 INIT_LIST_HEAD(&ic->wait_list); in dm_integrity_ctr()
4389 BUG_ON(!list_empty(&ic->wait_list)); in dm_integrity_dtr()
/kernel/linux/linux-5.10/fs/f2fs/
segment.c:1269 struct list_head *wait_list = (dpolicy->type == DPOLICY_FSTRIM) ? in __submit_discard_cmd() local
1270 &(dcc->fstrim_list) : &(dcc->wait_list); in __submit_discard_cmd()
1344 list_move_tail(&dc->list, wait_list); in __submit_discard_cmd()
1754 struct list_head *wait_list = (dpolicy->type == DPOLICY_FSTRIM) ? in __wait_discard_cmd_range() local
1755 &(dcc->fstrim_list) : &(dcc->wait_list); in __wait_discard_cmd_range()
1764 list_for_each_entry_safe(dc, tmp, wait_list, list) { in __wait_discard_cmd_range()
2245 INIT_LIST_HEAD(&dcc->wait_list); in create_discard_cmd_control()
/kernel/linux/linux-5.10/drivers/acpi/
osl.c:1232 BUG_ON(!list_empty(&sem->wait_list)); in acpi_os_delete_semaphore()
