Lines Matching refs:iowq (references to the io_wait_queue variable iowq in the io_uring completion-wait path)
7782 static inline bool io_should_wake(struct io_wait_queue *iowq) in io_should_wake() argument
7784 struct io_ring_ctx *ctx = iowq->ctx; in io_should_wake()
7785 int dist = ctx->cached_cq_tail - (int) iowq->cq_tail; in io_should_wake()
7792 return dist >= 0 || atomic_read(&ctx->cq_timeouts) != iowq->nr_timeouts; in io_should_wake()
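The four hits above cover the body of io_should_wake(); the elided lines do not reference iowq. Reassembled, with inferred (not quoted) comments, the wake test is a signed-distance check plus a timeout check:

    static inline bool io_should_wake(struct io_wait_queue *iowq)
    {
            struct io_ring_ctx *ctx = iowq->ctx;
            int dist = ctx->cached_cq_tail - (int) iowq->cq_tail;

            /*
             * iowq->cq_tail was primed to "CQ head at wait time + min_events"
             * (line 7907 below), so dist >= 0 means the kernel-side tail has
             * advanced far enough that at least min_events completions are
             * available.  The signed subtraction keeps the comparison valid
             * across 32-bit counter wraparound.  Independently of the event
             * count, wake if any timeout request fired since the wait began,
             * so the task can return to userspace.
             */
            return dist >= 0 || atomic_read(&ctx->cq_timeouts) != iowq->nr_timeouts;
    }
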
7798 struct io_wait_queue *iowq = container_of(curr, struct io_wait_queue, in io_wake_function() local
7805 if (io_should_wake(iowq) || test_bit(0, &iowq->ctx->check_cq_overflow)) in io_wake_function()
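io_wake_function() is the custom wake callback installed at line 7902 below. Only the two lines above reference iowq; a sketch of the full callback, with the return paths filled in as assumptions (wake and auto-remove via autoremove_wake_function() when the condition holds, decline the wakeup otherwise):

    static int io_wake_function(struct wait_queue_entry *curr, unsigned int mode,
                                int wake_flags, void *key)
    {
            struct io_wait_queue *iowq = container_of(curr, struct io_wait_queue,
                                                            wq);

            /*
             * Wake when enough CQEs are ready, or when the CQ has overflowed
             * and the waiting task must flush it from its own context.
             */
            if (io_should_wake(iowq) || test_bit(0, &iowq->ctx->check_cq_overflow))
                    return autoremove_wake_function(curr, mode, wake_flags, key);
            /* assumed: a negative return declines the wakeup and leaves the entry queued */
            return -1;
    }
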
7832 struct io_wait_queue *iowq, in io_cqring_wait_schedule() argument
7839 if (ret || io_should_wake(iowq)) in io_cqring_wait_schedule()
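io_cqring_wait_schedule() only surfaces here through its iowq parameter (7832) and its early-return check (7839). A heavily hedged sketch of the usual shape of that helper; everything other than those two lines (the task-work run, the timed sleep, and the timeout type) is an assumption:

    static inline int io_cqring_wait_schedule(struct io_ring_ctx *ctx,
                                              struct io_wait_queue *iowq,
                                              ktime_t *timeout)
    {
            int ret;

            /* run pending task_work first; a signal or ready CQEs end the wait */
            ret = io_run_task_work_sig();
            if (ret || io_should_wake(iowq))
                    return ret;

            /* otherwise sleep until io_wake_function() fires or the timer expires */
            if (!schedule_hrtimeout(timeout, HRTIMER_MODE_ABS))
                    return -ETIME;
            return 1;
    }
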
7868 struct io_wait_queue iowq; in io_cqring_wait() local
7902 init_waitqueue_func_entry(&iowq.wq, io_wake_function); in io_cqring_wait()
7903 iowq.wq.private = current; in io_cqring_wait()
7904 INIT_LIST_HEAD(&iowq.wq.entry); in io_cqring_wait()
7905 iowq.ctx = ctx; in io_cqring_wait()
7906 iowq.nr_timeouts = atomic_read(&ctx->cq_timeouts); in io_cqring_wait()
7907 iowq.cq_tail = READ_ONCE(ctx->rings->cq.head) + min_events; in io_cqring_wait()
7916 prepare_to_wait_exclusive(&ctx->cq_wait, &iowq.wq, in io_cqring_wait()
7918 ret = io_cqring_wait_schedule(ctx, &iowq, &timeout); in io_cqring_wait()
7919 finish_wait(&ctx->cq_wait, &iowq.wq); in io_cqring_wait()
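Lines 7868-7919 are the consumer side in io_cqring_wait(): a stack-allocated io_wait_queue is given io_wake_function() as its wake callback, primed with the timeout baseline and the target CQ tail, then run through the standard prepare_to_wait_exclusive() / schedule / finish_wait() cycle. A condensed fragment of that flow; the loop-exit condition, the TASK_INTERRUPTIBLE state, and the surrounding error handling are assumptions, and ret, timeout, ctx and min_events are locals of io_cqring_wait() not shown by the search:

    struct io_wait_queue iowq;

    init_waitqueue_func_entry(&iowq.wq, io_wake_function);
    iowq.wq.private = current;              /* task to wake */
    INIT_LIST_HEAD(&iowq.wq.entry);
    iowq.ctx = ctx;
    /* baseline for io_should_wake()'s "did a timeout fire" test */
    iowq.nr_timeouts = atomic_read(&ctx->cq_timeouts);
    /* target tail: CQ head at wait time plus the requested event count */
    iowq.cq_tail = READ_ONCE(ctx->rings->cq.head) + min_events;

    do {
            prepare_to_wait_exclusive(&ctx->cq_wait, &iowq.wq,
                                      TASK_INTERRUPTIBLE);
            ret = io_cqring_wait_schedule(ctx, &iowq, &timeout);
            finish_wait(&ctx->cq_wait, &iowq.wq);
    } while (ret > 0);  /* assumed: keep waiting while the helper asks to retry */

Because each waiter carries its own cq_tail target and nr_timeouts baseline in its io_wait_queue, the wake callback can decide per waiter whether its threshold has been met instead of waking every sleeper on each posted completion.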