Lines Matching refs:iowq
7489 static inline bool io_should_wake(struct io_wait_queue *iowq) in io_should_wake() argument
7491 struct io_ring_ctx *ctx = iowq->ctx; in io_should_wake()
7492 int dist = ctx->cached_cq_tail - (int) iowq->cq_tail; in io_should_wake()
7499 return dist >= 0 || atomic_read(&ctx->cq_timeouts) != iowq->nr_timeouts; in io_should_wake()
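The io_should_wake() hits above are the wake predicate for CQ waiters: io_cqring_wait() records cq.head + min_events as iowq->cq_tail (line 7594 in the listing below), and the predicate subtracts that target from the free-running cached_cq_tail in unsigned arithmetic and reads the result as signed, so the "enough completions posted" test stays correct across 32-bit counter wraparound. A minimal stand-alone sketch of that style of comparison, with made-up names (enough_events, wait_target) rather than the io_uring code itself, might look like:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Sketch only: both counters are free-running u32 values, so the
 * subtraction is done in unsigned arithmetic and the result is
 * reinterpreted as signed.  The test stays correct across 32-bit
 * wraparound as long as the two counters are less than 2^31 apart,
 * which is the same style of check io_should_wake() relies on.
 */
static int enough_events(uint32_t cq_tail_now, uint32_t wait_target)
{
        int dist = (int)(cq_tail_now - wait_target);
        return dist >= 0;
}

int main(void)
{
        uint32_t head = 0xfffffffeu;        /* CQ head just before wrapping */
        uint32_t target = head + 4;         /* wait for 4 more completions */

        assert(!enough_events(head + 3, target));  /* only 3 posted: keep sleeping */
        assert(enough_events(head + 4, target));   /* 4 posted: wake up */
        printf("wraparound-safe comparison holds\n");
        return 0;
}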
7505 struct io_wait_queue *iowq = container_of(curr, struct io_wait_queue, in io_wake_function() local
7512 if (io_should_wake(iowq) || test_bit(0, &iowq->ctx->check_cq_overflow)) in io_wake_function()
7530 struct io_wait_queue *iowq, in io_cqring_wait_schedule() argument
7537 if (ret || io_should_wake(iowq)) in io_cqring_wait_schedule()
7555 struct io_wait_queue iowq; in io_cqring_wait() local
7589 init_waitqueue_func_entry(&iowq.wq, io_wake_function); in io_cqring_wait()
7590 iowq.wq.private = current; in io_cqring_wait()
7591 INIT_LIST_HEAD(&iowq.wq.entry); in io_cqring_wait()
7592 iowq.ctx = ctx; in io_cqring_wait()
7593 iowq.nr_timeouts = atomic_read(&ctx->cq_timeouts); in io_cqring_wait()
7594 iowq.cq_tail = READ_ONCE(ctx->rings->cq.head) + min_events; in io_cqring_wait()
7603 prepare_to_wait_exclusive(&ctx->cq_wait, &iowq.wq, in io_cqring_wait()
7605 ret = io_cqring_wait_schedule(ctx, &iowq, &timeout); in io_cqring_wait()
7606 finish_wait(&ctx->cq_wait, &iowq.wq); in io_cqring_wait()
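Taken together, the io_cqring_wait() lines show the standard open-coded waitqueue pattern: a wait_queue_entry with a custom wake callback is initialized by hand, armed with prepare_to_wait_exclusive(), the task schedules until the callback decides the condition holds, and finish_wait() tears it down. The following is a hedged, generic sketch of that same pattern over a plain counter, not the io_uring code itself; my_wq, my_waiter, my_wake_function, event_count and wait_for_events are invented names, and timeout handling is omitted:

#include <linux/atomic.h>
#include <linux/sched.h>
#include <linux/sched/signal.h>
#include <linux/wait.h>

static DECLARE_WAIT_QUEUE_HEAD(my_wq);
static atomic_t event_count = ATOMIC_INIT(0);

struct my_waiter {
        struct wait_queue_entry wq;
        int target;
};

/*
 * Runs under the waitqueue lock when someone calls wake_up(&my_wq).
 * Only pass the wakeup on to the sleeping task when its private
 * condition holds, mirroring what io_wake_function() does with
 * io_should_wake().
 */
static int my_wake_function(struct wait_queue_entry *curr, unsigned int mode,
                            int wake_flags, void *key)
{
        struct my_waiter *w = container_of(curr, struct my_waiter, wq);

        if (atomic_read(&event_count) < w->target)
                return -1;      /* condition not met: leave this task asleep */
        return autoremove_wake_function(curr, mode, wake_flags, key);
}

/* Sleep until at least @target events have been counted or a signal arrives. */
static void wait_for_events(int target)
{
        struct my_waiter w = { .target = target };

        init_waitqueue_func_entry(&w.wq, my_wake_function);
        w.wq.private = current;
        INIT_LIST_HEAD(&w.wq.entry);

        do {
                prepare_to_wait_exclusive(&my_wq, &w.wq, TASK_INTERRUPTIBLE);
                if (atomic_read(&event_count) >= w.target)
                        break;
                schedule();
        } while (!signal_pending(current));
        finish_wait(&my_wq, &w.wq);
}

A producer side would increment event_count and call wake_up(&my_wq); because the per-waiter check lives in the wake callback, waiters whose targets are not yet met are left on the queue asleep instead of being woken on every single completion, which is the point of the custom wake function in the listing above.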