Lines Matching refs:io_kiocb (each entry: source line number, the matching text, and the enclosing function where there is one)
135 struct io_kiocb *req;
147 static void io_dismantle_req(struct io_kiocb *req);
148 static void io_clean_op(struct io_kiocb *req);
149 static void io_queue_sqe(struct io_kiocb *req);
184 static bool io_match_linked(struct io_kiocb *head) in io_match_linked()
186 struct io_kiocb *req; in io_match_linked()
199 bool io_match_task_safe(struct io_kiocb *head, struct task_struct *task, in io_match_task_safe()
222 static inline void req_fail_link_node(struct io_kiocb *req, int res) in req_fail_link_node()
228 static inline void io_req_add_to_cache(struct io_kiocb *req, struct io_ring_ctx *ctx) in io_req_add_to_cache()
245 struct io_kiocb *req, *tmp; in io_fallback_req_func()
354 static bool req_need_defer(struct io_kiocb *req, u32 seq) in req_need_defer()
365 static inline void io_req_track_inflight(struct io_kiocb *req) in io_req_track_inflight()
373 static struct io_kiocb *__io_prep_linked_timeout(struct io_kiocb *req) in __io_prep_linked_timeout()
387 static inline struct io_kiocb *io_prep_linked_timeout(struct io_kiocb *req) in io_prep_linked_timeout()
394 static noinline void __io_arm_ltimeout(struct io_kiocb *req) in __io_arm_ltimeout()
399 static inline void io_arm_ltimeout(struct io_kiocb *req) in io_arm_ltimeout()
405 static void io_prep_async_work(struct io_kiocb *req) in io_prep_async_work()
433 static void io_prep_async_link(struct io_kiocb *req) in io_prep_async_link()
435 struct io_kiocb *cur; in io_prep_async_link()
450 void io_queue_iowq(struct io_kiocb *req, bool *dont_use) in io_queue_iowq()
452 struct io_kiocb *link = io_prep_linked_timeout(req); in io_queue_iowq()
728 bool io_req_cqe_overflow(struct io_kiocb *req) in io_req_cqe_overflow()
826 static void __io_req_complete_put(struct io_kiocb *req) in __io_req_complete_put()
857 void __io_req_complete_post(struct io_kiocb *req) in __io_req_complete_post()
864 void io_req_complete_post(struct io_kiocb *req) in io_req_complete_post()
873 inline void __io_req_complete(struct io_kiocb *req, unsigned issue_flags) in __io_req_complete()
878 void io_req_complete_failed(struct io_kiocb *req, s32 res) in io_req_complete_failed()
893 static void io_preinit_req(struct io_kiocb *req, struct io_ring_ctx *ctx) in io_preinit_req()
950 struct io_kiocb *req = reqs[i]; in __io_alloc_req_refill()
958 static inline void io_dismantle_req(struct io_kiocb *req) in io_dismantle_req()
968 __cold void io_free_req(struct io_kiocb *req) in io_free_req()
982 static void __io_req_find_next_prep(struct io_kiocb *req) in __io_req_find_next_prep()
991 static inline struct io_kiocb *io_req_find_next(struct io_kiocb *req) in io_req_find_next()
993 struct io_kiocb *nxt; in io_req_find_next()
1030 struct io_kiocb *req = container_of(node, struct io_kiocb, in handle_tw_list()
1033 prefetch(container_of(next, struct io_kiocb, io_task_work.node)); in handle_tw_list()
1108 static void io_req_local_work_add(struct io_kiocb *req) in io_req_local_work_add()
1130 static inline void __io_req_task_work_add(struct io_kiocb *req, bool allow_local) in __io_req_task_work_add()
1154 req = container_of(node, struct io_kiocb, io_task_work.node); in __io_req_task_work_add()
1162 void io_req_task_work_add(struct io_kiocb *req) in io_req_task_work_add()
1173 struct io_kiocb *req = container_of(node, struct io_kiocb, in io_move_task_work_from_local()
1197 struct io_kiocb *req = container_of(node, struct io_kiocb, in __io_run_local_work()
1199 prefetch(container_of(next, struct io_kiocb, io_task_work.node)); in __io_run_local_work()
1240 static void io_req_tw_post(struct io_kiocb *req, bool *locked) in io_req_tw_post()
1245 void io_req_tw_post_queue(struct io_kiocb *req, s32 res, u32 cflags) in io_req_tw_post_queue()
1252 static void io_req_task_cancel(struct io_kiocb *req, bool *locked) in io_req_task_cancel()
1259 void io_req_task_submit(struct io_kiocb *req, bool *locked) in io_req_task_submit()
1269 void io_req_task_queue_fail(struct io_kiocb *req, int ret) in io_req_task_queue_fail()
1276 void io_req_task_queue(struct io_kiocb *req) in io_req_task_queue()
1282 void io_queue_next(struct io_kiocb *req) in io_queue_next()
1284 struct io_kiocb *nxt = io_req_find_next(req); in io_queue_next()
1297 struct io_kiocb *req = container_of(node, struct io_kiocb, in io_free_batch_list()
1348 struct io_kiocb *req = container_of(node, struct io_kiocb, in __io_submit_flush_completions()
1364 static inline struct io_kiocb *io_put_req_find_next(struct io_kiocb *req) in io_put_req_find_next()
1366 struct io_kiocb *nxt = NULL; in io_put_req_find_next()
1477 void io_req_task_complete(struct io_kiocb *req, bool *locked) in io_req_task_complete()
1497 static void io_iopoll_req_issued(struct io_kiocb *req, unsigned int issue_flags) in io_iopoll_req_issued()
1514 struct io_kiocb *list_req; in io_iopoll_req_issued()
1516 list_req = container_of(ctx->iopoll_list.first, struct io_kiocb, in io_iopoll_req_issued()
1597 bool io_alloc_async_data(struct io_kiocb *req) in io_alloc_async_data()
1608 int io_req_prep_async(struct io_kiocb *req) in io_req_prep_async()
1626 static u32 io_get_sequence(struct io_kiocb *req) in io_get_sequence()
1629 struct io_kiocb *cur; in io_get_sequence()
1637 static __cold void io_drain_req(struct io_kiocb *req) in io_drain_req()
1682 static void io_clean_op(struct io_kiocb *req) in io_clean_op()
1715 static bool io_assign_file(struct io_kiocb *req, unsigned int issue_flags) in io_assign_file()
1728 static int io_issue_sqe(struct io_kiocb *req, unsigned int issue_flags) in io_issue_sqe()
1766 int io_poll_issue(struct io_kiocb *req, bool *locked) in io_poll_issue()
1776 struct io_kiocb *req = container_of(work, struct io_kiocb, work); in io_wq_free_work()
1784 struct io_kiocb *req = container_of(work, struct io_kiocb, work); in io_wq_submit_work()
1847 inline struct file *io_file_get_fixed(struct io_kiocb *req, int fd, in io_file_get_fixed()
1870 struct file *io_file_get_normal(struct io_kiocb *req, int fd) in io_file_get_normal()
1882 static void io_queue_async(struct io_kiocb *req, int ret) in io_queue_async()
1885 struct io_kiocb *linked_timeout; in io_queue_async()
1911 static inline void io_queue_sqe(struct io_kiocb *req) in io_queue_sqe()
1928 static void io_queue_sqe_fallback(struct io_kiocb *req) in io_queue_sqe_fallback()
1957 struct io_kiocb *req, in io_check_restriction()
1974 static void io_init_req_drain(struct io_kiocb *req) in io_init_req_drain()
1977 struct io_kiocb *head = ctx->submit_state.link.head; in io_init_req_drain()
1993 static int io_init_req(struct io_ring_ctx *ctx, struct io_kiocb *req, in io_init_req()
2088 struct io_kiocb *req, int ret) in io_submit_fail_init()
2092 struct io_kiocb *head = link->head; in io_submit_fail_init()
2124 static inline int io_submit_sqe(struct io_ring_ctx *ctx, struct io_kiocb *req, in io_submit_sqe()
2270 struct io_kiocb *req; in io_submit_sqes()
2574 struct io_kiocb *req = io_alloc_req(ctx); in io_req_caches_free()
2720 struct io_kiocb *req = container_of(work, struct io_kiocb, work); in io_cancel_ctx_cb()
2847 struct io_kiocb *req = container_of(work, struct io_kiocb, work); in io_cancel_task_cb()
4154 req_cachep = KMEM_CACHE(io_kiocb, SLAB_HWCACHE_ALIGN | SLAB_PANIC | in io_uring_init()
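Several of the matches above recover an io_kiocb from an embedded member rather than from a direct pointer, e.g. container_of(node, struct io_kiocb, io_task_work.node) in handle_tw_list() (line 1030) and container_of(work, struct io_kiocb, work) in io_wq_free_work() (line 1776). As a reading aid, here is a minimal, self-contained userspace sketch of that container_of idiom; struct fake_req, struct fake_node and their fields are illustrative stand-ins, not the real io_kiocb layout:

#include <stddef.h>
#include <stdio.h>

/*
 * container_of(): recover a pointer to the enclosing structure from a
 * pointer to one of its embedded members. This is the plain
 * offsetof-based form; the kernel macro adds type checking on top of
 * the same pointer arithmetic.
 */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

/* Illustrative stand-in for a request with an embedded work node. */
struct fake_node {
        struct fake_node *next;
};

struct fake_req {
        int result;
        struct fake_node io_task_work;  /* embedded member */
};

int main(void)
{
        struct fake_req req = { .result = 42 };
        struct fake_node *node = &req.io_task_work;

        /* Given only the embedded node, get back to the owning request. */
        struct fake_req *owner = container_of(node, struct fake_req, io_task_work);

        printf("result via container_of: %d\n", owner->result);
        return 0;
}

This is why the task-work and io-wq entries above can pass around bare list/work nodes and still reach the owning request without storing a back-pointer.

The last match (line 4154) creates the dedicated slab cache that backs io_kiocb allocation. A minimal sketch of that pattern in a hypothetical kernel module, using only the flags visible in the truncated match (the demo_req type and names are stand-ins, not io_uring code):

#include <linux/module.h>
#include <linux/slab.h>

struct demo_req {
        int res;
};

static struct kmem_cache *demo_cachep;

static int __init demo_init(void)
{
        struct demo_req *req;

        /* KMEM_CACHE() names and sizes the cache from the struct itself;
         * SLAB_PANIC makes creation failure fatal, so no NULL check. */
        demo_cachep = KMEM_CACHE(demo_req, SLAB_HWCACHE_ALIGN | SLAB_PANIC);

        req = kmem_cache_alloc(demo_cachep, GFP_KERNEL);
        if (req)
                kmem_cache_free(demo_cachep, req);
        return 0;
}

static void __exit demo_exit(void)
{
        kmem_cache_destroy(demo_cachep);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");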