Searched refs:io_work (Results 1 – 9 of 9) sorted by relevance
/Linux-v5.4/drivers/nvme/target/ |
D | tcp.c |
      86  struct work_struct io_work;  member
     492  queue_work_on(cmd->queue->cpu, nvmet_tcp_wq, &cmd->queue->io_work);  in nvmet_tcp_queue_response()
    1164  container_of(w, struct nvmet_tcp_queue, io_work);  in nvmet_tcp_io_work()
    1200  queue_work_on(queue->cpu, nvmet_tcp_wq, &queue->io_work);  in nvmet_tcp_io_work()
    1339  flush_work(&queue->io_work);  in nvmet_tcp_release_queue_work()
    1343  cancel_work_sync(&queue->io_work);  in nvmet_tcp_release_queue_work()
    1360  queue_work_on(queue->cpu, nvmet_tcp_wq, &queue->io_work);  in nvmet_tcp_data_ready()
    1380  queue_work_on(queue->cpu, nvmet_tcp_wq, &queue->io_work);  in nvmet_tcp_write_space()
    1472  INIT_WORK(&queue->io_work, nvmet_tcp_io_work);  in nvmet_tcp_alloc_queue()
    1509  queue_work_on(queue->cpu, nvmet_tcp_wq, &queue->io_work);  in nvmet_tcp_alloc_queue()
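The hits above trace one complete use of the workqueue API: nvmet-tcp embeds io_work in its per-queue structure, initializes it with INIT_WORK(), kicks it with queue_work_on() so processing stays on the queue's CPU, lets the handler requeue itself, and quiesces it with flush_work()/cancel_work_sync() before the queue is freed. A minimal sketch of that shape follows; demo_queue, demo_wq and the handler body are invented for illustration, only the API calls mirror the listing.

#include <linux/kernel.h>	/* container_of() */
#include <linux/types.h>
#include <linux/workqueue.h>

/* Invented stand-in for struct nvmet_tcp_queue: the work item is embedded in
 * the per-queue state it operates on. */
struct demo_queue {
	int			cpu;		/* CPU the queue is pinned to */
	struct work_struct	io_work;
	bool			more_pending;	/* stand-in for "budget exhausted" */
};

static struct workqueue_struct *demo_wq;	/* stand-in for nvmet_tcp_wq */

/* Worker: recover the owning queue, process for a while, requeue if needed. */
static void demo_io_work(struct work_struct *w)
{
	struct demo_queue *queue = container_of(w, struct demo_queue, io_work);

	/* ... transmit/receive until a budget runs out ... */

	/* Self-requeue on the same CPU, like the queue_work_on() call
	 * inside nvmet_tcp_io_work(). */
	if (queue->more_pending)
		queue_work_on(queue->cpu, demo_wq, &queue->io_work);
}

static void demo_queue_start(struct demo_queue *queue, int cpu)
{
	queue->cpu = cpu;
	queue->more_pending = false;
	INIT_WORK(&queue->io_work, demo_io_work);		/* cf. nvmet_tcp_alloc_queue() */
	queue_work_on(queue->cpu, demo_wq, &queue->io_work);	/* first kick */
}

static void demo_queue_release(struct demo_queue *queue)
{
	/* Quiesce before freeing the queue, mirroring the flush_work() and
	 * cancel_work_sync() pair in nvmet_tcp_release_queue_work();
	 * cancel_work_sync() copes with a work item that requeues itself. */
	flush_work(&queue->io_work);
	cancel_work_sync(&queue->io_work);
}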
/Linux-v5.4/drivers/s390/cio/ |
D | vfio_ccw_drv.c |
      87  private = container_of(work, struct vfio_ccw_private, io_work);  in vfio_ccw_sch_io_todo()
     162  INIT_WORK(&private->io_work, vfio_ccw_sch_io_todo);  in vfio_ccw_sch_probe()
D | vfio_ccw_private.h |
      96  struct work_struct io_work;  member
D | vfio_ccw_fsm.c |
     362  queue_work(vfio_ccw_work_q, &private->io_work);  in fsm_irq()
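The three vfio-ccw hits split the same pattern across files: the work_struct lives in vfio_ccw_private (vfio_ccw_private.h), is initialized at probe time (vfio_ccw_drv.c), is queued from the interrupt FSM (vfio_ccw_fsm.c), and the handler recovers its private structure with container_of(). A rough sketch with invented names (demo_private, demo_work_q):

#include <linux/kernel.h>	/* container_of() */
#include <linux/workqueue.h>

/* Invented stand-in for struct vfio_ccw_private (the real member is declared
 * in vfio_ccw_private.h). */
struct demo_private {
	struct work_struct io_work;
	/* ... interrupt status captured for later processing ... */
};

static struct workqueue_struct *demo_work_q;	/* stand-in for vfio_ccw_work_q */

/* Process-context half: may sleep, take mutexes, signal userspace. */
static void demo_io_todo(struct work_struct *work)
{
	struct demo_private *private =
		container_of(work, struct demo_private, io_work);

	/* ... consume the state the interrupt path left behind ... */
	(void)private;
}

/* Probe: wire the handler once, as vfio_ccw_sch_probe() does. */
static void demo_probe(struct demo_private *private)
{
	INIT_WORK(&private->io_work, demo_io_todo);
}

/* Interrupt path: must not sleep, so it only hands off to the workqueue,
 * as fsm_irq() does via vfio_ccw_work_q. */
static void demo_irq(struct demo_private *private)
{
	queue_work(demo_work_q, &private->io_work);
}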
/Linux-v5.4/drivers/scsi/qedf/ |
D | qedf_main.c |
    2096  struct qedf_io_work *io_work;  in qedf_process_completions() local
    2158  io_work = mempool_alloc(qedf->io_mempool, GFP_ATOMIC);  in qedf_process_completions()
    2159  if (!io_work) {  in qedf_process_completions()
    2164  memset(io_work, 0, sizeof(struct qedf_io_work));  in qedf_process_completions()
    2166  INIT_WORK(&io_work->work, qedf_fp_io_handler);  in qedf_process_completions()
    2169  memcpy(&io_work->cqe, cqe, sizeof(struct fcoe_cqe));  in qedf_process_completions()
    2171  io_work->qedf = fp->qedf;  in qedf_process_completions()
    2172  io_work->fp = NULL; /* Only used for unsolicited frames */  in qedf_process_completions()
    2174  queue_work_on(cpu, qedf_io_wq, &io_work->work);  in qedf_process_completions()
    2560  struct qedf_io_work *io_work =  in qedf_fp_io_handler() local
    [all …]
D | qedf_io.c |
    2511  struct qedf_io_work *io_work;  in qedf_process_unsol_compl() local
    2575  io_work = mempool_alloc(qedf->io_mempool, GFP_ATOMIC);  in qedf_process_unsol_compl()
    2576  if (!io_work) {  in qedf_process_unsol_compl()
    2582  memset(io_work, 0, sizeof(struct qedf_io_work));  in qedf_process_unsol_compl()
    2584  INIT_WORK(&io_work->work, qedf_fp_io_handler);  in qedf_process_unsol_compl()
    2587  memcpy(&io_work->cqe, cqe, sizeof(struct fcoe_cqe));  in qedf_process_unsol_compl()
    2589  io_work->qedf = qedf;  in qedf_process_unsol_compl()
    2590  io_work->fp = fp;  in qedf_process_unsol_compl()
    2592  queue_work_on(smp_processor_id(), qedf_io_wq, &io_work->work);  in qedf_process_unsol_compl()
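Both qedf hits use a different shape: instead of one embedded work item, every completion gets its own qedf_io_work, allocated from a mempool with GFP_ATOMIC because the completion path cannot sleep, filled with a private copy of the CQE, and queued on the current CPU. A hedged sketch of that one-shot pattern with invented names (demo_io_work, demo_io_wq, demo_io_mempool); the body of qedf_fp_io_handler() is not in the listing, so freeing back to the pool in the handler is an assumption here.

#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/kernel.h>	/* container_of() */
#include <linux/mempool.h>
#include <linux/smp.h>		/* smp_processor_id() */
#include <linux/string.h>	/* memset(), memcpy() */
#include <linux/types.h>
#include <linux/workqueue.h>

struct demo_cqe { u32 raw[8]; };	/* invented stand-in for struct fcoe_cqe */

/* Invented stand-in for struct qedf_io_work: one work item per completion. */
struct demo_io_work {
	struct work_struct	work;
	struct demo_cqe		cqe;	/* private copy of the completion entry */
};

static struct workqueue_struct *demo_io_wq;	/* stand-in for qedf_io_wq */
static mempool_t *demo_io_mempool;		/* stand-in for qedf->io_mempool */

/* One-shot handler; returning the item to the pool here is an assumption. */
static void demo_fp_io_handler(struct work_struct *work)
{
	struct demo_io_work *io_work =
		container_of(work, struct demo_io_work, work);

	/* ... decode io_work->cqe and complete the command ... */

	mempool_free(io_work, demo_io_mempool);
}

/* Called from the fast-path completion handler, atomic context. */
static int demo_process_completion(struct demo_cqe *cqe)
{
	struct demo_io_work *io_work;

	io_work = mempool_alloc(demo_io_mempool, GFP_ATOMIC);
	if (!io_work)
		return -ENOMEM;

	memset(io_work, 0, sizeof(*io_work));
	INIT_WORK(&io_work->work, demo_fp_io_handler);
	/* Copy the CQE so the hardware ring slot can be reused right away. */
	memcpy(&io_work->cqe, cqe, sizeof(io_work->cqe));

	/* Stay on the CPU that took the completion, like qedf's
	 * queue_work_on(smp_processor_id(), qedf_io_wq, ...). */
	queue_work_on(smp_processor_id(), demo_io_wq, &io_work->work);
	return 0;
}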
/Linux-v5.4/drivers/memstick/core/ |
D | ms_block.h |
     161  struct work_struct io_work;  member
D | ms_block.c |
    1497  queue_work(msb->io_queue, &msb->io_work);  in msb_cache_flush_timer()
    1869  struct msb_data *msb = container_of(work, struct msb_data, io_work);  in msb_io_work()
    2027  queue_work(msb->io_queue, &msb->io_work);  in msb_queue_rq()
    2084  queue_work(msb->io_queue, &msb->io_work);  in msb_start()
    2148  INIT_WORK(&msb->io_work, msb_io_work);  in msb_init_disk()
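ms_block shows the simplest variant: a single io_work on the driver's own workqueue, poked from several unrelated places (a cache-flush timer, the block request path, device start). Because queue_work() on an already-pending work item is a no-op, the triggers can overlap freely. A short sketch with invented names (demo_msb, the "demo_msb_io" queue); the ordered workqueue is an illustrative choice, not taken from the listing.

#include <linux/errno.h>
#include <linux/kernel.h>	/* container_of() */
#include <linux/workqueue.h>

/* Invented stand-in for struct msb_data. */
struct demo_msb {
	struct workqueue_struct	*io_queue;	/* cf. msb->io_queue */
	struct work_struct	io_work;
};

static void demo_msb_io_work(struct work_struct *work)
{
	struct demo_msb *msb = container_of(work, struct demo_msb, io_work);

	/* ... issue/complete block requests, flush the write cache ... */
	(void)msb;
}

static int demo_msb_init(struct demo_msb *msb)
{
	INIT_WORK(&msb->io_work, demo_msb_io_work);	/* cf. msb_init_disk() */
	/* An ordered queue keeps the single worker strictly serialized. */
	msb->io_queue = alloc_ordered_workqueue("demo_msb_io", WQ_MEM_RECLAIM);
	return msb->io_queue ? 0 : -ENOMEM;
}

/* Any trigger (timer, queue_rq, start) just pokes the same work item;
 * if it is already pending, queue_work() returns false and adds nothing. */
static void demo_msb_kick(struct demo_msb *msb)
{
	queue_work(msb->io_queue, &msb->io_work);
}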
/Linux-v5.4/drivers/nvme/host/ |
D | tcp.c |
      64  struct work_struct io_work;  member
     260  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_queue_request()
     792  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_data_ready()
     804  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_write_space()
    1044  container_of(w, struct nvme_tcp_queue, io_work);  in nvme_tcp_io_work()
    1072  queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);  in nvme_tcp_io_work()
    1257  INIT_WORK(&queue->io_work, nvme_tcp_io_work);  in nvme_tcp_alloc_queue()
    1425  cancel_work_sync(&queue->io_work);  in __nvme_tcp_stop_queue()
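The host-side nvme-tcp hits mirror the target: the extra detail is that the socket's data_ready and write_space callbacks do nothing but requeue io_work on the queue's io_cpu, and __nvme_tcp_stop_queue() relies on cancel_work_sync() once the callbacks are torn down. A hedged sketch of that callback wiring, assuming the usual sk_user_data convention; demo_tcp_queue, demo_tcp_wq and the function bodies are invented, and the worker itself (same container_of()-and-requeue shape as the target sketch) is omitted.

#include <linux/workqueue.h>
#include <net/sock.h>

/* Invented stand-in for struct nvme_tcp_queue; io_work is assumed to be
 * INIT_WORK()ed at queue allocation time (cf. nvme_tcp_alloc_queue()). */
struct demo_tcp_queue {
	struct socket		*sock;
	int			io_cpu;		/* cf. nvme_tcp_queue::io_cpu */
	struct work_struct	io_work;
};

static struct workqueue_struct *demo_tcp_wq;	/* stand-in for nvme_tcp_wq */

/* Runs in softirq context when data arrives: no I/O here, just schedule
 * the worker on the CPU this queue is pinned to. */
static void demo_tcp_data_ready(struct sock *sk)
{
	struct demo_tcp_queue *queue;

	read_lock_bh(&sk->sk_callback_lock);
	queue = sk->sk_user_data;
	if (queue)
		queue_work_on(queue->io_cpu, demo_tcp_wq, &queue->io_work);
	read_unlock_bh(&sk->sk_callback_lock);
}

static void demo_tcp_setup_callbacks(struct demo_tcp_queue *queue)
{
	struct sock *sk = queue->sock->sk;

	write_lock_bh(&sk->sk_callback_lock);
	sk->sk_user_data = queue;
	sk->sk_data_ready = demo_tcp_data_ready;
	/* sk_write_space would be wired the same way. */
	write_unlock_bh(&sk->sk_callback_lock);
}

static void demo_tcp_stop_queue(struct demo_tcp_queue *queue)
{
	/* After the original socket callbacks are restored (not shown),
	 * make sure no io_work is pending or still running. */
	cancel_work_sync(&queue->io_work);
}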