Lines matching refs: iod (the LS request context, struct nvmet_fc_ls_iod, in the NVMe Fibre Channel target transport, drivers/nvme/target/fc.c)
93 struct nvmet_fc_ls_iod *iod; member
153 return (iodptr - iodptr->tgtport->iod); in nvmet_fc_iodnum()
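
These first hits are the per-port context array and its index helper: tgtport->iod points at a kcalloc'd array of NVMET_LS_CTX_COUNT LS contexts, so a context's number is plain pointer arithmetic. A minimal reconstruction of the helper behind the hit at the return statement:

    /* an iod's number is simply its offset from the start of the
     * tgtport->iod array allocated in nvmet_fc_alloc_ls_iodlist() */
    static inline int
    nvmet_fc_iodnum(struct nvmet_fc_ls_iod *iodptr)
    {
        return (iodptr - iodptr->tgtport->iod);
    }
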
327 struct nvmet_fc_ls_iod *iod; in nvmet_fc_alloc_ls_iodlist() local
330 iod = kcalloc(NVMET_LS_CTX_COUNT, sizeof(struct nvmet_fc_ls_iod), in nvmet_fc_alloc_ls_iodlist()
332 if (!iod) in nvmet_fc_alloc_ls_iodlist()
335 tgtport->iod = iod; in nvmet_fc_alloc_ls_iodlist()
337 for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) { in nvmet_fc_alloc_ls_iodlist()
338 INIT_WORK(&iod->work, nvmet_fc_handle_ls_rqst_work); in nvmet_fc_alloc_ls_iodlist()
339 iod->tgtport = tgtport; in nvmet_fc_alloc_ls_iodlist()
340 list_add_tail(&iod->ls_list, &tgtport->ls_list); in nvmet_fc_alloc_ls_iodlist()
342 iod->rqstbuf = kcalloc(2, NVME_FC_MAX_LS_BUFFER_SIZE, in nvmet_fc_alloc_ls_iodlist()
344 if (!iod->rqstbuf) in nvmet_fc_alloc_ls_iodlist()
347 iod->rspbuf = iod->rqstbuf + NVME_FC_MAX_LS_BUFFER_SIZE; in nvmet_fc_alloc_ls_iodlist()
349 iod->rspdma = fc_dma_map_single(tgtport->dev, iod->rspbuf, in nvmet_fc_alloc_ls_iodlist()
352 if (fc_dma_mapping_error(tgtport->dev, iod->rspdma)) in nvmet_fc_alloc_ls_iodlist()
359 kfree(iod->rqstbuf); in nvmet_fc_alloc_ls_iodlist()
360 list_del(&iod->ls_list); in nvmet_fc_alloc_ls_iodlist()
361 for (iod--, i--; i >= 0; iod--, i--) { in nvmet_fc_alloc_ls_iodlist()
362 fc_dma_unmap_single(tgtport->dev, iod->rspdma, in nvmet_fc_alloc_ls_iodlist()
364 kfree(iod->rqstbuf); in nvmet_fc_alloc_ls_iodlist()
365 list_del(&iod->ls_list); in nvmet_fc_alloc_ls_iodlist()
368 kfree(iod); in nvmet_fc_alloc_ls_iodlist()
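
The nvmet_fc_alloc_ls_iodlist() hits show the whole LS context pool being built up front: one array of NVMET_LS_CTX_COUNT entries, and per entry a work item, a slot on the port's free list, a single allocation holding the request and response buffers back to back, and a DMA mapping of the response half only, since only the response is handed to the hardware for transmit. A condensed, lightly adapted sketch of that constructor and its reverse-order unwind (the matching teardown is the nvmet_fc_free_ls_iodlist() hits just below):

    static int
    nvmet_fc_alloc_ls_iodlist(struct nvmet_fc_tgtport *tgtport)
    {
        struct nvmet_fc_ls_iod *iod;
        int i;

        iod = kcalloc(NVMET_LS_CTX_COUNT, sizeof(*iod), GFP_KERNEL);
        if (!iod)
            return -ENOMEM;
        tgtport->iod = iod;

        for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) {
            INIT_WORK(&iod->work, nvmet_fc_handle_ls_rqst_work);
            iod->tgtport = tgtport;
            list_add_tail(&iod->ls_list, &tgtport->ls_list);

            /* one allocation covers the request and response buffers */
            iod->rqstbuf = kcalloc(2, NVME_FC_MAX_LS_BUFFER_SIZE, GFP_KERNEL);
            if (!iod->rqstbuf)
                goto out_fail;
            iod->rspbuf = iod->rqstbuf + NVME_FC_MAX_LS_BUFFER_SIZE;

            /* only the response half is transmitted, so only it is mapped */
            iod->rspdma = fc_dma_map_single(tgtport->dev, iod->rspbuf,
                            NVME_FC_MAX_LS_BUFFER_SIZE, DMA_TO_DEVICE);
            if (fc_dma_mapping_error(tgtport->dev, iod->rspdma))
                goto out_fail;
        }
        return 0;

    out_fail:
        /* undo the partially initialized entry, walk back over the fully
         * initialized ones, then drop the backing array */
        kfree(iod->rqstbuf);
        list_del(&iod->ls_list);
        for (iod--, i--; i >= 0; iod--, i--) {
            fc_dma_unmap_single(tgtport->dev, iod->rspdma,
                        NVME_FC_MAX_LS_BUFFER_SIZE, DMA_TO_DEVICE);
            kfree(iod->rqstbuf);
            list_del(&iod->ls_list);
        }
        kfree(tgtport->iod);
        return -EFAULT;
    }
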
376 struct nvmet_fc_ls_iod *iod = tgtport->iod; in nvmet_fc_free_ls_iodlist() local
379 for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) { in nvmet_fc_free_ls_iodlist()
381 iod->rspdma, NVME_FC_MAX_LS_BUFFER_SIZE, in nvmet_fc_free_ls_iodlist()
383 kfree(iod->rqstbuf); in nvmet_fc_free_ls_iodlist()
384 list_del(&iod->ls_list); in nvmet_fc_free_ls_iodlist()
386 kfree(tgtport->iod); in nvmet_fc_free_ls_iodlist()
392 struct nvmet_fc_ls_iod *iod; in nvmet_fc_alloc_ls_iod() local
396 iod = list_first_entry_or_null(&tgtport->ls_list, in nvmet_fc_alloc_ls_iod()
398 if (iod) in nvmet_fc_alloc_ls_iod()
399 list_move_tail(&iod->ls_list, &tgtport->ls_busylist); in nvmet_fc_alloc_ls_iod()
401 return iod; in nvmet_fc_alloc_ls_iod()
407 struct nvmet_fc_ls_iod *iod) in nvmet_fc_free_ls_iod() argument
412 list_move(&iod->ls_list, &tgtport->ls_list); in nvmet_fc_free_ls_iod()
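
Per-request allocation is then simple free-list management: nvmet_fc_alloc_ls_iod() pops the first free context and parks it on ls_busylist, and nvmet_fc_free_ls_iod() moves it back once the response has gone out. A stripped-down sketch; in the driver both helpers run under the target port's spinlock, which is omitted here:

    static struct nvmet_fc_ls_iod *
    nvmet_fc_alloc_ls_iod(struct nvmet_fc_tgtport *tgtport)
    {
        struct nvmet_fc_ls_iod *iod;

        iod = list_first_entry_or_null(&tgtport->ls_list,
                        struct nvmet_fc_ls_iod, ls_list);
        if (iod)
            list_move_tail(&iod->ls_list, &tgtport->ls_busylist);
        return iod;
    }

    static void
    nvmet_fc_free_ls_iod(struct nvmet_fc_tgtport *tgtport,
                struct nvmet_fc_ls_iod *iod)
    {
        list_move(&iod->ls_list, &tgtport->ls_list);
    }
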
1322 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_create_association() argument
1325 (struct fcnvme_ls_cr_assoc_rqst *)iod->rqstbuf; in nvmet_fc_ls_create_association()
1327 (struct fcnvme_ls_cr_assoc_acc *)iod->rspbuf; in nvmet_fc_ls_create_association()
1341 if (iod->rqstdatalen < FCNVME_LSDESC_CRA_RQST_MINLEN) in nvmet_fc_ls_create_association()
1359 iod->assoc = nvmet_fc_alloc_target_assoc(tgtport); in nvmet_fc_ls_create_association()
1360 if (!iod->assoc) in nvmet_fc_ls_create_association()
1363 queue = nvmet_fc_alloc_target_queue(iod->assoc, 0, in nvmet_fc_ls_create_association()
1374 iod->lsreq->rsplen = nvmet_fc_format_rjt(acc, in nvmet_fc_ls_create_association()
1387 iod->lsreq->rsplen = sizeof(*acc); in nvmet_fc_ls_create_association()
1398 cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, 0)); in nvmet_fc_ls_create_association()
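
The Create Association handler treats iod->rqstbuf and iod->rspbuf as the LS-specific request and accept layouts, validates the request, allocates a new association plus its admin queue (queue 0), and then formats either a reject or an accept into the response buffer, recording the length in iod->lsreq->rsplen. A condensed view of that flow, with the validation reason codes simplified and most of the accept-descriptor formatting elided; the Create Connection handler in the next group has the same shape but looks up an existing association instead of allocating one:

    static void
    nvmet_fc_ls_create_association(struct nvmet_fc_tgtport *tgtport,
                struct nvmet_fc_ls_iod *iod)
    {
        struct fcnvme_ls_cr_assoc_rqst *rqst =
                (struct fcnvme_ls_cr_assoc_rqst *)iod->rqstbuf;
        struct fcnvme_ls_cr_assoc_acc *acc =
                (struct fcnvme_ls_cr_assoc_acc *)iod->rspbuf;
        struct nvmet_fc_tgt_queue *queue;
        int ret = 0;

        memset(acc, 0, sizeof(*acc));

        if (iod->rqstdatalen < FCNVME_LSDESC_CRA_RQST_MINLEN)
            ret = -EINVAL;        /* request too short to be valid */

        if (!ret) {
            iod->assoc = nvmet_fc_alloc_target_assoc(tgtport);
            if (!iod->assoc)
                ret = -ENOMEM;
            else {
                /* queue 0 is the admin queue of the new association */
                queue = nvmet_fc_alloc_target_queue(iod->assoc, 0,
                            be16_to_cpu(rqst->assoc_cmd.sqsize));
                if (!queue)
                    ret = -ENOMEM;
            }
        }

        if (ret) {
            /* build an LS_RJT in the response buffer instead of an ACC */
            iod->lsreq->rsplen = nvmet_fc_format_rjt(acc,
                    NVME_FC_MAX_LS_BUFFER_SIZE, rqst->w0.ls_cmd,
                    FCNVME_RJT_RC_LOGIC, FCNVME_RJT_EXP_NONE, 0);
            return;
        }

        /* accept: the connection id of queue 0 doubles as the association id */
        iod->lsreq->rsplen = sizeof(*acc);
        acc->associd.association_id =
                cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, 0));
        /* ... remaining accept header and descriptors filled in here ... */
    }
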
1408 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_create_connection() argument
1411 (struct fcnvme_ls_cr_conn_rqst *)iod->rqstbuf; in nvmet_fc_ls_create_connection()
1413 (struct fcnvme_ls_cr_conn_acc *)iod->rspbuf; in nvmet_fc_ls_create_connection()
1419 if (iod->rqstdatalen < sizeof(struct fcnvme_ls_cr_conn_rqst)) in nvmet_fc_ls_create_connection()
1445 iod->assoc = nvmet_fc_find_target_assoc(tgtport, in nvmet_fc_ls_create_connection()
1447 if (!iod->assoc) in nvmet_fc_ls_create_connection()
1450 queue = nvmet_fc_alloc_target_queue(iod->assoc, in nvmet_fc_ls_create_connection()
1457 nvmet_fc_tgt_a_put(iod->assoc); in nvmet_fc_ls_create_connection()
1465 iod->lsreq->rsplen = nvmet_fc_format_rjt(acc, in nvmet_fc_ls_create_connection()
1480 iod->lsreq->rsplen = sizeof(*acc); in nvmet_fc_ls_create_connection()
1490 cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, in nvmet_fc_ls_create_connection()
1496 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_disconnect() argument
1499 (struct fcnvme_ls_disconnect_rqst *)iod->rqstbuf; in nvmet_fc_ls_disconnect()
1501 (struct fcnvme_ls_disconnect_acc *)iod->rspbuf; in nvmet_fc_ls_disconnect()
1507 if (iod->rqstdatalen < sizeof(struct fcnvme_ls_disconnect_rqst)) in nvmet_fc_ls_disconnect()
1533 iod->assoc = assoc; in nvmet_fc_ls_disconnect()
1542 iod->lsreq->rsplen = nvmet_fc_format_rjt(acc, in nvmet_fc_ls_disconnect()
1555 iod->lsreq->rsplen = sizeof(*acc); in nvmet_fc_ls_disconnect()
1563 nvmet_fc_tgt_a_put(iod->assoc); in nvmet_fc_ls_disconnect()
1565 nvmet_fc_delete_target_assoc(iod->assoc); in nvmet_fc_ls_disconnect()
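
The Disconnect handler resolves the association from the ID carried in the request, builds the accept while that association is still referenced, and only then drops the lookup reference and tears the association down. Roughly, with the length checks and the association-versus-connection scope handling elided:

    static void
    nvmet_fc_ls_disconnect(struct nvmet_fc_tgtport *tgtport,
                struct nvmet_fc_ls_iod *iod)
    {
        struct fcnvme_ls_disconnect_rqst *rqst =
                (struct fcnvme_ls_disconnect_rqst *)iod->rqstbuf;
        struct fcnvme_ls_disconnect_acc *acc =
                (struct fcnvme_ls_disconnect_acc *)iod->rspbuf;
        struct nvmet_fc_tgt_assoc *assoc;

        /* the lookup takes a reference held until the accept is built */
        assoc = nvmet_fc_find_target_assoc(tgtport,
                    be64_to_cpu(rqst->associd.association_id));
        if (!assoc) {
            iod->lsreq->rsplen = nvmet_fc_format_rjt(acc,
                    NVME_FC_MAX_LS_BUFFER_SIZE, rqst->w0.ls_cmd,
                    FCNVME_RJT_RC_INV_ASSOC, FCNVME_RJT_EXP_NONE, 0);
            return;
        }
        iod->assoc = assoc;

        /* format the LS accept into iod->rspbuf */
        iod->lsreq->rsplen = sizeof(*acc);
        /* ... accept header descriptors filled in here ... */

        /* drop the lookup reference; for an association-scope disconnect
         * the association itself is then deleted */
        nvmet_fc_tgt_a_put(iod->assoc);
        nvmet_fc_delete_target_assoc(iod->assoc);
    }
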
1579 struct nvmet_fc_ls_iod *iod = lsreq->nvmet_fc_private; in nvmet_fc_xmt_ls_rsp_done() local
1580 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_xmt_ls_rsp_done()
1582 fc_dma_sync_single_for_cpu(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp_done()
1584 nvmet_fc_free_ls_iod(tgtport, iod); in nvmet_fc_xmt_ls_rsp_done()
1590 struct nvmet_fc_ls_iod *iod) in nvmet_fc_xmt_ls_rsp() argument
1594 fc_dma_sync_single_for_device(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp()
1597 ret = tgtport->ops->xmt_ls_rsp(&tgtport->fc_target_port, iod->lsreq); in nvmet_fc_xmt_ls_rsp()
1599 nvmet_fc_xmt_ls_rsp_done(iod->lsreq); in nvmet_fc_xmt_ls_rsp()
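
Sending the response brackets the DMA mapping set up at pool-allocation time: the buffer is synced for the device and handed to the LLDD through tgtport->ops->xmt_ls_rsp(); when the LLDD later signals completion through the done callback, or if the transmit is refused outright, the buffer is synced back for the CPU and the context returns to the free list. A condensed pairing of the two routines (target-port reference counting elided):

    static void
    nvmet_fc_xmt_ls_rsp_done(struct nvmefc_tgt_ls_req *lsreq)
    {
        struct nvmet_fc_ls_iod *iod = lsreq->nvmet_fc_private;
        struct nvmet_fc_tgtport *tgtport = iod->tgtport;

        fc_dma_sync_single_for_cpu(tgtport->dev, iod->rspdma,
                    NVME_FC_MAX_LS_BUFFER_SIZE, DMA_TO_DEVICE);
        nvmet_fc_free_ls_iod(tgtport, iod);
    }

    static void
    nvmet_fc_xmt_ls_rsp(struct nvmet_fc_tgtport *tgtport,
                struct nvmet_fc_ls_iod *iod)
    {
        int ret;

        fc_dma_sync_single_for_device(tgtport->dev, iod->rspdma,
                    NVME_FC_MAX_LS_BUFFER_SIZE, DMA_TO_DEVICE);

        ret = tgtport->ops->xmt_ls_rsp(&tgtport->fc_target_port, iod->lsreq);
        if (ret)
            /* the LLDD refused the send: clean up as if it had completed */
            nvmet_fc_xmt_ls_rsp_done(iod->lsreq);
    }
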
1607 struct nvmet_fc_ls_iod *iod) in nvmet_fc_handle_ls_rqst() argument
1610 (struct fcnvme_ls_rqst_w0 *)iod->rqstbuf; in nvmet_fc_handle_ls_rqst()
1612 iod->lsreq->nvmet_fc_private = iod; in nvmet_fc_handle_ls_rqst()
1613 iod->lsreq->rspbuf = iod->rspbuf; in nvmet_fc_handle_ls_rqst()
1614 iod->lsreq->rspdma = iod->rspdma; in nvmet_fc_handle_ls_rqst()
1615 iod->lsreq->done = nvmet_fc_xmt_ls_rsp_done; in nvmet_fc_handle_ls_rqst()
1617 iod->lsreq->rsplen = 0; in nvmet_fc_handle_ls_rqst()
1619 iod->assoc = NULL; in nvmet_fc_handle_ls_rqst()
1629 nvmet_fc_ls_create_association(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1633 nvmet_fc_ls_create_connection(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1637 nvmet_fc_ls_disconnect(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1640 iod->lsreq->rsplen = nvmet_fc_format_rjt(iod->rspbuf, in nvmet_fc_handle_ls_rqst()
1645 nvmet_fc_xmt_ls_rsp(tgtport, iod); in nvmet_fc_handle_ls_rqst()
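
nvmet_fc_handle_ls_rqst() is the dispatcher: it points the LLDD's LS request structure at the iod's response buffer and DMA address, switches on the ls_cmd byte at the start of the received payload, lets the matching handler fill in the response, and rejects anything unrecognised, before transmitting whatever ended up in the buffer. Reassembled from the hits above:

    static void
    nvmet_fc_handle_ls_rqst(struct nvmet_fc_tgtport *tgtport,
                struct nvmet_fc_ls_iod *iod)
    {
        struct fcnvme_ls_rqst_w0 *w0 =
                (struct fcnvme_ls_rqst_w0 *)iod->rqstbuf;

        iod->lsreq->nvmet_fc_private = iod;
        iod->lsreq->rspbuf = iod->rspbuf;
        iod->lsreq->rspdma = iod->rspdma;
        iod->lsreq->done = nvmet_fc_xmt_ls_rsp_done;
        iod->lsreq->rsplen = 0;        /* handlers set the real length */

        iod->assoc = NULL;

        switch (w0->ls_cmd) {
        case FCNVME_LS_CREATE_ASSOCIATION:
            nvmet_fc_ls_create_association(tgtport, iod);
            break;
        case FCNVME_LS_CREATE_CONNECTION:
            nvmet_fc_ls_create_connection(tgtport, iod);
            break;
        case FCNVME_LS_DISCONNECT:
            nvmet_fc_ls_disconnect(tgtport, iod);
            break;
        default:
            iod->lsreq->rsplen = nvmet_fc_format_rjt(iod->rspbuf,
                    NVME_FC_MAX_LS_BUFFER_SIZE, w0->ls_cmd,
                    FCNVME_RJT_RC_INVAL, FCNVME_RJT_EXP_NONE, 0);
        }

        nvmet_fc_xmt_ls_rsp(tgtport, iod);
    }
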
1654 struct nvmet_fc_ls_iod *iod = in nvmet_fc_handle_ls_rqst_work() local
1656 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_handle_ls_rqst_work()
1658 nvmet_fc_handle_ls_rqst(tgtport, iod); in nvmet_fc_handle_ls_rqst_work()
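
The work handler itself only recovers the iod from its embedded work_struct and calls the dispatcher:

    static void
    nvmet_fc_handle_ls_rqst_work(struct work_struct *work)
    {
        struct nvmet_fc_ls_iod *iod =
            container_of(work, struct nvmet_fc_ls_iod, work);
        struct nvmet_fc_tgtport *tgtport = iod->tgtport;

        nvmet_fc_handle_ls_rqst(tgtport, iod);
    }
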
1685 struct nvmet_fc_ls_iod *iod; in nvmet_fc_rcv_ls_req() local
1693 iod = nvmet_fc_alloc_ls_iod(tgtport); in nvmet_fc_rcv_ls_req()
1694 if (!iod) { in nvmet_fc_rcv_ls_req()
1699 iod->lsreq = lsreq; in nvmet_fc_rcv_ls_req()
1700 iod->fcpreq = NULL; in nvmet_fc_rcv_ls_req()
1701 memcpy(iod->rqstbuf, lsreqbuf, lsreqbuf_len); in nvmet_fc_rcv_ls_req()
1702 iod->rqstdatalen = lsreqbuf_len; in nvmet_fc_rcv_ls_req()
1704 schedule_work(&iod->work); in nvmet_fc_rcv_ls_req()
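
Finally, nvmet_fc_rcv_ls_req() is the entry point an FC LLDD calls when an NVMe LS frame arrives: it bounds the payload against the preallocated buffer size, grabs a free context, copies the payload into iod->rqstbuf, and defers all processing to the work item so the LS is handled in process context rather than in the caller's (typically interrupt) context. Condensed, with target-port reference counting elided:

    int
    nvmet_fc_rcv_ls_req(struct nvmet_fc_target_port *target_port,
                struct nvmefc_tgt_ls_req *lsreq,
                void *lsreqbuf, u32 lsreqbuf_len)
    {
        struct nvmet_fc_tgtport *tgtport = targetport_to_tgtport(target_port);
        struct nvmet_fc_ls_iod *iod;

        /* the request must fit the preallocated buffer */
        if (lsreqbuf_len > NVME_FC_MAX_LS_BUFFER_SIZE)
            return -E2BIG;

        iod = nvmet_fc_alloc_ls_iod(tgtport);
        if (!iod)
            return -ENOENT;

        iod->lsreq = lsreq;
        iod->fcpreq = NULL;        /* this context carries an LS, not FCP */
        memcpy(iod->rqstbuf, lsreqbuf, lsreqbuf_len);
        iod->rqstdatalen = lsreqbuf_len;

        /* handle the LS and send the response from process context */
        schedule_work(&iod->work);

        return 0;
    }
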