Lines matching refs:iod in drivers/nvme/target/fc.c (NVMe-FC target, LS request handling)
103 struct nvmet_fc_ls_iod *iod; member
176 return (iodptr - iodptr->tgtport->iod); in nvmet_fc_iodnum()
254 struct nvmet_fc_ls_iod *iod);
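
The helper at line 176 (nvmet_fc_iodnum()) recovers an iod's index by subtracting the base of the per-tgtport iod array, declared as the member at line 103. A minimal userspace sketch of that pointer-difference indexing; ls_ctx, port and ctx_num are stand-ins, not the driver's types:

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-ins for struct nvmet_fc_ls_iod and nvmet_fc_tgtport. */
struct port;

struct ls_ctx {
	struct port *port;
};

struct port {
	struct ls_ctx *ctx;	/* base of the contiguous context array */
};

/* Same idea as nvmet_fc_iodnum(): index = element pointer - array base. */
static ptrdiff_t ctx_num(struct ls_ctx *c)
{
	return c - c->port->ctx;
}

int main(void)
{
	struct port p;
	struct ls_ctx ctxs[4];

	p.ctx = ctxs;
	for (int i = 0; i < 4; i++)
		ctxs[i].port = &p;

	assert(ctx_num(&ctxs[3]) == 3);
	printf("index of ctxs[3]: %td\n", ctx_num(&ctxs[3]));
	return 0;
}
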
534 struct nvmet_fc_ls_iod *iod; in nvmet_fc_alloc_ls_iodlist() local
537 iod = kcalloc(NVMET_LS_CTX_COUNT, sizeof(struct nvmet_fc_ls_iod), in nvmet_fc_alloc_ls_iodlist()
539 if (!iod) in nvmet_fc_alloc_ls_iodlist()
542 tgtport->iod = iod; in nvmet_fc_alloc_ls_iodlist()
544 for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) { in nvmet_fc_alloc_ls_iodlist()
545 INIT_WORK(&iod->work, nvmet_fc_handle_ls_rqst_work); in nvmet_fc_alloc_ls_iodlist()
546 iod->tgtport = tgtport; in nvmet_fc_alloc_ls_iodlist()
547 list_add_tail(&iod->ls_rcv_list, &tgtport->ls_rcv_list); in nvmet_fc_alloc_ls_iodlist()
549 iod->rqstbuf = kzalloc(sizeof(union nvmefc_ls_requests) + in nvmet_fc_alloc_ls_iodlist()
552 if (!iod->rqstbuf) in nvmet_fc_alloc_ls_iodlist()
555 iod->rspbuf = (union nvmefc_ls_responses *)&iod->rqstbuf[1]; in nvmet_fc_alloc_ls_iodlist()
557 iod->rspdma = fc_dma_map_single(tgtport->dev, iod->rspbuf, in nvmet_fc_alloc_ls_iodlist()
558 sizeof(*iod->rspbuf), in nvmet_fc_alloc_ls_iodlist()
560 if (fc_dma_mapping_error(tgtport->dev, iod->rspdma)) in nvmet_fc_alloc_ls_iodlist()
567 kfree(iod->rqstbuf); in nvmet_fc_alloc_ls_iodlist()
568 list_del(&iod->ls_rcv_list); in nvmet_fc_alloc_ls_iodlist()
569 for (iod--, i--; i >= 0; iod--, i--) { in nvmet_fc_alloc_ls_iodlist()
570 fc_dma_unmap_single(tgtport->dev, iod->rspdma, in nvmet_fc_alloc_ls_iodlist()
571 sizeof(*iod->rspbuf), DMA_TO_DEVICE); in nvmet_fc_alloc_ls_iodlist()
572 kfree(iod->rqstbuf); in nvmet_fc_alloc_ls_iodlist()
573 list_del(&iod->ls_rcv_list); in nvmet_fc_alloc_ls_iodlist()
576 kfree(iod); in nvmet_fc_alloc_ls_iodlist()
584 struct nvmet_fc_ls_iod *iod = tgtport->iod; in nvmet_fc_free_ls_iodlist() local
587 for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) { in nvmet_fc_free_ls_iodlist()
589 iod->rspdma, sizeof(*iod->rspbuf), in nvmet_fc_free_ls_iodlist()
591 kfree(iod->rqstbuf); in nvmet_fc_free_ls_iodlist()
592 list_del(&iod->ls_rcv_list); in nvmet_fc_free_ls_iodlist()
594 kfree(tgtport->iod); in nvmet_fc_free_ls_iodlist()
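
Lines 534-576 (nvmet_fc_alloc_ls_iodlist()) and 584-594 (nvmet_fc_free_ls_iodlist()) show the LS context pool being built: a kcalloc'd array of NVMET_LS_CTX_COUNT iods, one combined request/response buffer per iod, a DMA mapping for the response half, and an error path that walks backwards (iod--, i--) undoing only the entries already set up. Below is a simplified, runnable userspace sketch of the same allocate-then-unwind shape, with malloc/free standing in for the kernel allocators and no DMA mapping; every name (ctx, port, CTX_COUNT, alloc_ctx_list, free_ctx_list) is illustrative:

#include <stdlib.h>

#define CTX_COUNT	4
#define BUF_SIZE	256	/* stands in for the request + response buffers */

struct ctx {
	void *buf;		/* stands in for iod->rqstbuf / iod->rspbuf */
};

struct port {
	struct ctx *ctx;	/* contiguous array, like tgtport->iod */
};

/* Mirrors the allocate-then-unwind shape of nvmet_fc_alloc_ls_iodlist(). */
static int alloc_ctx_list(struct port *port)
{
	struct ctx *c;
	int i;

	c = calloc(CTX_COUNT, sizeof(*c));
	if (!c)
		return -1;
	port->ctx = c;

	for (i = 0; i < CTX_COUNT; c++, i++) {
		c->buf = calloc(1, BUF_SIZE);
		if (!c->buf)
			goto out_fail;
	}
	return 0;

out_fail:
	/* Undo only the entries that were fully set up, walking backwards. */
	for (c--, i--; i >= 0; c--, i--)
		free(c->buf);
	free(port->ctx);
	port->ctx = NULL;
	return -1;
}

/* Mirrors nvmet_fc_free_ls_iodlist(): same walk, full teardown. */
static void free_ctx_list(struct port *port)
{
	struct ctx *c = port->ctx;
	int i;

	for (i = 0; i < CTX_COUNT; c++, i++)
		free(c->buf);
	free(port->ctx);
	port->ctx = NULL;
}

int main(void)
{
	struct port p = { 0 };

	if (alloc_ctx_list(&p) == 0)
		free_ctx_list(&p);
	return 0;
}

Keeping the unwind loop an exact mirror of the setup loop is what lets the full-teardown routine reuse the same walk over the whole array.
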
600 struct nvmet_fc_ls_iod *iod; in nvmet_fc_alloc_ls_iod() local
604 iod = list_first_entry_or_null(&tgtport->ls_rcv_list, in nvmet_fc_alloc_ls_iod()
606 if (iod) in nvmet_fc_alloc_ls_iod()
607 list_move_tail(&iod->ls_rcv_list, &tgtport->ls_busylist); in nvmet_fc_alloc_ls_iod()
609 return iod; in nvmet_fc_alloc_ls_iod()
615 struct nvmet_fc_ls_iod *iod) in nvmet_fc_free_ls_iod() argument
620 list_move(&iod->ls_rcv_list, &tgtport->ls_rcv_list); in nvmet_fc_free_ls_iod()
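
Lines 600-620 show how a context is checked out and returned: nvmet_fc_alloc_ls_iod() takes the first entry off tgtport->ls_rcv_list and moves it to ls_busylist, and nvmet_fc_free_ls_iod() moves it back, so the LS receive path never has to allocate memory. A userspace sketch of the same two-list pattern using the BSD <sys/queue.h> TAILQ macros in place of the kernel list helpers; ls_ctx, port, get_ctx and put_ctx are made-up names:

#include <stdio.h>
#include <sys/queue.h>

struct ls_ctx {
	int id;
	TAILQ_ENTRY(ls_ctx) link;	/* like iod->ls_rcv_list */
};

struct port {
	TAILQ_HEAD(, ls_ctx) free_list;	/* like tgtport->ls_rcv_list */
	TAILQ_HEAD(, ls_ctx) busy_list;	/* like tgtport->ls_busylist */
};

/* Pop a free context and park it on the busy list (nvmet_fc_alloc_ls_iod). */
static struct ls_ctx *get_ctx(struct port *p)
{
	struct ls_ctx *c = TAILQ_FIRST(&p->free_list);

	if (c) {
		TAILQ_REMOVE(&p->free_list, c, link);
		TAILQ_INSERT_TAIL(&p->busy_list, c, link);
	}
	return c;	/* NULL: the pool is exhausted */
}

/* Return a context to the free list (nvmet_fc_free_ls_iod). */
static void put_ctx(struct port *p, struct ls_ctx *c)
{
	TAILQ_REMOVE(&p->busy_list, c, link);
	TAILQ_INSERT_TAIL(&p->free_list, c, link);
}

int main(void)
{
	struct port p;
	struct ls_ctx ctxs[3], *c;

	TAILQ_INIT(&p.free_list);
	TAILQ_INIT(&p.busy_list);
	for (int i = 0; i < 3; i++) {
		ctxs[i].id = i;
		TAILQ_INSERT_TAIL(&p.free_list, &ctxs[i], link);
	}

	c = get_ctx(&p);
	printf("got context %d\n", c->id);
	put_ctx(&p, c);
	return 0;
}
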
1636 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_create_association() argument
1638 struct fcnvme_ls_cr_assoc_rqst *rqst = &iod->rqstbuf->rq_cr_assoc; in nvmet_fc_ls_create_association()
1639 struct fcnvme_ls_cr_assoc_acc *acc = &iod->rspbuf->rsp_cr_assoc; in nvmet_fc_ls_create_association()
1653 if (iod->rqstdatalen < FCNVME_LSDESC_CRA_RQST_MINLEN) in nvmet_fc_ls_create_association()
1671 iod->assoc = nvmet_fc_alloc_target_assoc( in nvmet_fc_ls_create_association()
1672 tgtport, iod->hosthandle); in nvmet_fc_ls_create_association()
1673 if (!iod->assoc) in nvmet_fc_ls_create_association()
1676 queue = nvmet_fc_alloc_target_queue(iod->assoc, 0, in nvmet_fc_ls_create_association()
1687 iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, in nvmet_fc_ls_create_association()
1700 tgtport->fc_target_port.port_num, iod->assoc->a_id); in nvmet_fc_ls_create_association()
1704 iod->lsrsp->rsplen = sizeof(*acc); in nvmet_fc_ls_create_association()
1715 cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, 0)); in nvmet_fc_ls_create_association()
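
Lines 1636-1715 (nvmet_fc_ls_create_association()) follow a validate/allocate/respond shape: check the request length against FCNVME_LSDESC_CRA_RQST_MINLEN, create the association and its admin queue, then fill either a reject or an accept carrying the new connection id. The sketch below mirrors only that shape; the structures, helper bodies and the REJECT_UNABLE value are invented for illustration and are not the fcnvme_ls_* wire layouts:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical, simplified wire structures. */
struct cr_assoc_rqst { uint32_t desc_list_len; uint32_t ersp_ratio; };
struct cr_assoc_acc  { uint32_t desc_list_len; uint64_t conn_id; };
struct ls_reject     { uint32_t desc_list_len; uint8_t reason; };

#define CR_ASSOC_MINLEN		sizeof(struct cr_assoc_rqst)
#define REJECT_UNABLE		1	/* illustrative value, not an FC-NVME code */

struct assoc { uint64_t assoc_id; };

/* Trivial stand-ins for nvmet_fc_alloc_target_assoc()/nvmet_fc_makeconnid(). */
static struct assoc *alloc_assoc(void)
{
	static struct assoc a = { .assoc_id = 0x1000 };
	return &a;
}

static uint64_t make_conn_id(struct assoc *a, uint16_t qid)
{
	return a->assoc_id + qid;	/* placeholder encoding */
}

/*
 * Shape of nvmet_fc_ls_create_association(): validate the request length,
 * set up the association (the driver also allocates the admin queue here),
 * then fill either a reject or an accept carrying the new connection id.
 * Returns the response length to report back to the transport.
 */
static size_t handle_create_association(size_t rqstlen, void *rspbuf)
{
	struct assoc *a = NULL;

	if (rqstlen >= CR_ASSOC_MINLEN)
		a = alloc_assoc();

	if (!a) {
		struct ls_reject *rjt = rspbuf;

		memset(rjt, 0, sizeof(*rjt));
		rjt->reason = REJECT_UNABLE;
		return sizeof(*rjt);
	}

	struct cr_assoc_acc *acc = rspbuf;

	memset(acc, 0, sizeof(*acc));
	acc->conn_id = make_conn_id(a, 0);	/* admin queue is qid 0 */
	return sizeof(*acc);
}

int main(void)
{
	unsigned char rsp[64];

	printf("rsplen = %zu\n",
	       handle_create_association(sizeof(struct cr_assoc_rqst), rsp));
	return 0;
}
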
1725 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_create_connection() argument
1727 struct fcnvme_ls_cr_conn_rqst *rqst = &iod->rqstbuf->rq_cr_conn; in nvmet_fc_ls_create_connection()
1728 struct fcnvme_ls_cr_conn_acc *acc = &iod->rspbuf->rsp_cr_conn; in nvmet_fc_ls_create_connection()
1734 if (iod->rqstdatalen < sizeof(struct fcnvme_ls_cr_conn_rqst)) in nvmet_fc_ls_create_connection()
1760 iod->assoc = nvmet_fc_find_target_assoc(tgtport, in nvmet_fc_ls_create_connection()
1762 if (!iod->assoc) in nvmet_fc_ls_create_connection()
1765 queue = nvmet_fc_alloc_target_queue(iod->assoc, in nvmet_fc_ls_create_connection()
1772 nvmet_fc_tgt_a_put(iod->assoc); in nvmet_fc_ls_create_connection()
1780 iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, in nvmet_fc_ls_create_connection()
1795 iod->lsrsp->rsplen = sizeof(*acc); in nvmet_fc_ls_create_connection()
1805 cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, in nvmet_fc_ls_create_connection()
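
Lines 1725-1805 (nvmet_fc_ls_create_connection()) repeat the pattern for an I/O queue: look up the association named in the request, allocate the requested queue, and return an accept whose connection id identifies both the association and the queue (line 1805). The packing below is purely hypothetical, shown only to illustrate why a single be64 connection id can carry both; it is not nvmet_fc_makeconnid()'s actual encoding:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed layout: association id in the upper bits, queue id in the low bits. */
#define QID_BITS	8

static uint64_t make_conn_id(uint64_t assoc_id, uint16_t qid)
{
	return (assoc_id << QID_BITS) | (qid & ((1u << QID_BITS) - 1));
}

static uint64_t conn_id_to_assoc(uint64_t conn_id)
{
	return conn_id >> QID_BITS;
}

static uint16_t conn_id_to_qid(uint64_t conn_id)
{
	return conn_id & ((1u << QID_BITS) - 1);
}

int main(void)
{
	uint64_t id = make_conn_id(0x42, 3);

	assert(conn_id_to_assoc(id) == 0x42);
	assert(conn_id_to_qid(id) == 3);
	printf("conn_id = 0x%llx\n", (unsigned long long)id);
	return 0;
}
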
1815 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_disconnect() argument
1818 &iod->rqstbuf->rq_dis_assoc; in nvmet_fc_ls_disconnect()
1820 &iod->rspbuf->rsp_dis_assoc; in nvmet_fc_ls_disconnect()
1828 ret = nvmefc_vldt_lsreq_discon_assoc(iod->rqstdatalen, rqst); in nvmet_fc_ls_disconnect()
1833 iod->assoc = assoc; in nvmet_fc_ls_disconnect()
1842 iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, in nvmet_fc_ls_disconnect()
1853 iod->lsrsp->rsplen = sizeof(*acc); in nvmet_fc_ls_disconnect()
1874 assoc->rcv_disconn = iod; in nvmet_fc_ls_disconnect()
1886 sizeof(*iod->rspbuf), in nvmet_fc_ls_disconnect()
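
Lines 1815-1886 (nvmet_fc_ls_disconnect()) park the request on the association (assoc->rcv_disconn = iod, line 1874) rather than answering at once, which suggests the accept is deferred until the association has been torn down, while an already-parked disconnect is answered with a reject as overlapping (line 1842). A small sketch of that deferred-response pattern; ls_ctx, assoc, handle_disconnect and teardown_done are invented names:

#include <stdio.h>

/* Hypothetical stand-ins for nvmet_fc_ls_iod / nvmet_fc_tgt_assoc. */
struct ls_ctx { int tag; };

struct assoc {
	struct ls_ctx *pending_disconnect;	/* like assoc->rcv_disconn */
};

static void send_reject(struct ls_ctx *c)
{
	printf("reject LS %d (overlapping disconnect)\n", c->tag);
}

static void send_accept(struct ls_ctx *c)
{
	printf("accept LS %d\n", c->tag);
}

/*
 * Shape of the disconnect path: park the request on the association and
 * defer the accept; an already-parked disconnect is answered with a reject.
 * (The driver performs this swap under its tgtport lock.)
 */
static void handle_disconnect(struct assoc *a, struct ls_ctx *c)
{
	struct ls_ctx *old = a->pending_disconnect;

	a->pending_disconnect = c;
	if (old)
		send_reject(old);
	/* no accept yet: it goes out once teardown finishes */
}

/* Called once association teardown completes. */
static void teardown_done(struct assoc *a)
{
	if (a->pending_disconnect) {
		send_accept(a->pending_disconnect);
		a->pending_disconnect = NULL;
	}
}

int main(void)
{
	struct assoc a = { 0 };
	struct ls_ctx first = { .tag = 1 }, second = { .tag = 2 };

	handle_disconnect(&a, &first);
	handle_disconnect(&a, &second);	/* rejects LS 1 */
	teardown_done(&a);		/* accepts LS 2 */
	return 0;
}
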
1908 struct nvmet_fc_ls_iod *iod = lsrsp->nvme_fc_private; in nvmet_fc_xmt_ls_rsp_done() local
1909 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_xmt_ls_rsp_done()
1911 fc_dma_sync_single_for_cpu(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp_done()
1912 sizeof(*iod->rspbuf), DMA_TO_DEVICE); in nvmet_fc_xmt_ls_rsp_done()
1913 nvmet_fc_free_ls_iod(tgtport, iod); in nvmet_fc_xmt_ls_rsp_done()
1919 struct nvmet_fc_ls_iod *iod) in nvmet_fc_xmt_ls_rsp() argument
1923 fc_dma_sync_single_for_device(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp()
1924 sizeof(*iod->rspbuf), DMA_TO_DEVICE); in nvmet_fc_xmt_ls_rsp()
1926 ret = tgtport->ops->xmt_ls_rsp(&tgtport->fc_target_port, iod->lsrsp); in nvmet_fc_xmt_ls_rsp()
1928 nvmet_fc_xmt_ls_rsp_done(iod->lsrsp); in nvmet_fc_xmt_ls_rsp()
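
Lines 1908-1928 pair the DMA syncs around the response buffer (for the device before transmit, back for the CPU in the completion handler) and route a failed ->xmt_ls_rsp() submission straight into the same nvmet_fc_xmt_ls_rsp_done() callback, so the iod is always reclaimed through one path. A sketch of that submit-or-complete-synchronously idiom; all names are placeholders:

#include <stdio.h>

struct ls_ctx;

/* Completion callback type, like lsrsp->done in the listing. */
typedef void (*done_fn)(struct ls_ctx *);

struct ls_ctx {
	int tag;
	done_fn done;
};

/* Stand-in for the LLDD's ->xmt_ls_rsp(); returns nonzero on failure. */
static int lld_xmt_ls_rsp(struct ls_ctx *c, int fail)
{
	if (fail)
		return -1;
	printf("LS %d handed to hardware; done() will run on completion\n",
	       c->tag);
	return 0;
}

/* Single cleanup path, like nvmet_fc_xmt_ls_rsp_done(). */
static void ls_rsp_done(struct ls_ctx *c)
{
	/* the driver syncs the DMA buffer for the CPU and frees the iod here */
	printf("LS %d completed, context returned to the pool\n", c->tag);
}

/*
 * Shape of nvmet_fc_xmt_ls_rsp(): sync the response buffer for the device,
 * submit it, and on submission failure run the completion handler directly
 * so the context is always reclaimed through the same path.
 */
static void xmt_ls_rsp(struct ls_ctx *c, int fail)
{
	if (lld_xmt_ls_rsp(c, fail))
		c->done(c);
}

int main(void)
{
	struct ls_ctx c = { .tag = 7, .done = ls_rsp_done };

	xmt_ls_rsp(&c, 0);	/* normal submission */
	xmt_ls_rsp(&c, 1);	/* failure: done() runs synchronously */
	return 0;
}
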
1936 struct nvmet_fc_ls_iod *iod) in nvmet_fc_handle_ls_rqst() argument
1938 struct fcnvme_ls_rqst_w0 *w0 = &iod->rqstbuf->rq_cr_assoc.w0; in nvmet_fc_handle_ls_rqst()
1941 iod->lsrsp->nvme_fc_private = iod; in nvmet_fc_handle_ls_rqst()
1942 iod->lsrsp->rspbuf = iod->rspbuf; in nvmet_fc_handle_ls_rqst()
1943 iod->lsrsp->rspdma = iod->rspdma; in nvmet_fc_handle_ls_rqst()
1944 iod->lsrsp->done = nvmet_fc_xmt_ls_rsp_done; in nvmet_fc_handle_ls_rqst()
1946 iod->lsrsp->rsplen = 0; in nvmet_fc_handle_ls_rqst()
1948 iod->assoc = NULL; in nvmet_fc_handle_ls_rqst()
1958 nvmet_fc_ls_create_association(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1962 nvmet_fc_ls_create_connection(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1966 sendrsp = nvmet_fc_ls_disconnect(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1969 iod->lsrsp->rsplen = nvme_fc_format_rjt(iod->rspbuf, in nvmet_fc_handle_ls_rqst()
1970 sizeof(*iod->rspbuf), w0->ls_cmd, in nvmet_fc_handle_ls_rqst()
1975 nvmet_fc_xmt_ls_rsp(tgtport, iod); in nvmet_fc_handle_ls_rqst()
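
Lines 1936-1975 (nvmet_fc_handle_ls_rqst()) wire up the lsrsp (private pointer, response buffer, DMA handle, done callback), then dispatch on w0->ls_cmd: Create Association, Create Connection, or Disconnect, with a formatted reject for anything else, and transmit the response unless the disconnect handler deferred it. A compact sketch of that dispatch; the command values and structures are illustrative, not the FCNVME_LS_* definitions:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative command values. */
enum { LS_CREATE_ASSOCIATION = 1, LS_CREATE_CONNECTION, LS_DISCONNECT_ASSOC };

struct ls_ctx {
	uint8_t ls_cmd;		/* like w0->ls_cmd */
	size_t  rsplen;
	bool    rejected;
};

static void create_association(struct ls_ctx *c) { c->rsplen = 64; }
static void create_connection(struct ls_ctx *c)  { c->rsplen = 48; }
static bool disconnect_assoc(struct ls_ctx *c)   { (void)c; return false; /* deferred */ }

static void format_reject(struct ls_ctx *c)
{
	c->rejected = true;
	c->rsplen = 32;
}

static void xmt_ls_rsp(struct ls_ctx *c)
{
	printf("cmd %u: sending %zu byte %s\n", c->ls_cmd, c->rsplen,
	       c->rejected ? "reject" : "response");
}

/* Shape of nvmet_fc_handle_ls_rqst(): dispatch, reject unknowns, then send. */
static void handle_ls_rqst(struct ls_ctx *c)
{
	bool sendrsp = true;

	switch (c->ls_cmd) {
	case LS_CREATE_ASSOCIATION:
		create_association(c);
		break;
	case LS_CREATE_CONNECTION:
		create_connection(c);
		break;
	case LS_DISCONNECT_ASSOC:
		sendrsp = disconnect_assoc(c);
		break;
	default:
		format_reject(c);	/* unknown LS command */
	}

	if (sendrsp)
		xmt_ls_rsp(c);
}

int main(void)
{
	struct ls_ctx c = { .ls_cmd = 0xff };	/* unsupported command */

	handle_ls_rqst(&c);
	return 0;
}
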
1984 struct nvmet_fc_ls_iod *iod = in nvmet_fc_handle_ls_rqst_work() local
1986 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_handle_ls_rqst_work()
1988 nvmet_fc_handle_ls_rqst(tgtport, iod); in nvmet_fc_handle_ls_rqst_work()
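
The work handler at lines 1984-1988 runs from the work item embedded in the iod; the truncated initializer at line 1984 is, in the usual kernel idiom, presumably a container_of() back to the enclosing nvmet_fc_ls_iod. A userspace sketch of that idiom using offsetof; struct work and ls_ctx stand in for work_struct and the iod:

#include <stddef.h>
#include <stdio.h>

/* Minimal container_of(), as found in the kernel headers. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct work { void (*fn)(struct work *); };

/* Hypothetical context embedding its work item, like nvmet_fc_ls_iod. */
struct ls_ctx {
	int tag;
	struct work work;
};

/* Shape of nvmet_fc_handle_ls_rqst_work(): recover the enclosing context. */
static void handle_ls_rqst_work(struct work *w)
{
	struct ls_ctx *c = container_of(w, struct ls_ctx, work);

	printf("handling LS for context %d\n", c->tag);
}

int main(void)
{
	struct ls_ctx c = { .tag = 3, .work = { .fn = handle_ls_rqst_work } };

	c.work.fn(&c.work);	/* what schedule_work() arranges to happen later */
	return 0;
}
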
2016 struct nvmet_fc_ls_iod *iod; in nvmet_fc_rcv_ls_req() local
2036 iod = nvmet_fc_alloc_ls_iod(tgtport); in nvmet_fc_rcv_ls_req()
2037 if (!iod) { in nvmet_fc_rcv_ls_req()
2046 iod->lsrsp = lsrsp; in nvmet_fc_rcv_ls_req()
2047 iod->fcpreq = NULL; in nvmet_fc_rcv_ls_req()
2048 memcpy(iod->rqstbuf, lsreqbuf, lsreqbuf_len); in nvmet_fc_rcv_ls_req()
2049 iod->rqstdatalen = lsreqbuf_len; in nvmet_fc_rcv_ls_req()
2050 iod->hosthandle = hosthandle; in nvmet_fc_rcv_ls_req()
2052 schedule_work(&iod->work); in nvmet_fc_rcv_ls_req()
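
Lines 2016-2052 (nvmet_fc_rcv_ls_req()) show the LLDD entry point: grab a pre-allocated iod, copy the LS payload out of the caller's buffer, record its length and the opaque hosthandle, and schedule the work item so the real handling happens later. A simplified sketch of that hand-off, assuming a single pre-allocated context and a flag in place of schedule_work(); all names are invented:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define RQST_BUF_SIZE	1024	/* stands in for union nvmefc_ls_requests */

/* Hypothetical context standing in for struct nvmet_fc_ls_iod. */
struct ls_ctx {
	uint8_t  rqstbuf[RQST_BUF_SIZE];
	size_t   rqstdatalen;
	void    *hosthandle;
	int      queued;		/* stands in for schedule_work() */
};

/*
 * Shape of nvmet_fc_rcv_ls_req(): take a pre-allocated context, copy the LS
 * payload out of the LLDD's buffer, note its length and the opaque host
 * handle, and defer the real processing.  A nonzero return tells the caller
 * the LS was not accepted (no free context, or payload too large).
 */
static int rcv_ls_req(struct ls_ctx *c, const void *lsreqbuf,
		      size_t lsreqbuf_len, void *hosthandle)
{
	if (!c || lsreqbuf_len > sizeof(c->rqstbuf))
		return -1;

	memcpy(c->rqstbuf, lsreqbuf, lsreqbuf_len);
	c->rqstdatalen = lsreqbuf_len;
	c->hosthandle = hosthandle;
	c->queued = 1;			/* schedule_work(&iod->work) in the driver */
	return 0;
}

int main(void)
{
	struct ls_ctx ctx;
	uint8_t ls[32] = { 0x01 };	/* fake LS payload */

	memset(&ctx, 0, sizeof(ctx));
	if (rcv_ls_req(&ctx, ls, sizeof(ls), NULL) == 0)
		printf("LS queued: %zu bytes\n", ctx.rqstdatalen);
	return 0;
}
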