Lines matching refs: tgtport (NVMe Fibre Channel target, drivers/nvme/target/fc.c)
48 struct nvmet_fc_tgtport *tgtport; member
93 struct nvmet_fc_tgtport *tgtport; member
150 struct nvmet_fc_tgtport *tgtport; member
161 return (iodptr - iodptr->tgtport->iod); in nvmet_fc_iodnum()
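The nvmet_fc_iodnum() hit above computes an iod's index with nothing but pointer arithmetic: subtracting the array base reached through the back-pointer yields an element count, not a byte offset. A minimal userspace sketch of the same idiom (struct names are hypothetical stand-ins for nvmet_fc_tgtport/nvmet_fc_ls_iod):

    /* Illustrative only: index-from-pointer, as in nvmet_fc_iodnum(). */
    #include <stdio.h>

    struct port;

    struct iod {
        struct port *port;      /* back-pointer, like iod->tgtport */
    };

    struct port {
        struct iod *iod;        /* base of the iod array, like tgtport->iod */
    };

    static int iodnum(struct iod *iodptr)
    {
        /* pointer difference between elements of one array is an index */
        return (int)(iodptr - iodptr->port->iod);
    }

    int main(void)
    {
        struct port p;
        struct iod v[4];

        p.iod = v;
        for (int i = 0; i < 4; i++)
            v[i].port = &p;
        printf("%d\n", iodnum(&v[2]));  /* prints 2 */
        return 0;
    }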
234 static void nvmet_fc_tgtport_put(struct nvmet_fc_tgtport *tgtport);
235 static int nvmet_fc_tgtport_get(struct nvmet_fc_tgtport *tgtport);
236 static void nvmet_fc_handle_fcp_rqst(struct nvmet_fc_tgtport *tgtport,
334 nvmet_fc_alloc_ls_iodlist(struct nvmet_fc_tgtport *tgtport) in nvmet_fc_alloc_ls_iodlist() argument
344 tgtport->iod = iod; in nvmet_fc_alloc_ls_iodlist()
348 iod->tgtport = tgtport; in nvmet_fc_alloc_ls_iodlist()
349 list_add_tail(&iod->ls_list, &tgtport->ls_list); in nvmet_fc_alloc_ls_iodlist()
358 iod->rspdma = fc_dma_map_single(tgtport->dev, iod->rspbuf, in nvmet_fc_alloc_ls_iodlist()
361 if (fc_dma_mapping_error(tgtport->dev, iod->rspdma)) in nvmet_fc_alloc_ls_iodlist()
371 fc_dma_unmap_single(tgtport->dev, iod->rspdma, in nvmet_fc_alloc_ls_iodlist()
383 nvmet_fc_free_ls_iodlist(struct nvmet_fc_tgtport *tgtport) in nvmet_fc_free_ls_iodlist() argument
385 struct nvmet_fc_ls_iod *iod = tgtport->iod; in nvmet_fc_free_ls_iodlist()
389 fc_dma_unmap_single(tgtport->dev, in nvmet_fc_free_ls_iodlist()
395 kfree(tgtport->iod); in nvmet_fc_free_ls_iodlist()
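nvmet_fc_alloc_ls_iodlist() above maps one DMA response buffer per iod and, when fc_dma_mapping_error() trips partway through, unmaps everything mapped so far before failing; nvmet_fc_free_ls_iodlist() walks the same array on teardown. A compact sketch of that all-or-unwind loop, with setup_one()/teardown_one() as hypothetical stand-ins for the map/unmap calls:

    #include <stdbool.h>

    #define NR_ELEMS 8

    static bool setup_one(int i)    { return i < NR_ELEMS; } /* stand-in: fc_dma_map_single() + error check */
    static void teardown_one(int i) { (void)i; }             /* stand-in: fc_dma_unmap_single() */

    static int setup_all(void)
    {
        int i;

        for (i = 0; i < NR_ELEMS; i++)
            if (!setup_one(i))
                goto out_fail;
        return 0;

    out_fail:
        /* unwind exactly the prefix that succeeded, in reverse */
        while (--i >= 0)
            teardown_one(i);
        return -1;
    }

    int main(void) { return setup_all(); }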
399 nvmet_fc_alloc_ls_iod(struct nvmet_fc_tgtport *tgtport) in nvmet_fc_alloc_ls_iod() argument
404 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_alloc_ls_iod()
405 iod = list_first_entry_or_null(&tgtport->ls_list, in nvmet_fc_alloc_ls_iod()
408 list_move_tail(&iod->ls_list, &tgtport->ls_busylist); in nvmet_fc_alloc_ls_iod()
409 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_alloc_ls_iod()
415 nvmet_fc_free_ls_iod(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_free_ls_iod() argument
420 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_free_ls_iod()
421 list_move(&iod->ls_list, &tgtport->ls_list); in nvmet_fc_free_ls_iod()
422 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_free_ls_iod()
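The alloc/free pair above is a classic preallocated pool: a free list and a busy list hang off the tgtport, and "allocation" is just list_first_entry_or_null() plus list_move_tail() under tgtport->lock, so no memory is allocated on the hot path. A userspace sketch of the same two-list pool, with a pthread mutex standing in for the irqsave spinlock and hand-rolled helpers standing in for <linux/list.h>:

    #include <pthread.h>
    #include <stddef.h>

    struct lnode { struct lnode *prev, *next; };

    static void list_init(struct lnode *h) { h->prev = h->next = h; }

    static void list_del(struct lnode *n)
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
    }

    static void list_add_tail(struct lnode *n, struct lnode *h)
    {
        n->prev = h->prev;
        n->next = h;
        h->prev->next = n;
        h->prev = n;
    }

    /* like list_move_tail(): unlink, then append elsewhere */
    static void list_move_tail(struct lnode *n, struct lnode *h)
    {
        list_del(n);
        list_add_tail(n, h);
    }

    struct pool {
        pthread_mutex_t lock;   /* plays the role of tgtport->lock */
        struct lnode free_list; /* like tgtport->ls_list */
        struct lnode busy_list; /* like tgtport->ls_busylist */
    };

    static struct lnode *pool_alloc(struct pool *p)
    {
        struct lnode *n = NULL;

        pthread_mutex_lock(&p->lock);
        if (p->free_list.next != &p->free_list) {   /* list_first_entry_or_null() */
            n = p->free_list.next;
            list_move_tail(n, &p->busy_list);
        }
        pthread_mutex_unlock(&p->lock);
        return n;
    }

    static void pool_free(struct pool *p, struct lnode *n)
    {
        pthread_mutex_lock(&p->lock);
        list_move_tail(n, &p->free_list);
        pthread_mutex_unlock(&p->lock);
    }

    int main(void)
    {
        struct pool p = { .lock = PTHREAD_MUTEX_INITIALIZER };
        struct lnode a, b;

        list_init(&p.free_list);
        list_init(&p.busy_list);
        list_add_tail(&a, &p.free_list);
        list_add_tail(&b, &p.free_list);

        struct lnode *n = pool_alloc(&p);   /* moves &a to the busy list */
        pool_free(&p, n);                   /* and back again */
        return 0;
    }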
426 nvmet_fc_prep_fcp_iodlist(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_prep_fcp_iodlist() argument
436 fod->tgtport = tgtport; in nvmet_fc_prep_fcp_iodlist()
445 fod->rspdma = fc_dma_map_single(tgtport->dev, &fod->rspiubuf, in nvmet_fc_prep_fcp_iodlist()
447 if (fc_dma_mapping_error(tgtport->dev, fod->rspdma)) { in nvmet_fc_prep_fcp_iodlist()
450 fc_dma_unmap_single(tgtport->dev, fod->rspdma, in nvmet_fc_prep_fcp_iodlist()
463 nvmet_fc_destroy_fcp_iodlist(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_destroy_fcp_iodlist() argument
471 fc_dma_unmap_single(tgtport->dev, fod->rspdma, in nvmet_fc_destroy_fcp_iodlist()
499 nvmet_fc_queue_fcp_req(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_queue_fcp_req() argument
510 ((queue->qid - 1) % tgtport->ops->max_hw_queues) : 0; in nvmet_fc_queue_fcp_req()
512 if (tgtport->ops->target_features & NVMET_FCTGTFEAT_CMD_IN_ISR) in nvmet_fc_queue_fcp_req()
515 nvmet_fc_handle_fcp_rqst(tgtport, fod); in nvmet_fc_queue_fcp_req()
525 nvmet_fc_queue_fcp_req(fod->tgtport, fod->queue, fod->fcpreq); in nvmet_fc_fcp_rqst_op_defer_work()
534 struct nvmet_fc_tgtport *tgtport = fod->tgtport; in nvmet_fc_free_fcp_iod() local
538 fc_dma_sync_single_for_cpu(tgtport->dev, fod->rspdma, in nvmet_fc_free_fcp_iod()
549 tgtport->ops->fcp_req_release(&tgtport->fc_target_port, fcpreq); in nvmet_fc_free_fcp_iod()
584 tgtport->ops->defer_rcv(&tgtport->fc_target_port, fcpreq); in nvmet_fc_free_fcp_iod()
595 nvmet_fc_queue_to_cpu(struct nvmet_fc_tgtport *tgtport, int qid) in nvmet_fc_queue_to_cpu() argument
599 if (tgtport->ops->max_hw_queues == 1) in nvmet_fc_queue_to_cpu()
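nvmet_fc_queue_fcp_req() above picks the LLDD hardware queue as ((qid - 1) % max_hw_queues) for I/O queues and 0 for the admin queue (qid 0): I/O queues are striped round-robin across however many hardware queues the driver exposes. The mapping in isolation:

    #include <stdio.h>

    static unsigned int hwqid(unsigned int qid, unsigned int max_hw_queues)
    {
        /* admin queue pins to hw queue 0; I/O queues stripe round-robin */
        return qid ? (qid - 1) % max_hw_queues : 0;
    }

    int main(void)
    {
        for (unsigned int qid = 0; qid < 6; qid++)
            printf("qid %u -> hwq %u\n", qid, hwqid(qid, 4));
        return 0;
    }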
639 assoc->tgtport->fc_target_port.port_num, in nvmet_fc_alloc_target_queue()
648 queue->port = assoc->tgtport->port; in nvmet_fc_alloc_target_queue()
649 queue->cpu = nvmet_fc_queue_to_cpu(assoc->tgtport, qid); in nvmet_fc_alloc_target_queue()
660 nvmet_fc_prep_fcp_iodlist(assoc->tgtport, queue); in nvmet_fc_alloc_target_queue()
667 spin_lock_irqsave(&assoc->tgtport->lock, flags); in nvmet_fc_alloc_target_queue()
669 spin_unlock_irqrestore(&assoc->tgtport->lock, flags); in nvmet_fc_alloc_target_queue()
674 nvmet_fc_destroy_fcp_iodlist(assoc->tgtport, queue); in nvmet_fc_alloc_target_queue()
691 spin_lock_irqsave(&queue->assoc->tgtport->lock, flags); in nvmet_fc_tgt_queue_free()
693 spin_unlock_irqrestore(&queue->assoc->tgtport->lock, flags); in nvmet_fc_tgt_queue_free()
695 nvmet_fc_destroy_fcp_iodlist(queue->assoc->tgtport, queue); in nvmet_fc_tgt_queue_free()
720 struct nvmet_fc_tgtport *tgtport = queue->assoc->tgtport; in nvmet_fc_delete_target_queue() local
746 tgtport->ops->fcp_abort( in nvmet_fc_delete_target_queue()
747 &tgtport->fc_target_port, fod->fcpreq); in nvmet_fc_delete_target_queue()
768 tgtport->ops->defer_rcv(&tgtport->fc_target_port, in nvmet_fc_delete_target_queue()
771 tgtport->ops->fcp_abort(&tgtport->fc_target_port, in nvmet_fc_delete_target_queue()
774 tgtport->ops->fcp_req_release(&tgtport->fc_target_port, in nvmet_fc_delete_target_queue()
795 nvmet_fc_find_target_queue(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_find_target_queue() argument
807 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_find_target_queue()
808 list_for_each_entry(assoc, &tgtport->assoc_list, a_list) { in nvmet_fc_find_target_queue()
815 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_find_target_queue()
819 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_find_target_queue()
834 nvmet_fc_alloc_target_assoc(struct nvmet_fc_tgtport *tgtport) in nvmet_fc_alloc_target_assoc() argument
846 idx = ida_simple_get(&tgtport->assoc_cnt, 0, 0, GFP_KERNEL); in nvmet_fc_alloc_target_assoc()
850 if (!nvmet_fc_tgtport_get(tgtport)) in nvmet_fc_alloc_target_assoc()
853 assoc->tgtport = tgtport; in nvmet_fc_alloc_target_assoc()
863 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_alloc_target_assoc()
865 list_for_each_entry(tmpassoc, &tgtport->assoc_list, a_list) in nvmet_fc_alloc_target_assoc()
872 list_add_tail(&assoc->a_list, &tgtport->assoc_list); in nvmet_fc_alloc_target_assoc()
874 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_alloc_target_assoc()
880 ida_simple_remove(&tgtport->assoc_cnt, idx); in nvmet_fc_alloc_target_assoc()
891 struct nvmet_fc_tgtport *tgtport = assoc->tgtport; in nvmet_fc_target_assoc_free() local
894 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_target_assoc_free()
896 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_target_assoc_free()
897 ida_simple_remove(&tgtport->assoc_cnt, assoc->a_id); in nvmet_fc_target_assoc_free()
899 nvmet_fc_tgtport_put(tgtport); in nvmet_fc_target_assoc_free()
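nvmet_fc_alloc_target_assoc() acquires its resources in order: an association ID from the tgtport's IDA, then a reference on the tgtport itself; the failure path and nvmet_fc_target_assoc_free() release them again. A sketch of that goto-based unwind ordering; alloc_id()/put_id()/parent_get()/parent_put() are hypothetical stand-ins for ida_simple_get()/ida_simple_remove()/nvmet_fc_tgtport_get()/nvmet_fc_tgtport_put():

    #include <stdbool.h>
    #include <stdlib.h>

    struct parent { int unused; };
    struct assoc  { struct parent *parent; int id; };

    static int  alloc_id(void)               { return 0; }         /* stand-in: ida_simple_get() */
    static void put_id(int id)               { (void)id; }         /* stand-in: ida_simple_remove() */
    static bool parent_get(struct parent *p) { return p != NULL; } /* stand-in: nvmet_fc_tgtport_get() */
    static void parent_put(struct parent *p) { (void)p; }          /* stand-in: nvmet_fc_tgtport_put() */

    static struct assoc *assoc_alloc(struct parent *p)
    {
        struct assoc *a = calloc(1, sizeof(*a));

        if (!a)
            return NULL;
        a->id = alloc_id();
        if (a->id < 0)
            goto out_free;
        if (!parent_get(p))     /* pin the parent for the assoc's lifetime */
            goto out_id;
        a->parent = p;
        return a;

    out_id:
        put_id(a->id);
    out_free:
        free(a);
        return NULL;
    }

    static void assoc_free(struct assoc *a)
    {
        put_id(a->id);
        parent_put(a->parent);  /* parent may now be freed by its own kref */
        free(a);
    }

    int main(void)
    {
        struct parent par;
        struct assoc *a = assoc_alloc(&par);

        if (a)
            assoc_free(a);
        return 0;
    }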
917 struct nvmet_fc_tgtport *tgtport = assoc->tgtport; in nvmet_fc_delete_target_assoc() local
922 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_delete_target_assoc()
928 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_delete_target_assoc()
931 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_delete_target_assoc()
934 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_delete_target_assoc()
940 nvmet_fc_find_target_assoc(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_find_target_assoc() argument
947 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_find_target_assoc()
948 list_for_each_entry(assoc, &tgtport->assoc_list, a_list) { in nvmet_fc_find_target_assoc()
955 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_find_target_assoc()
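Both finders above (nvmet_fc_find_target_queue() and nvmet_fc_find_target_assoc()) walk tgtport->assoc_list under tgtport->lock; on a match the kernel code also takes a reference on the result before unlocking (the get calls don't reference tgtport, so they don't appear in this listing), which is what keeps the object alive between unlock and use. A sketch of that lookup shape, all names hypothetical:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stddef.h>

    struct obj { struct obj *next; unsigned long key; };

    static bool obj_get(struct obj *o) { (void)o; return true; }  /* stand-in for a kref get */

    struct table {
        pthread_mutex_t lock;   /* plays tgtport->lock */
        struct obj *head;       /* plays tgtport->assoc_list */
    };

    /* Find by key; return with a reference held, or NULL. */
    static struct obj *find(struct table *t, unsigned long key)
    {
        struct obj *o;

        pthread_mutex_lock(&t->lock);
        for (o = t->head; o; o = o->next) {
            if (o->key == key) {
                if (!obj_get(o))        /* ref must be taken under the lock */
                    o = NULL;
                pthread_mutex_unlock(&t->lock);
                return o;
            }
        }
        pthread_mutex_unlock(&t->lock);
        return NULL;
    }

    int main(void)
    {
        struct obj a = { .next = NULL, .key = 7 };
        struct table t = { .lock = PTHREAD_MUTEX_INITIALIZER, .head = &a };

        return find(&t, 7) == &a ? 0 : 1;
    }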
1060 struct nvmet_fc_tgtport *tgtport = in nvmet_fc_free_tgtport() local
1062 struct device *dev = tgtport->dev; in nvmet_fc_free_tgtport()
1066 list_del(&tgtport->tgt_list); in nvmet_fc_free_tgtport()
1069 nvmet_fc_free_ls_iodlist(tgtport); in nvmet_fc_free_tgtport()
1072 tgtport->ops->targetport_delete(&tgtport->fc_target_port); in nvmet_fc_free_tgtport()
1075 tgtport->fc_target_port.port_num); in nvmet_fc_free_tgtport()
1077 ida_destroy(&tgtport->assoc_cnt); in nvmet_fc_free_tgtport()
1079 kfree(tgtport); in nvmet_fc_free_tgtport()
1085 nvmet_fc_tgtport_put(struct nvmet_fc_tgtport *tgtport) in nvmet_fc_tgtport_put() argument
1087 kref_put(&tgtport->ref, nvmet_fc_free_tgtport); in nvmet_fc_tgtport_put()
1091 nvmet_fc_tgtport_get(struct nvmet_fc_tgtport *tgtport) in nvmet_fc_tgtport_get() argument
1093 return kref_get_unless_zero(&tgtport->ref); in nvmet_fc_tgtport_get()
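nvmet_fc_tgtport_get()/nvmet_fc_tgtport_put() are thin wrappers over kref_get_unless_zero() and kref_put(), so a racing lookup fails cleanly once the last reference is gone and teardown has begun. A userspace model of the two primitives with C11 atomics (obj/obj_release are hypothetical; kernel krefs add stricter memory-ordering guarantees than this sketch):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct obj {
        atomic_int ref;
    };

    static void obj_release(struct obj *o) { (void)o; printf("released\n"); }

    /* like kref_get_unless_zero(): only succeed while the object is live */
    static bool obj_get(struct obj *o)
    {
        int v = atomic_load(&o->ref);

        while (v) {
            if (atomic_compare_exchange_weak(&o->ref, &v, v + 1))
                return true;    /* v is reloaded on CAS failure */
        }
        return false;           /* count already hit zero: object is dying */
    }

    /* like kref_put(): the release function runs exactly once, at zero */
    static void obj_put(struct obj *o)
    {
        if (atomic_fetch_sub(&o->ref, 1) == 1)
            obj_release(o);
    }

    int main(void)
    {
        struct obj o = { .ref = 1 };

        if (obj_get(&o))    /* succeeds: 1 -> 2 */
            obj_put(&o);
        obj_put(&o);        /* final put triggers obj_release() */
        return 0;
    }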
1097 __nvmet_fc_free_assocs(struct nvmet_fc_tgtport *tgtport) in __nvmet_fc_free_assocs() argument
1102 spin_lock_irqsave(&tgtport->lock, flags); in __nvmet_fc_free_assocs()
1104 &tgtport->assoc_list, a_list) { in __nvmet_fc_free_assocs()
1107 spin_unlock_irqrestore(&tgtport->lock, flags); in __nvmet_fc_free_assocs()
1110 spin_lock_irqsave(&tgtport->lock, flags); in __nvmet_fc_free_assocs()
1112 spin_unlock_irqrestore(&tgtport->lock, flags); in __nvmet_fc_free_assocs()
1121 struct nvmet_fc_tgtport *tgtport, *next; in nvmet_fc_delete_ctrl() local
1129 list_for_each_entry_safe(tgtport, next, &nvmet_fc_target_list, in nvmet_fc_delete_ctrl()
1131 if (!nvmet_fc_tgtport_get(tgtport)) in nvmet_fc_delete_ctrl()
1135 spin_lock_irqsave(&tgtport->lock, flags); in nvmet_fc_delete_ctrl()
1136 list_for_each_entry(assoc, &tgtport->assoc_list, a_list) { in nvmet_fc_delete_ctrl()
1144 spin_unlock_irqrestore(&tgtport->lock, flags); in nvmet_fc_delete_ctrl()
1146 nvmet_fc_tgtport_put(tgtport); in nvmet_fc_delete_ctrl()
1172 struct nvmet_fc_tgtport *tgtport = targetport_to_tgtport(target_port); in nvmet_fc_unregister_targetport() local
1175 __nvmet_fc_free_assocs(tgtport); in nvmet_fc_unregister_targetport()
1177 nvmet_fc_tgtport_put(tgtport); in nvmet_fc_unregister_targetport()
1284 nvmet_fc_ls_create_association(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_ls_create_association() argument
1322 iod->assoc = nvmet_fc_alloc_target_assoc(tgtport); in nvmet_fc_ls_create_association()
1334 dev_err(tgtport->dev, in nvmet_fc_ls_create_association()
1370 nvmet_fc_ls_create_connection(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_ls_create_connection() argument
1408 iod->assoc = nvmet_fc_find_target_assoc(tgtport, in nvmet_fc_ls_create_connection()
1425 dev_err(tgtport->dev, in nvmet_fc_ls_create_connection()
1458 nvmet_fc_ls_disconnect(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_ls_disconnect() argument
1496 assoc = nvmet_fc_find_target_assoc(tgtport, in nvmet_fc_ls_disconnect()
1502 queue = nvmet_fc_find_target_queue(tgtport, in nvmet_fc_ls_disconnect()
1515 dev_err(tgtport->dev, in nvmet_fc_ls_disconnect()
1572 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_xmt_ls_rsp_done() local
1574 fc_dma_sync_single_for_cpu(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp_done()
1576 nvmet_fc_free_ls_iod(tgtport, iod); in nvmet_fc_xmt_ls_rsp_done()
1577 nvmet_fc_tgtport_put(tgtport); in nvmet_fc_xmt_ls_rsp_done()
1581 nvmet_fc_xmt_ls_rsp(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_xmt_ls_rsp() argument
1586 fc_dma_sync_single_for_device(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp()
1589 ret = tgtport->ops->xmt_ls_rsp(&tgtport->fc_target_port, iod->lsreq); in nvmet_fc_xmt_ls_rsp()
1598 nvmet_fc_handle_ls_rqst(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_handle_ls_rqst() argument
1621 nvmet_fc_ls_create_association(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1625 nvmet_fc_ls_create_connection(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1629 nvmet_fc_ls_disconnect(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1637 nvmet_fc_xmt_ls_rsp(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1648 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_handle_ls_rqst_work() local
1650 nvmet_fc_handle_ls_rqst(tgtport, iod); in nvmet_fc_handle_ls_rqst_work()
1676 struct nvmet_fc_tgtport *tgtport = targetport_to_tgtport(target_port); in nvmet_fc_rcv_ls_req() local
1682 if (!nvmet_fc_tgtport_get(tgtport)) in nvmet_fc_rcv_ls_req()
1685 iod = nvmet_fc_alloc_ls_iod(tgtport); in nvmet_fc_rcv_ls_req()
1687 nvmet_fc_tgtport_put(tgtport); in nvmet_fc_rcv_ls_req()
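nvmet_fc_rcv_ls_req() shows the reference discipline of the LS path: take a tgtport reference on entry, drop it immediately if no iod can be allocated, otherwise hold it across the asynchronous response until nvmet_fc_xmt_ls_rsp_done() frees the iod and puts the reference. A toy model of that lifetime (all names hypothetical; the completion is invoked synchronously here only for the sake of a runnable example):

    #include <stdbool.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct port { int ref; };
    struct iod  { int unused; };

    static bool port_get(struct port *p) { p->ref++; return true; }   /* always live in this toy */
    static void port_put(struct port *p) { if (--p->ref == 0) printf("port freed\n"); }
    static struct iod *iod_alloc(struct port *p) { (void)p; return malloc(sizeof(struct iod)); }
    static void iod_free(struct port *p, struct iod *iod) { (void)p; free(iod); }

    /* LLDD completion: mirror image of the receive side */
    static void xmt_ls_rsp_done(struct port *p, struct iod *iod)
    {
        iod_free(p, iod);
        port_put(p);
    }

    static int rcv_ls_req(struct port *p)
    {
        struct iod *iod;

        if (!port_get(p))       /* pin the port across the async response */
            return -1;
        iod = iod_alloc(p);
        if (!iod) {
            port_put(p);        /* nothing in flight: drop the pin */
            return -1;
        }
        /* ...handle the LS and transmit the response... */
        xmt_ls_rsp_done(p, iod);    /* synchronous stand-in for the completion */
        return 0;
    }

    int main(void)
    {
        struct port p = { .ref = 1 };

        rcv_ls_req(&p);
        port_put(&p);   /* final put */
        return 0;
    }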
1721 fod->data_sg_cnt = fc_dma_map_sg(fod->tgtport->dev, sg, nent, in nvmet_fc_alloc_tgt_pgs()
1739 fc_dma_unmap_sg(fod->tgtport->dev, fod->data_sg, fod->data_sg_cnt, in nvmet_fc_free_tgt_pgs()
1765 nvmet_fc_prep_fcp_rsp(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_prep_fcp_rsp() argument
1823 fc_dma_sync_single_for_device(tgtport->dev, fod->rspdma, in nvmet_fc_prep_fcp_rsp()
1830 nvmet_fc_abort_op(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_abort_op() argument
1844 tgtport->ops->fcp_abort(&tgtport->fc_target_port, fcpreq); in nvmet_fc_abort_op()
1850 nvmet_fc_xmt_fcp_rsp(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_xmt_fcp_rsp() argument
1858 nvmet_fc_prep_fcp_rsp(tgtport, fod); in nvmet_fc_xmt_fcp_rsp()
1860 ret = tgtport->ops->fcp_op(&tgtport->fc_target_port, fod->fcpreq); in nvmet_fc_xmt_fcp_rsp()
1862 nvmet_fc_abort_op(tgtport, fod); in nvmet_fc_xmt_fcp_rsp()
1866 nvmet_fc_transfer_fcp_data(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_transfer_fcp_data() argument
1892 fcpreq->sg_cnt < tgtport->max_sg_cnt && in nvmet_fc_transfer_fcp_data()
1919 (tgtport->ops->target_features & NVMET_FCTGTFEAT_READDATA_RSP)) { in nvmet_fc_transfer_fcp_data()
1921 nvmet_fc_prep_fcp_rsp(tgtport, fod); in nvmet_fc_transfer_fcp_data()
1924 ret = tgtport->ops->fcp_op(&tgtport->fc_target_port, fod->fcpreq); in nvmet_fc_transfer_fcp_data()
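nvmet_fc_transfer_fcp_data() above also carries the NVMET_FCTGTFEAT_READDATA_RSP optimization: if the LLDD advertises the feature, the final read-data transfer is fused with the already-prepared FCP response in a single operation, saving one fcp_op round trip. The decision in isolation (enum names illustrative):

    #include <stdbool.h>

    enum fcp_op { OP_READDATA, OP_READDATA_RSP };

    static enum fcp_op pick_read_op(bool last_xfer, bool lldd_has_readdata_rsp)
    {
        /* fuse data + response only on the last transfer, and only
         * when the LLDD advertised NVMET_FCTGTFEAT_READDATA_RSP */
        return (last_xfer && lldd_has_readdata_rsp) ?
            OP_READDATA_RSP : OP_READDATA;
    }

    int main(void)
    {
        return pick_read_op(true, true) == OP_READDATA_RSP ? 0 : 1;
    }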
1950 struct nvmet_fc_tgtport *tgtport = fod->tgtport; in __nvmet_fc_fod_op_abort() local
1959 nvmet_fc_abort_op(tgtport, fod); in __nvmet_fc_fod_op_abort()
1973 struct nvmet_fc_tgtport *tgtport = fod->tgtport; in nvmet_fc_fod_op_done() local
2004 nvmet_fc_transfer_fcp_data(tgtport, fod, in nvmet_fc_fod_op_done()
2019 nvmet_fc_abort_op(tgtport, fod); in nvmet_fc_fod_op_done()
2035 nvmet_fc_transfer_fcp_data(tgtport, fod, in nvmet_fc_fod_op_done()
2045 nvmet_fc_xmt_fcp_rsp(tgtport, fod); in nvmet_fc_fod_op_done()
2075 if (fod->tgtport->ops->target_features & NVMET_FCTGTFEAT_OPDONE_IN_ISR) in nvmet_fc_xmt_fcp_op_done()
2086 __nvmet_fc_fcp_nvme_cmd_done(struct nvmet_fc_tgtport *tgtport, in __nvmet_fc_fcp_nvme_cmd_done() argument
2103 nvmet_fc_abort_op(tgtport, fod); in __nvmet_fc_fcp_nvme_cmd_done()
2124 nvmet_fc_transfer_fcp_data(tgtport, fod, in __nvmet_fc_fcp_nvme_cmd_done()
2135 nvmet_fc_xmt_fcp_rsp(tgtport, fod); in __nvmet_fc_fcp_nvme_cmd_done()
2143 struct nvmet_fc_tgtport *tgtport = fod->tgtport; in nvmet_fc_fcp_nvme_cmd_done() local
2145 __nvmet_fc_fcp_nvme_cmd_done(tgtport, fod, 0); in nvmet_fc_fcp_nvme_cmd_done()
2153 nvmet_fc_handle_fcp_rqst(struct nvmet_fc_tgtport *tgtport, in nvmet_fc_handle_fcp_rqst() argument
2223 nvmet_fc_transfer_fcp_data(tgtport, fod, NVMET_FCOP_WRITEDATA); in nvmet_fc_handle_fcp_rqst()
2237 nvmet_fc_abort_op(tgtport, fod); in nvmet_fc_handle_fcp_rqst()
2248 struct nvmet_fc_tgtport *tgtport = fod->tgtport; in nvmet_fc_handle_fcp_rqst_work() local
2250 nvmet_fc_handle_fcp_rqst(tgtport, fod); in nvmet_fc_handle_fcp_rqst_work()
2305 struct nvmet_fc_tgtport *tgtport = targetport_to_tgtport(target_port); in nvmet_fc_rcv_fcp_req() local
2319 queue = nvmet_fc_find_target_queue(tgtport, in nvmet_fc_rcv_fcp_req()
2342 nvmet_fc_queue_fcp_req(tgtport, queue, fcpreq); in nvmet_fc_rcv_fcp_req()
2347 if (!tgtport->ops->defer_rcv) { in nvmet_fc_rcv_fcp_req()
2510 struct nvmet_fc_tgtport *tgtport; in nvmet_fc_add_port() local
2529 list_for_each_entry(tgtport, &nvmet_fc_target_list, tgt_list) { in nvmet_fc_add_port()
2530 if ((tgtport->fc_target_port.node_name == traddr.nn) && in nvmet_fc_add_port()
2531 (tgtport->fc_target_port.port_name == traddr.pn)) { in nvmet_fc_add_port()
2532 tgtport->port = port; in nvmet_fc_add_port()
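Finally, nvmet_fc_add_port() binds a configured nvmet port to a registered target port by matching the transport address's WWNN/WWPN pair (traddr.nn/traddr.pn) against each tgtport's node_name/port_name. The match itself is a plain pair of 64-bit comparisons over the registered list; a minimal sketch with a hypothetical match_traddr() helper:

    #include <stddef.h>
    #include <stdint.h>

    struct tgt {
        struct tgt *next;
        uint64_t node_name;     /* WWNN */
        uint64_t port_name;     /* WWPN */
    };

    /* hypothetical helper: find the registered target matching a traddr */
    static struct tgt *match_traddr(struct tgt *head, uint64_t nn, uint64_t pn)
    {
        for (struct tgt *t = head; t; t = t->next)
            if (t->node_name == nn && t->port_name == pn)
                return t;
        return NULL;
    }

    int main(void) { return match_traddr(NULL, 0, 0) == NULL ? 0 : 1; }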