/Linux-v5.4/drivers/infiniband/sw/siw/

siw_cm.c
    329  event.ird = cep->ird;  in siw_cm_upcall()
    332  event.ird = cep->ord;  in siw_cm_upcall()
    333  event.ord = cep->ird;  in siw_cm_upcall()
    654  cep->ord = ntohs(v2->ird) & MPA_IRD_ORD_MASK;  in siw_proc_mpareq()
    656  cep->ird = ntohs(v2->ord) & MPA_IRD_ORD_MASK;  in siw_proc_mpareq()
    657  cep->ird = min(cep->ird, SIW_MAX_IRD_QP);  in siw_proc_mpareq()
    660  cep->mpa.v2_ctrl.ird = htons(cep->ird);  in siw_proc_mpareq()
    671  if (v2->ird & MPA_V2_PEER_TO_PEER) {  in siw_proc_mpareq()
    672  cep->mpa.v2_ctrl.ird |= MPA_V2_PEER_TO_PEER;  in siw_proc_mpareq()
    790  rep_ird = ntohs(v2->ird) & MPA_IRD_ORD_MASK;  in siw_proc_mpareply()
    [all …]
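The siw hits above show the core of MPA v2 IRD/ORD negotiation: the peer's advertised IRD, masked to the value bits of the 16-bit field, bounds the local ORD; the peer's ORD bounds the local IRD; the result is clamped to a driver limit; and the accepted values are echoed back in network byte order along with the optional peer-to-peer flag. Below is a minimal userspace sketch of that pattern. The types (struct cep, struct mpa_v2_hdr), the helper name and the mask/limit constants are simplified stand-ins, not the siw definitions.

#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>

#define MPA_IRD_ORD_MASK    0x3fff   /* stand-in: low bits carrying the value   */
#define MPA_V2_PEER_TO_PEER 0x8000   /* stand-in for the peer-to-peer RTR flag  */
#define MAX_IRD_QP          128      /* stand-in for SIW_MAX_IRD_QP             */
#define MAX_ORD_QP          128      /* stand-in for SIW_MAX_ORD_QP             */

struct mpa_v2_hdr {                  /* wire format, both fields big-endian */
	uint16_t ird;
	uint16_t ord;
};

struct cep {                         /* connection endpoint, host byte order */
	uint16_t ird;                /* inbound RDMA Reads we will accept    */
	uint16_t ord;                /* outbound RDMA Reads we may issue     */
};

/* Digest the peer's MPA v2 request and build the matching reply words. */
static void proc_mpa_req(struct cep *cep, const struct mpa_v2_hdr *req,
			 struct mpa_v2_hdr *rep)
{
	/* The peer's IRD limits how many reads we keep outstanding (our ORD). */
	cep->ord = ntohs(req->ird) & MPA_IRD_ORD_MASK;
	if (cep->ord > MAX_ORD_QP)
		cep->ord = MAX_ORD_QP;

	/* The peer's ORD tells us how many inbound reads to provision (our IRD). */
	cep->ird = ntohs(req->ord) & MPA_IRD_ORD_MASK;
	if (cep->ird > MAX_IRD_QP)
		cep->ird = MAX_IRD_QP;

	/* Echo the accepted values, plus the peer-to-peer flag if requested. */
	rep->ird = htons(cep->ird);
	rep->ord = htons(cep->ord);
	if (ntohs(req->ird) & MPA_V2_PEER_TO_PEER)
		rep->ird |= htons(MPA_V2_PEER_TO_PEER);
}

int main(void)
{
	struct mpa_v2_hdr req = { .ird = htons(255), .ord = htons(4) };
	struct mpa_v2_hdr rep;
	struct cep cep;

	proc_mpa_req(&cep, &req, &rep);
	printf("negotiated ird=%u ord=%u\n", cep.ird, cep.ord);
	return 0;
}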
siw_cm.h
    58  int ird;  (member)
iwarp.h
    70  __be16 ird;  (member)
/Linux-v5.4/include/rdma/

iw_cm.h
    58  u8 ird;  (member)
    105  u32 ird;  (member)
/Linux-v5.4/drivers/infiniband/hw/qedr/

qedr_iw_cm.c
    111  event.ird = params->cm_info->ird;  in qedr_iw_mpa_request()
    129  event.ird = params->cm_info->ird;  in qedr_iw_issue_event()
    573  conn_param->ord, conn_param->ird, conn_param->private_data,  in qedr_iw_connect()
    577  cm_info->ird = conn_param->ird;  in qedr_iw_connect()
    700  params.ird = conn_param->ird;  in qedr_iw_accept()
/Linux-v5.4/drivers/infiniband/hw/cxgb4/

cm.c
    1001  pr_debug("initiator ird %u ord %u\n", ep->ird,  in send_mpa_req()
    1003  mpa_v2_params.ird = htons((u16)ep->ird);  in send_mpa_req()
    1007  mpa_v2_params.ird |= htons(MPA_V2_PEER2PEER_MODEL);  in send_mpa_req()
    1093  mpa_v2_params.ird = htons(((u16)ep->ird) |  in send_mpa_reject()
    1177  mpa_v2_params.ird = htons((u16)ep->ird);  in send_mpa_reply()
    1181  mpa_v2_params.ird |= htons(MPA_V2_PEER2PEER_MODEL);  in send_mpa_reply()
    1333  event.ord = ep->ird;  in connect_reply_upcall()
    1334  event.ird = ep->ord;  in connect_reply_upcall()
    1343  event.ird = cur_max_read_depth(ep->com.dev);  in connect_reply_upcall()
    1375  event.ird = ep->ird;  in connect_request_upcall()
    [all …]
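Two patterns recur in the cxgb4 hits: send_mpa_req()/send_mpa_reply() pack the endpoint's host-order ird/ord into big-endian MPA v2 words and may OR in the peer-to-peer model bit, while connect_reply_upcall() cross-reports the stored values (event.ord from ep->ird, event.ird from ep->ord) to the iw_cm consumer. A compact stand-in sketch of both shapes follows; the flag value and struct layouts are illustrative only, not the driver's.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>

#define MPA_V2_PEER2PEER_MODEL 0x8000          /* stand-in value, not cxgb4's */

struct mpa_v2_conn_params { uint16_t ird, ord; };   /* wire words, big-endian */
struct ep_sketch          { uint16_t ird, ord; };   /* endpoint, host order   */
struct event_sketch       { uint32_t ird, ord; };   /* reported to the ULP    */

/* send_mpa_req()/send_mpa_reply() shape: pack and optionally flag p2p. */
static void build_mpa_v2_params(const struct ep_sketch *ep, bool peer2peer,
				struct mpa_v2_conn_params *p)
{
	p->ird = htons(ep->ird);
	p->ord = htons(ep->ord);
	if (peer2peer)
		p->ird |= htons(MPA_V2_PEER2PEER_MODEL);
}

/* connect_reply_upcall() shape: cross-report the stored values. */
static void fill_connect_reply_event(const struct ep_sketch *ep,
				     struct event_sketch *ev)
{
	ev->ord = ep->ird;
	ev->ird = ep->ord;
}

int main(void)
{
	struct ep_sketch ep = { .ird = 8, .ord = 4 };
	struct mpa_v2_conn_params wire;
	struct event_sketch ev;

	build_mpa_v2_params(&ep, true, &wire);
	fill_connect_reply_event(&ep, &ev);
	printf("wire ird=0x%04x, event ird=%u ord=%u\n",
	       ntohs(wire.ird), ev.ird, ev.ord);
	return 0;
}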
iw_cxgb4.h
    714  __be16 ird;  (member)
    894  u32 ird;  (member)
qp.c
    62  static int alloc_ird(struct c4iw_dev *dev, u32 ird)  in alloc_ird() (argument)
    67  if (ird <= dev->avail_ird)  in alloc_ird()
    68  dev->avail_ird -= ird;  in alloc_ird()
    80  static void free_ird(struct c4iw_dev *dev, int ird)  in free_ird() (argument)
    83  dev->avail_ird += ird;  in free_ird()
    1755  qhp->wq.sq.qid, qhp->ep->hwtid, qhp->ep->ird, qhp->ep->ord);  in rdma_init()
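alloc_ird()/free_ird() above implement a simple device-wide budget: a QP's inbound-read depth is subtracted from dev->avail_ird when the connection is initialized and added back when it is torn down. The sketch below mirrors that accounting in userspace; the mutex and the -ENOMEM fallback are assumptions added for illustration, since the listing only shows the balance check and the add/subtract.

#include <errno.h>
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

struct dev_budget {
	pthread_mutex_t lock;
	uint32_t avail_ird;      /* remaining device-wide inbound read depth */
};

static int alloc_ird(struct dev_budget *dev, uint32_t ird)
{
	int ret = 0;

	pthread_mutex_lock(&dev->lock);
	if (ird <= dev->avail_ird)
		dev->avail_ird -= ird;      /* reserve this QP's share */
	else
		ret = -ENOMEM;              /* pool exhausted (assumed error code) */
	pthread_mutex_unlock(&dev->lock);
	return ret;
}

static void free_ird(struct dev_budget *dev, uint32_t ird)
{
	pthread_mutex_lock(&dev->lock);
	dev->avail_ird += ird;              /* give the reservation back */
	pthread_mutex_unlock(&dev->lock);
}

int main(void)
{
	struct dev_budget dev = { .lock = PTHREAD_MUTEX_INITIALIZER, .avail_ird = 32 };
	int a = alloc_ird(&dev, 16);
	int b = alloc_ird(&dev, 32);

	printf("alloc 16 -> %d, alloc 32 -> %d\n", a, b);
	free_ird(&dev, 16);
	return 0;
}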
restrack.c
    254  if (rdma_nl_put_driver_u32(msg, "ird", ep->ird))  in fill_res_ep_entry()
/Linux-v5.4/drivers/net/ethernet/qlogic/qed/

qed_iwarp.c
    53  __be16 ird;  (member)
    774  mpa_ird = ntohs(mpa_v2->ird);  in qed_iwarp_mpa_received()
    783  ep->cm_info.ird = (u8)min_t(u16,  in qed_iwarp_mpa_received()
    809  ep->cm_info.ird = QED_IWARP_IRD_DEFAULT;  in qed_iwarp_mpa_received()
    815  mpa_rev, ep->cm_info.ord, ep->cm_info.ird, ep->rtr_type,  in qed_iwarp_mpa_received()
    878  p_mpa_ramrod->common.out_rq.ird = ep->cm_info.ird;  in qed_iwarp_mpa_offload()
    922  ep->cm_info.ird,  in qed_iwarp_mpa_offload()
    962  mpa_ird = ntohs(mpa_v2_params->ird);  in qed_iwarp_parse_private_data()
    965  ep->cm_info.ird = (u8)(mpa_ord & MPA_V2_IRD_ORD_MASK);  in qed_iwarp_parse_private_data()
    992  ep->mpa_rev, ep->cm_info.ord, ep->cm_info.ird);  in qed_iwarp_mpa_reply_arrived()
    [all …]
qed_hsi.h
    10088  __le32 ird;  (member)
/Linux-v5.4/drivers/iommu/

intel_irq_remapping.c
    1349  struct intel_ir_data *data, *ird;  in intel_irq_remapping_alloc() (local)
    1394  ird = kzalloc(sizeof(*ird), GFP_KERNEL);  in intel_irq_remapping_alloc()
    1395  if (!ird)  in intel_irq_remapping_alloc()
    1398  ird->irq_2_iommu = data->irq_2_iommu;  in intel_irq_remapping_alloc()
    1399  ird->irq_2_iommu.sub_handle = i;  in intel_irq_remapping_alloc()
    1401  ird = data;  in intel_irq_remapping_alloc()
    1405  irq_data->chip_data = ird;  in intel_irq_remapping_alloc()
    1407  intel_irq_remapping_prepare_irte(ird, irq_cfg, info, index, i);  in intel_irq_remapping_alloc()
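This hit is unrelated to the iWARP read depth: here ird is simply a local struct intel_ir_data pointer in the interrupt-remapping allocation path. The visible pattern is that the first sub-IRQ reuses the already-prepared data object, while each later sub-IRQ gets its own zeroed copy tagged with its sub_handle index. A small stand-in sketch of that allocation shape follows; the type and field names are simplified, not the kernel's.

#include <stdio.h>
#include <stdlib.h>

struct irq_2_iommu { unsigned int irte_index; unsigned int sub_handle; };
struct ir_data     { struct irq_2_iommu irq_2_iommu; };

/* Return the chip-data object to attach to sub-IRQ i, or NULL on OOM. */
static struct ir_data *chip_data_for(struct ir_data *base, unsigned int i)
{
	struct ir_data *ird;

	if (i == 0)
		return base;                 /* first entry keeps the original */

	ird = calloc(1, sizeof(*ird));       /* later entries get a copy ...   */
	if (!ird)
		return NULL;
	ird->irq_2_iommu = base->irq_2_iommu;
	ird->irq_2_iommu.sub_handle = i;     /* ... tagged with their index    */
	return ird;
}

int main(void)
{
	struct ir_data base = { .irq_2_iommu = { .irte_index = 42, .sub_handle = 0 } };
	struct ir_data *second = chip_data_for(&base, 1);

	if (second && second != &base) {
		printf("sub-IRQ 1 -> sub_handle %u\n", second->irq_2_iommu.sub_handle);
		free(second);
	}
	return 0;
}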
/Linux-v5.4/drivers/infiniband/hw/cxgb3/

iwch_cm.c
    760  event.ird = event.ord = 8;  in connect_request_upcall()
    782  event.ird = event.ord = 8;  in established_upcall()
    911  attrs.max_ird = ep->ird;  in process_mpa_reply()
    1799  (conn_param->ird > qp->rhp->attr.max_rdma_reads_per_qp)) {  in iwch_accept_cr()
    1809  ep->ird = conn_param->ird;  in iwch_accept_cr()
    1812  if (peer2peer && ep->ird == 0)  in iwch_accept_cr()
    1813  ep->ird = 1;  in iwch_accept_cr()
    1815  pr_debug("%s %d ird %d ord %d\n", __func__, __LINE__, ep->ird, ep->ord);  in iwch_accept_cr()
    1819  attrs.max_ird = ep->ird;  in iwch_accept_cr()
    1905  ep->ird = conn_param->ird;  in iwch_connect()
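The cxgb3 accept path validates the consumer-requested IRD against the per-QP hardware limit, stores it on the endpoint, and forces it to at least 1 when the peer-to-peer MPA mode is active (iwch_qp.c below repeats the same ird == 0 fixup for the non-initiator side in rdma_init()). Here is a sketch of those checks; the error code, type names and helper name are assumptions, only the comparisons come from the listing.

#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct dev_attr   { uint32_t max_rdma_reads_per_qp; };
struct endpoint   { uint32_t ird, ord; };
struct conn_param { uint32_t ird, ord; };

static int accept_set_ird(struct endpoint *ep, const struct conn_param *cp,
			  const struct dev_attr *attr, bool peer2peer)
{
	if (cp->ird > attr->max_rdma_reads_per_qp)
		return -EINVAL;         /* more inbound reads than the HW supports */

	ep->ird = cp->ird;
	ep->ord = cp->ord;

	/* Peer-to-peer RTR needs at least one inbound read credit. */
	if (peer2peer && ep->ird == 0)
		ep->ird = 1;
	return 0;
}

int main(void)
{
	struct dev_attr attr = { .max_rdma_reads_per_qp = 8 };
	struct conn_param cp = { .ird = 0, .ord = 2 };
	struct endpoint ep;
	int ret = accept_set_ird(&ep, &cp, &attr, true);

	printf("accept -> %d, ird=%u\n", ret, ep.ird);
	return 0;
}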
iwch_cm.h
    192  u32 ird;  (member)
cxio_wr.h
    347  u32 ird;  (member)
    372  __be32 ird;  (member)
iwch_qp.c
    843  init_attr.ird = qhp->attr.max_ird;  in rdma_init()
    853  if (init_attr.ird == 0 && !qhp->attr.mpa_attr.initiator)  in rdma_init()
    854  init_attr.ird = 1;  in rdma_init()
cxio_hal.c
    835  wqe->ird = cpu_to_be32(attr->ird);  in cxio_rdma_init()
/Linux-v5.4/include/linux/qed/

qed_rdma_if.h
    495  u8 ird;  (member)
    545  u8 ird;  (member)
/Linux-v5.4/drivers/infiniband/hw/ocrdma/

ocrdma.h
    115  u8 ird;  (member)
ocrdma_hw.c
    1213  attr->ird = 1;  in ocrdma_get_attr()
    2311  if (dev->attr.ird == 0)  in ocrdma_set_create_qp_ird_cmd()
/Linux-v5.4/drivers/infiniband/core/

cma.c
    2289  event.param.conn.initiator_depth = iw_event->ird;  in cma_iw_handler()
    2306  event.param.conn.initiator_depth = iw_event->ird;  in cma_iw_handler()
    2344  event.param.conn.initiator_depth = iw_event->ird;  in iw_conn_req_handler()
    3886  iw_param.ird = conn_param->responder_resources;  in cma_connect_iw()
    3981  iw_param.ird = conn_param->responder_resources;  in cma_accept_iw()
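cma.c translates between the RDMA CM naming (responder_resources / initiator_depth) and the iw_cm naming (ird / ord): outbound, the consumer's responder_resources, i.e. how many inbound RDMA Reads it accepts, becomes iw_param.ird; inbound, the ird reported by the iWARP provider is surfaced as the event's initiator_depth. The sketch below shows that translation with simplified stand-in structs; the two ord lines are assumed companions of the ird lines actually shown in the listing.

#include <stdint.h>
#include <stdio.h>

struct rdma_conn_param_sketch  { uint8_t responder_resources, initiator_depth; };
struct iw_cm_conn_param_sketch { uint32_t ird, ord; };
struct iw_cm_event_sketch      { uint8_t ird, ord; };

/* cma_connect_iw()/cma_accept_iw() direction: consumer view -> iw_cm view. */
static void cm_to_iw(const struct rdma_conn_param_sketch *cp,
		     struct iw_cm_conn_param_sketch *iw)
{
	iw->ird = cp->responder_resources;   /* reads we can serve as responder        */
	iw->ord = cp->initiator_depth;       /* assumed companion of the line above    */
}

/* cma_iw_handler()/iw_conn_req_handler() direction: provider event -> consumer view. */
static void iw_event_to_cm(const struct iw_cm_event_sketch *ev,
			   struct rdma_conn_param_sketch *cp)
{
	cp->initiator_depth     = ev->ird;   /* as shown in the listing                */
	cp->responder_resources = ev->ord;   /* assumed companion, not shown above     */
}

int main(void)
{
	struct rdma_conn_param_sketch cp = { .responder_resources = 4,
					     .initiator_depth = 2 };
	struct iw_cm_conn_param_sketch iw;
	struct iw_cm_event_sketch ev = { .ird = 1, .ord = 1 };

	cm_to_iw(&cp, &iw);
	iw_event_to_cm(&ev, &cp);
	printf("iw.ird=%u iw.ord=%u cp.initiator_depth=%u\n",
	       iw.ird, iw.ord, cp.initiator_depth);
	return 0;
}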
/Linux-v5.4/drivers/infiniband/hw/i40iw/

i40iw_cm.c
    243  event->ird = cm_node->ird_size;  in i40iw_get_cmevent_info()
    274  event.ird = cm_node->ird_size;  in i40iw_send_cm_event()
    280  event.ird = cm_node->ird_size;  in i40iw_send_cm_event()
    2948  i40iw_record_ird_ord(cm_node, conn_param->ird, conn_param->ord);  in i40iw_create_cm_node()