
Searched refs: num_sge (Results 1 – 25 of 90), sorted by relevance
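
For orientation before the per-file hits: in the kernel verbs API, num_sge is the count of valid entries in the sg_list array of a work request (struct ib_send_wr / struct ib_recv_wr), and every provider listed below checks it against the queue's max_sge before touching sg_list. A minimal caller-side sketch, assuming the QP, registered memory and the two ib_sge entries already exist; post_two_segment_send is a made-up helper name, not something from the tree:

    #include <rdma/ib_verbs.h>

    /* Hypothetical helper: post one signaled SEND whose payload is described
     * by two scatter/gather entries.  num_sge tells the provider how many
     * entries of sg_list are valid for this work request. */
    static int post_two_segment_send(struct ib_qp *qp, struct ib_sge *sge)
    {
            struct ib_send_wr wr = {};
            const struct ib_send_wr *bad_wr;

            wr.opcode     = IB_WR_SEND;
            wr.send_flags = IB_SEND_SIGNALED;
            wr.sg_list    = sge;   /* array of { addr, length, lkey } */
            wr.num_sge    = 2;     /* two valid entries in sg_list */

            return ib_post_send(qp, &wr, &bad_wr);
    }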


/Linux-v5.4/drivers/infiniband/core/
uverbs_std_types_mr.c  53 int num_sge; in UVERBS_HANDLER() local
70 num_sge = uverbs_attr_ptr_get_array_size( in UVERBS_HANDLER()
72 if (num_sge < 0) in UVERBS_HANDLER()
73 return num_sge; in UVERBS_HANDLER()
77 return ib_dev->ops.advise_mr(pd, advice, flags, sg_list, num_sge, in UVERBS_HANDLER()
/Linux-v5.4/include/rdma/
rdmavt_mr.h  122 u8 num_sge; member
137 while (ss->num_sge) { in rvt_put_ss()
139 if (--ss->num_sge) in rvt_put_ss()
167 if (--ss->num_sge) in rvt_update_sge()
/Linux-v5.4/drivers/infiniband/sw/rxe/
rxe_verbs.c  251 int num_sge = ibwr->num_sge; in post_one_recv() local
258 if (unlikely(num_sge > rq->max_sge)) { in post_one_recv()
264 for (i = 0; i < num_sge; i++) in post_one_recv()
269 recv_wqe->num_sge = num_sge; in post_one_recv()
272 num_sge * sizeof(struct ib_sge)); in post_one_recv()
276 recv_wqe->dma.num_sge = num_sge; in post_one_recv()
506 int num_sge = ibwr->num_sge; in validate_send_wr() local
509 if (unlikely(num_sge > sq->max_sge)) in validate_send_wr()
534 wr->num_sge = ibwr->num_sge; in init_send_wr()
590 int num_sge = ibwr->num_sge; in init_send_wqe() local
[all …]
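
The rxe hits above show the pattern that repeats across the software providers in this listing: reject the request when num_sge exceeds the queue's limit, then copy num_sge entries of sg_list into the driver's own WQE. A generic sketch of that pattern, with illustrative struct and function names rather than rxe's actual ones:

    #include <linux/string.h>
    #include <rdma/ib_verbs.h>

    #define SW_MAX_SGE 16                  /* assumed per-queue limit */

    struct sw_recv_wqe {
            u32 num_sge;
            struct ib_sge sge[SW_MAX_SGE];
    };

    /* Validate and stash the SGL of a receive work request. */
    static int sw_copy_recv_sges(struct sw_recv_wqe *wqe,
                                 const struct ib_recv_wr *wr, u32 max_sge)
    {
            if (unlikely(wr->num_sge < 0 || (u32)wr->num_sge > max_sge))
                    return -EINVAL;        /* too many SGEs for this queue */

            memcpy(wqe->sge, wr->sg_list, wr->num_sge * sizeof(struct ib_sge));
            wqe->num_sge = wr->num_sge;
            return 0;
    }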
/Linux-v5.4/drivers/infiniband/sw/siw/
siw_verbs.c  661 int num_sge = core_wr->num_sge, bytes = 0; in siw_copy_inline_sgl() local
666 while (num_sge--) { in siw_copy_inline_sgl()
683 sqe->num_sge = bytes > 0 ? 1 : 0; in siw_copy_inline_sgl()
738 if (wr->num_sge > qp->attrs.sq_max_sges) { in siw_post_send()
739 siw_dbg_qp(qp, "too many sge's: %d\n", wr->num_sge); in siw_post_send()
760 wr->num_sge); in siw_post_send()
761 sqe->num_sge = wr->num_sge; in siw_post_send()
769 sqe->num_sge = 1; in siw_post_send()
788 if (unlikely(wr->num_sge != 1)) { in siw_post_send()
798 sqe->num_sge = 1; in siw_post_send()
[all …]
siw_mem.h  37 static inline void siw_unref_mem_sgl(struct siw_mem **mem, unsigned int num_sge) in siw_unref_mem_sgl() argument
39 while (num_sge) { in siw_unref_mem_sgl()
46 num_sge--; in siw_unref_mem_sgl()
siw_verbs.h  25 int num_sge) in siw_copy_sgl() argument
27 while (num_sge--) { in siw_copy_sgl()
siw_qp_rx.c  351 int num_sge = rqe->num_sge; in siw_rqe_get() local
353 if (likely(num_sge <= SIW_MAX_SGE)) { in siw_rqe_get()
363 wqe->rqe.num_sge = num_sge; in siw_rqe_get()
365 while (i < num_sge) { in siw_rqe_get()
376 siw_dbg_qp(qp, "too many sge's: %d\n", rqe->num_sge); in siw_rqe_get()
572 wqe->rqe.num_sge = 1; in siw_proc_write()
709 resp->num_sge = length ? 1 : 0; in siw_init_rresp()
755 wqe->sqe.num_sge = 1; in siw_orqe_start_rx()
/Linux-v5.4/include/uapi/rdma/
rdma_user_rxe.h  71 __u32 num_sge; member
124 __u32 num_sge; member
150 __u32 num_sge; member
siw-abi.h  110 __u8 num_sge; member
128 __u8 num_sge; member
vmw_pvrdma-abi.h  219 __u32 num_sge; /* size of s/g array */ member
227 __u32 num_sge; /* size of s/g array */ member
rvt-abi.h  45 __u8 num_sge; member
/Linux-v5.4/drivers/infiniband/sw/rdmavt/
trace_tx.h  110 __field(int, num_sge)
130 __entry->num_sge = wqe->wr.num_sge;
151 __entry->num_sge,
rc.c  203 ss->num_sge = wqe->wr.num_sge; in rvt_restart_sge()
qp.c  661 for (i = 0; i < wqe->wr.num_sge; i++) { in rvt_swqe_has_lkey()
900 qp->r_sge.num_sge = 0; in rvt_init_qp()
1823 if ((unsigned)wr->num_sge > qp->r_rq.max_sge) { in rvt_post_recv()
1849 wqe->num_sge = wr->num_sge; in rvt_post_recv()
1850 for (i = 0; i < wr->num_sge; i++) { in rvt_post_recv()
1899 (wr->num_sge == 0 || in rvt_qp_valid_operation()
2002 if (unlikely(wr->num_sge > qp->s_max_sge)) in rvt_post_one_wr()
2066 if (wr->num_sge) { in rvt_post_one_wr()
2071 for (i = 0; i < wr->num_sge; i++) { in rvt_post_one_wr()
2085 wqe->wr.num_sge = j; in rvt_post_one_wr()
[all …]
srq.c  244 p->num_sge = wqe->num_sge; in rvt_modify_srq()
245 for (i = 0; i < wqe->num_sge; i++) in rvt_modify_srq()
/Linux-v5.4/net/rds/
ib_send.c  629 send->s_wr.num_sge = 1; in rds_ib_xmit()
645 send->s_wr.num_sge = 2; in rds_ib_xmit()
674 &send->s_wr, send->s_wr.num_sge, send->s_wr.next); in rds_ib_xmit()
789 send->s_atomic_wr.wr.num_sge = 1; in rds_ib_xmit_atomic()
856 int num_sge; in rds_ib_xmit_rdma() local
893 num_sge = op->op_count; in rds_ib_xmit_rdma()
908 if (num_sge > max_sge) { in rds_ib_xmit_rdma()
909 send->s_rdma_wr.wr.num_sge = max_sge; in rds_ib_xmit_rdma()
910 num_sge -= max_sge; in rds_ib_xmit_rdma()
912 send->s_rdma_wr.wr.num_sge = num_sge; in rds_ib_xmit_rdma()
[all …]
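
rds_ib_xmit_rdma() above also shows the opposite direction: when an RDMA operation needs more SGEs than the device allows per work request, it is split across several chained WRs, max_sge entries at a time. A minimal sketch of that chunking, assuming the caller has already allocated enough ib_rdma_wr slots and fills remote_addr/rkey separately (names are illustrative):

    #include <rdma/ib_verbs.h>

    /* Spread total_sge scatter/gather entries over as many chained WRs as
     * needed, never exceeding max_sge per WR. */
    static void split_sgl_across_wrs(struct ib_rdma_wr *wrs,
                                     struct ib_sge *sg_list,
                                     u32 total_sge, u32 max_sge)
    {
            u32 i = 0, consumed = 0;

            while (total_sge) {
                    u32 this_wr = min(total_sge, max_sge);

                    wrs[i].wr.sg_list = sg_list + consumed;
                    wrs[i].wr.num_sge = this_wr;
                    wrs[i].wr.next = (total_sge > this_wr) ? &wrs[i + 1].wr : NULL;

                    consumed += this_wr;
                    total_sge -= this_wr;
                    i++;
            }
    }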
/Linux-v5.4/drivers/infiniband/hw/cxgb3/
iwch_qp.c  66 if (wr->num_sge > T3_MAX_SGE) in build_rdma_send()
72 for (i = 0; i < wr->num_sge; i++) { in build_rdma_send()
81 wqe->send.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_send()
82 *flit_cnt = 4 + ((wr->num_sge) << 1); in build_rdma_send()
92 if (wr->num_sge > T3_MAX_SGE) in build_rdma_write()
109 for (i = 0; i < wr->num_sge; i++) { in build_rdma_write()
121 wqe->write.num_sgle = cpu_to_be32(wr->num_sge); in build_rdma_write()
122 *flit_cnt = 5 + ((wr->num_sge) << 1); in build_rdma_write()
131 if (wr->num_sge > 1) in build_rdma_read()
255 err = iwch_sgl2pbl_map(qhp->rhp, wr->sg_list, wr->num_sge, pbl_addr, in build_rdma_recv()
[all …]
/Linux-v5.4/drivers/infiniband/hw/qib/
qib_uc.c  100 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_uc_req()
277 qp->r_sge.num_sge = 0; in qib_uc_rcv()
429 qp->r_sge.num_sge = 1; in qib_uc_rcv()
431 qp->r_sge.num_sge = 0; in qib_uc_rcv()
511 qp->r_sge.num_sge = 0; in qib_uc_rcv()
qib_ud.c  173 ssge.num_sge = swqe->wr.num_sge; in qib_ud_loopback()
183 if (--ssge.num_sge) in qib_ud_loopback()
319 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_ud_req()
/Linux-v5.4/drivers/infiniband/hw/mlx5/
odp.c  1629 u32 num_sge; member
1634 struct ib_sge *sg_list, u32 num_sge, in num_pending_prefetch_dec() argument
1642 for (i = from; i < num_sge; ++i) { in num_pending_prefetch_dec()
1656 struct ib_sge *sg_list, u32 num_sge) in num_pending_prefetch_inc() argument
1662 for (i = 0; i < num_sge; ++i) { in num_pending_prefetch_inc()
1700 struct ib_sge *sg_list, u32 num_sge) in mlx5_ib_prefetch_sg_list() argument
1706 for (i = 0; i < num_sge; ++i) { in mlx5_ib_prefetch_sg_list()
1728 w->num_sge); in mlx5_ib_prefetch_mr_work()
1733 w->num_sge, 0); in mlx5_ib_prefetch_mr_work()
1739 u32 flags, struct ib_sge *sg_list, u32 num_sge) in mlx5_ib_advise_mr_prefetch() argument
[all …]
/Linux-v5.4/drivers/infiniband/hw/hfi1/
uc.c  155 qp->s_sge.num_sge = wqe->wr.num_sge; in hfi1_make_uc_req()
339 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
504 qp->r_sge.num_sge = 1; in hfi1_uc_rcv()
506 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
581 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
/Linux-v5.4/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_qp.c  651 if (unlikely(wr->num_sge > qp->sq.max_sg || wr->num_sge < 0)) { in pvrdma_post_send()
708 wqe_hdr->num_sge = wr->num_sge; in pvrdma_post_send()
791 for (i = 0; i < wr->num_sge; i++) { in pvrdma_post_send()
859 if (unlikely(wr->num_sge > qp->rq.max_sg || in pvrdma_post_recv()
860 wr->num_sge < 0)) { in pvrdma_post_recv()
879 wqe_hdr->num_sge = wr->num_sge; in pvrdma_post_recv()
883 for (i = 0; i < wr->num_sge; i++) { in pvrdma_post_recv()
/Linux-v5.4/net/sunrpc/xprtrdma/
svc_rdma_sendto.c  207 ctxt->sc_send_wr.num_sge = 0; in svc_rdma_send_ctxt_get()
236 for (i = 1; i < ctxt->sc_send_wr.num_sge; i++) in svc_rdma_send_ctxt_put()
498 ctxt->sc_send_wr.num_sge++; in svc_rdma_dma_map_page()
530 ctxt->sc_send_wr.num_sge++; in svc_rdma_sync_reply_hdr()
769 sctxt->sc_send_wr.num_sge); in svc_rdma_send_reply_msg()
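
The svc_rdma_sendto.c hits show yet another idiom: the send WR starts with num_sge = 0 and each successfully DMA-mapped buffer appends one SGE and bumps the count. A hedged sketch of that incremental build-up; the helper name and layout are illustrative, not the sunrpc code's own:

    #include <rdma/ib_verbs.h>

    /* Map one kernel buffer for DMA and append it to the WR's SGL. */
    static int map_and_append_sge(struct ib_device *dev, struct ib_send_wr *wr,
                                  struct ib_sge *sges, void *buf, size_t len,
                                  u32 lkey)
    {
            u64 addr = ib_dma_map_single(dev, buf, len, DMA_TO_DEVICE);

            if (ib_dma_mapping_error(dev, addr))
                    return -EIO;

            sges[wr->num_sge].addr   = addr;
            sges[wr->num_sge].length = len;
            sges[wr->num_sge].lkey   = lkey;
            wr->num_sge++;                 /* one more valid entry in sg_list */
            return 0;
    }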
/Linux-v5.4/drivers/infiniband/hw/cxgb4/
qp.c  422 for (i = 0; i < wr->num_sge; i++) { in build_immd()
454 int num_sge, u32 *plenp) in build_isgl() argument
466 for (i = 0; i < num_sge; i++) { in build_isgl()
481 isglp->nsge = cpu_to_be16(num_sge); in build_isgl()
495 if (wr->num_sge > T4_MAX_SEND_SGE) in build_rdma_send()
524 if (wr->num_sge) { in build_rdma_send()
536 wr->sg_list, wr->num_sge, &plen); in build_rdma_send()
540 wr->num_sge * sizeof(struct fw_ri_sge); in build_rdma_send()
562 if (wr->num_sge > T4_MAX_SEND_SGE) in build_rdma_write()
575 if (wr->num_sge) { in build_rdma_write()
[all …]
/Linux-v5.4/drivers/infiniband/hw/qedr/
qedr_roce_cm.c  408 for (i = 0; i < swr->num_sge; ++i) in qedr_gsi_build_header()
532 packet->n_seg = swr->num_sge; in qedr_gsi_build_packet()
560 if (wr->num_sge > RDMA_MAX_SGE_PER_SQ_WQE) { in qedr_gsi_post_send()
562 wr->num_sge, RDMA_MAX_SGE_PER_SQ_WQE); in qedr_gsi_post_send()
633 if (wr->num_sge > QEDR_GSI_MAX_RECV_SGE) { in qedr_gsi_post_recv()
636 wr->num_sge, QEDR_GSI_MAX_RECV_SGE); in qedr_gsi_post_recv()
