Home
last modified time | relevance | path

Searched refs: num_sge (Results 1 – 25 of 108) sorted by relevance

12345

/Linux-v6.6/drivers/vfio/pci/pds/
Dcmds.c221 lm_file->num_sge = lm_file->sg_table.nents; in pds_vfio_dma_map_lm_file()
224 sgl_size = lm_file->num_sge * sizeof(struct pds_lm_sg_elem); in pds_vfio_dma_map_lm_file()
254 lm_file->num_sge = 0; in pds_vfio_dma_map_lm_file()
269 lm_file->num_sge * sizeof(*lm_file->sgl), in pds_vfio_dma_unmap_lm_file()
274 lm_file->num_sge = 0; in pds_vfio_dma_unmap_lm_file()
307 cmd.lm_save.num_sge = cpu_to_le32(lm_file->num_sge); in pds_vfio_get_lm_state_cmd()
346 cmd.lm_restore.num_sge = cpu_to_le32(lm_file->num_sge); in pds_vfio_set_lm_state_cmd()
480 u64 sgl_dma, u16 num_sge, u32 offset, in pds_vfio_dirty_seq_ack_cmd() argument
490 .num_sge = cpu_to_le16(num_sge), in pds_vfio_dirty_seq_ack_cmd()
Ddirty.c109 bmp_info->num_sge * sizeof(struct pds_lm_sg_elem), in __pds_vfio_dirty_free_sgl()
113 bmp_info->num_sge = 0; in __pds_vfio_dirty_free_sgl()
151 bmp_info->num_sge = max_sge; in __pds_vfio_dirty_alloc_sgl()
338 u16 num_sge; in pds_vfio_dirty_seq_ack() local
384 num_sge = sg_table.nents; in pds_vfio_dirty_seq_ack()
385 size = num_sge * sizeof(struct pds_lm_sg_elem); in pds_vfio_dirty_seq_ack()
387 err = pds_vfio_dirty_seq_ack_cmd(pds_vfio, bmp_info->sgl_addr, num_sge, in pds_vfio_dirty_seq_ack()
393 num_sge, bmp_info->sgl_addr, ERR_PTR(err)); in pds_vfio_dirty_seq_ack()
/Linux-v6.6/include/rdma/
Drdmavt_mr.h80 u8 num_sge; member
95 while (ss->num_sge) { in rvt_put_ss()
97 if (--ss->num_sge) in rvt_put_ss()
125 if (--ss->num_sge) in rvt_update_sge()
/Linux-v6.6/drivers/infiniband/sw/siw/
Dsiw_verbs.c648 int num_sge = core_wr->num_sge, bytes = 0; in siw_copy_inline_sgl() local
653 while (num_sge--) { in siw_copy_inline_sgl()
670 sqe->num_sge = bytes > 0 ? 1 : 0; in siw_copy_inline_sgl()
826 if (wr->num_sge > qp->attrs.sq_max_sges) { in siw_post_send()
827 siw_dbg_qp(qp, "too many sge's: %d\n", wr->num_sge); in siw_post_send()
848 wr->num_sge); in siw_post_send()
849 sqe->num_sge = wr->num_sge; in siw_post_send()
857 sqe->num_sge = 1; in siw_post_send()
876 if (unlikely(wr->num_sge != 1)) { in siw_post_send()
886 sqe->num_sge = 1; in siw_post_send()
[all …]
Dsiw_mem.h32 static inline void siw_unref_mem_sgl(struct siw_mem **mem, unsigned int num_sge) in siw_unref_mem_sgl() argument
34 while (num_sge) { in siw_unref_mem_sgl()
41 num_sge--; in siw_unref_mem_sgl()
Dsiw_verbs.h25 int num_sge) in siw_copy_sgl() argument
27 while (num_sge--) { in siw_copy_sgl()
Dsiw_qp_rx.c352 int num_sge = rqe->num_sge; in siw_rqe_get() local
354 if (likely(num_sge <= SIW_MAX_SGE)) { in siw_rqe_get()
364 wqe->rqe.num_sge = num_sge; in siw_rqe_get()
366 while (i < num_sge) { in siw_rqe_get()
377 siw_dbg_qp(qp, "too many sge's: %d\n", rqe->num_sge); in siw_rqe_get()
573 wqe->rqe.num_sge = 1; in siw_proc_write()
714 resp->num_sge = length ? 1 : 0; in siw_init_rresp()
764 wqe->sqe.num_sge = 1; in siw_orqe_start_rx()
/Linux-v6.6/drivers/infiniband/sw/rdmavt/
Dtrace_tx.h68 __field(int, num_sge)
88 __entry->num_sge = wqe->wr.num_sge;
109 __entry->num_sge,
Dqp.c625 for (i = 0; i < wqe->wr.num_sge; i++) { in rvt_swqe_has_lkey()
863 qp->r_sge.num_sge = 0; in rvt_init_qp()
1801 if ((unsigned)wr->num_sge > qp->r_rq.max_sge) { in rvt_post_recv()
1827 wqe->num_sge = wr->num_sge; in rvt_post_recv()
1828 for (i = 0; i < wr->num_sge; i++) { in rvt_post_recv()
1877 (wr->num_sge == 0 || in rvt_qp_valid_operation()
1981 if (unlikely(wr->num_sge > qp->s_max_sge)) in rvt_post_one_wr()
2045 if (wr->num_sge) { in rvt_post_one_wr()
2050 for (i = 0; i < wr->num_sge; i++) { in rvt_post_one_wr()
2064 wqe->wr.num_sge = j; in rvt_post_one_wr()
[all …]
Drc.c166 ss->num_sge = wqe->wr.num_sge; in rvt_restart_sge()
Dsrq.c202 p->num_sge = wqe->num_sge; in rvt_modify_srq()
203 for (i = 0; i < wqe->num_sge; i++) in rvt_modify_srq()
/Linux-v6.6/net/rds/
Dib_send.c630 send->s_wr.num_sge = 1; in rds_ib_xmit()
653 send->s_wr.num_sge = 2; in rds_ib_xmit()
683 &send->s_wr, send->s_wr.num_sge, send->s_wr.next); in rds_ib_xmit()
802 send->s_atomic_wr.wr.num_sge = 1; in rds_ib_xmit_atomic()
869 int num_sge; in rds_ib_xmit_rdma() local
915 num_sge = op->op_count; in rds_ib_xmit_rdma()
930 if (num_sge > max_sge) { in rds_ib_xmit_rdma()
931 send->s_rdma_wr.wr.num_sge = max_sge; in rds_ib_xmit_rdma()
932 num_sge -= max_sge; in rds_ib_xmit_rdma()
934 send->s_rdma_wr.wr.num_sge = num_sge; in rds_ib_xmit_rdma()
[all …]
/Linux-v6.6/drivers/infiniband/core/
Duverbs_std_types_mr.c55 int num_sge; in UVERBS_HANDLER() local
72 num_sge = uverbs_attr_ptr_get_array_size( in UVERBS_HANDLER()
74 if (num_sge <= 0) in UVERBS_HANDLER()
75 return num_sge; in UVERBS_HANDLER()
79 return ib_dev->ops.advise_mr(pd, advice, flags, sg_list, num_sge, in UVERBS_HANDLER()
/Linux-v6.6/drivers/infiniband/hw/erdma/
Derdma_qp.c220 while (i < send_wr->num_sge) { in fill_inline_data()
261 if (send_wr->num_sge > qp->dev->attrs.max_send_sge) in fill_sgl()
267 while (i < send_wr->num_sge) { in fill_sgl()
347 if (unlikely(send_wr->num_sge != 1)) in erdma_push_one_sqe()
371 send_wr->num_sge * sizeof(struct ib_sge); in erdma_push_one_sqe()
474 wqe_size += send_wr->num_sge * sizeof(struct ib_sge); in erdma_push_one_sqe()
476 send_wr->num_sge); in erdma_push_one_sqe()
550 if (recv_wr->num_sge == 0) { in erdma_post_recv_one()
552 } else if (recv_wr->num_sge == 1) { in erdma_post_recv_one()
/Linux-v6.6/drivers/infiniband/sw/rxe/
Drxe_verbs.c668 int num_sge = ibwr->num_sge; in validate_send_wr() local
682 if (num_sge > sq->max_sge) { in validate_send_wr()
688 for (i = 0; i < ibwr->num_sge; i++) in validate_send_wr()
814 for (i = 0; i < ibwr->num_sge; i++, sge++) { in copy_inline_data_to_wqe()
824 int num_sge = ibwr->num_sge; in init_send_wqe() local
842 num_sge * sizeof(struct ib_sge)); in init_send_wqe()
849 wqe->dma.num_sge = num_sge; in init_send_wqe()
955 int num_sge = ibwr->num_sge; in post_one_recv() local
966 if (unlikely(num_sge > rq->max_sge)) { in post_one_recv()
973 for (i = 0; i < num_sge; i++) in post_one_recv()
[all …]
/Linux-v6.6/drivers/infiniband/hw/mlx5/
Dwr.c272 for (i = 0; i < wr->num_sge; i++) { in set_data_inl_seg()
569 if (unlikely(send_wr->num_sge != 0) || in set_pi_umr_wr()
935 u8 next_fence, int *num_sge) in handle_qpt_rc() argument
955 *num_sge = 0; in handle_qpt_rc()
962 *num_sge = 0; in handle_qpt_rc()
971 *num_sge = 0; in handle_qpt_rc()
1064 int num_sge; in mlx5_ib_post_send() local
1090 num_sge = wr->num_sge; in mlx5_ib_post_send()
1091 if (unlikely(num_sge > qp->sq.max_gs)) { in mlx5_ib_post_send()
1131 next_fence, &num_sge); in mlx5_ib_post_send()
[all …]
Dodp.c1645 u32 num_sge; member
1657 for (i = 0; i < work->num_sge; ++i) in destroy_prefetch_work()
1711 WARN_ON(!work->num_sge); in mlx5_ib_prefetch_mr_work()
1712 for (i = 0; i < work->num_sge; ++i) { in mlx5_ib_prefetch_mr_work()
1727 struct ib_sge *sg_list, u32 num_sge) in init_prefetch_work() argument
1734 for (i = 0; i < num_sge; ++i) { in init_prefetch_work()
1739 work->num_sge = i; in init_prefetch_work()
1746 work->num_sge = num_sge; in init_prefetch_work()
1753 u32 num_sge) in mlx5_ib_prefetch_sg_list() argument
1759 for (i = 0; i < num_sge; ++i) { in mlx5_ib_prefetch_sg_list()
[all …]
/Linux-v6.6/include/uapi/rdma/
Dsiw-abi.h110 __u8 num_sge; member
128 __u8 num_sge; member
Dvmw_pvrdma-abi.h231 __u32 num_sge; /* size of s/g array */ member
239 __u32 num_sge; /* size of s/g array */ member
/Linux-v6.6/drivers/infiniband/hw/qib/
Dqib_uc.c101 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_uc_req()
278 qp->r_sge.num_sge = 0; in qib_uc_rcv()
430 qp->r_sge.num_sge = 1; in qib_uc_rcv()
432 qp->r_sge.num_sge = 0; in qib_uc_rcv()
512 qp->r_sge.num_sge = 0; in qib_uc_rcv()
Dqib_ud.c173 ssge.num_sge = swqe->wr.num_sge; in qib_ud_loopback()
183 if (--ssge.num_sge) in qib_ud_loopback()
320 qp->s_sge.num_sge = wqe->wr.num_sge; in qib_make_ud_req()
/Linux-v6.6/drivers/infiniband/hw/hfi1/
Duc.c114 qp->s_sge.num_sge = wqe->wr.num_sge; in hfi1_make_uc_req()
293 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
458 qp->r_sge.num_sge = 1; in hfi1_uc_rcv()
460 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
535 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
/Linux-v6.6/drivers/infiniband/ulp/rtrs/
Drtrs.c90 .num_sge = 1, in rtrs_iu_post_recv()
145 .num_sge = 1, in rtrs_iu_post_send()
155 struct ib_sge *sge, unsigned int num_sge, in rtrs_iu_post_rdma_write_imm() argument
167 .wr.num_sge = num_sge, in rtrs_iu_post_rdma_write_imm()
179 for (i = 0; i < num_sge; i++) in rtrs_iu_post_rdma_write_imm()
/Linux-v6.6/drivers/infiniband/hw/vmw_pvrdma/
Dpvrdma_qp.c704 if (unlikely(wr->num_sge > qp->sq.max_sg || wr->num_sge < 0)) { in pvrdma_post_send()
753 wqe_hdr->num_sge = wr->num_sge; in pvrdma_post_send()
836 for (i = 0; i < wr->num_sge; i++) { in pvrdma_post_send()
904 if (unlikely(wr->num_sge > qp->rq.max_sg || in pvrdma_post_recv()
905 wr->num_sge < 0)) { in pvrdma_post_recv()
924 wqe_hdr->num_sge = wr->num_sge; in pvrdma_post_recv()
928 for (i = 0; i < wr->num_sge; i++) { in pvrdma_post_recv()
/Linux-v6.6/drivers/infiniband/hw/cxgb4/
Dqp.c423 for (i = 0; i < wr->num_sge; i++) { in build_immd()
455 int num_sge, u32 *plenp) in build_isgl() argument
467 for (i = 0; i < num_sge; i++) { in build_isgl()
482 isglp->nsge = cpu_to_be16(num_sge); in build_isgl()
496 if (wr->num_sge > T4_MAX_SEND_SGE) in build_rdma_send()
525 if (wr->num_sge) { in build_rdma_send()
537 wr->sg_list, wr->num_sge, &plen); in build_rdma_send()
541 wr->num_sge * sizeof(struct fw_ri_sge); in build_rdma_send()
563 if (wr->num_sge > T4_MAX_SEND_SGE) in build_rdma_write()
576 if (wr->num_sge) { in build_rdma_write()
[all …]

12345