
Searched refs:wqe_size (Results 1 – 25 of 33) sorted by relevance

/Linux-v5.10/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp.h
182 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size,
186 unsigned int wqe_size, u16 *prod_idx);
188 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size);
192 unsigned int wqe_size);
196 unsigned int wqe_size, u16 *cons_idx);
200 unsigned int *wqe_size, u16 *cons_idx);
202 void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size);
208 unsigned int wqe_size, u16 *prod_idx);
214 unsigned int wqe_size,
218 unsigned int wqe_size,
[all …]
hinic_hw_qp.c
639 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size, in hinic_sq_write_db() argument
645 prod_idx += ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_sq_write_db()
662 unsigned int wqe_size, u16 *prod_idx) in hinic_sq_get_wqe() argument
664 struct hinic_hw_wqe *hw_wqe = hinic_get_wqe(sq->wq, wqe_size, in hinic_sq_get_wqe()
678 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size) in hinic_sq_return_wqe() argument
680 hinic_return_wqe(sq->wq, wqe_size); in hinic_sq_return_wqe()
693 struct sk_buff *skb, unsigned int wqe_size) in hinic_sq_write_wqe() argument
700 hinic_cpu_to_be32(sq_wqe, wqe_size); in hinic_sq_write_wqe()
702 hinic_write_wqe(sq->wq, hw_wqe, wqe_size); in hinic_sq_write_wqe()
717 unsigned int *wqe_size, u16 *cons_idx) in hinic_sq_read_wqebb() argument
[all …]
hinic_hw_wq.h
96 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,
99 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size);
101 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size);
103 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,
109 unsigned int wqe_size);
hinic_tx.c
498 unsigned int wqe_size; in hinic_lb_xmit_frame() local
510 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_lb_xmit_frame()
512 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
516 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
528 wqe_size = 0; in hinic_lb_xmit_frame()
534 hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size); in hinic_lb_xmit_frame()
539 hinic_sq_write_db(txq->sq, prod_idx, wqe_size, 0); in hinic_lb_xmit_frame()
559 unsigned int wqe_size; in hinic_xmit_frame() local
591 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_xmit_frame()
593 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_xmit_frame()
[all …]
hinic_hw_wq.c
742 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_get_wqe() argument
750 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift; in hinic_get_wqe()
791 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_return_wqe() argument
793 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_return_wqe()
805 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_put_wqe() argument
807 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_put_wqe()
823 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_read_wqe() argument
826 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_read_wqe()
888 unsigned int wqe_size) in hinic_write_wqe() argument
898 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_write_wqe()
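A pattern worth noting across the hinic hits above: every byte-sized wqe_size is converted to a count of work-queue basic blocks (WQEBBs) with ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size, or the equivalent right shift in hinic_get_wqe(). A minimal standalone sketch of that conversion; the 64-byte WQEBB is an assumption for illustration:

    #include <stdio.h>

    /* Round x up to the next multiple of a (a must be a power of two),
     * mirroring the kernel's ALIGN() macro. */
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned int wqebb_size = 64;  /* assumed basic-block size   */
        unsigned int wqe_size = 200;   /* hypothetical WQE, in bytes */

        /* 200 bytes round up to 256, i.e. 4 WQEBBs; the same value
         * hinic_get_wqe() computes with >> wqebb_size_shift. */
        unsigned int num_wqebbs = ALIGN(wqe_size, wqebb_size) / wqebb_size;

        printf("wqe_size=%u -> num_wqebbs=%u\n", wqe_size, num_wqebbs);
        return 0;
    }

The same quotient also advances prod_idx in hinic_sq_write_db(), so the doorbell always points one full WQE past the entry just written.
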
hinic_hw_cmdq.c
160 unsigned int wqe_size = 0; in cmdq_wqe_size_from_bdlen() local
164 wqe_size = WQE_LCMD_SIZE; in cmdq_wqe_size_from_bdlen()
167 wqe_size = WQE_SCMD_SIZE; in cmdq_wqe_size_from_bdlen()
171 return wqe_size; in cmdq_wqe_size_from_bdlen()
537 unsigned int bufdesc_len, wqe_size; in clear_wqe_complete_bit() local
541 wqe_size = cmdq_wqe_size_from_bdlen(bufdesc_len); in clear_wqe_complete_bit()
542 if (wqe_size == WQE_LCMD_SIZE) { in clear_wqe_complete_bit()
/Linux-v5.10/drivers/infiniband/hw/i40iw/
i40iw_uk.c
60 qp->sq_wrtrk_array[wqe_idx].wqe_size = I40IW_QP_WQE_MIN_SIZE; in i40iw_nop_1()
136 u8 wqe_size, in i40iw_qp_get_next_send_wqe() argument
155 if ((offset + wqe_size) > I40IW_QP_WQE_MAX_SIZE) { in i40iw_qp_get_next_send_wqe()
169 if (((*wqe_idx & 3) == 1) && (wqe_size == I40IW_WQE_SIZE_64)) { in i40iw_qp_get_next_send_wqe()
179 wqe_size / I40IW_QP_WQE_MIN_SIZE, ret_code); in i40iw_qp_get_next_send_wqe()
195 qp->sq_wrtrk_array[*wqe_idx].wqe_size = wqe_size; in i40iw_qp_get_next_send_wqe()
256 u8 wqe_size; in i40iw_rdma_write() local
270 ret_code = i40iw_fragcnt_to_wqesize_sq(op_info->num_lo_sges, &wqe_size); in i40iw_rdma_write()
274 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, wqe_size, total_size, info->wr_id); in i40iw_rdma_write()
324 u8 wqe_size; in i40iw_rdma_read() local
[all …]
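i40iw carries wqe_size as a u8 chosen from a few fixed tiers: i40iw_fragcnt_to_wqesize_sq() (declared in i40iw_user.h below) picks the tier from the fragment count before i40iw_qp_get_next_send_wqe() is called. A hedged sketch of that kind of lookup; the 32/64/96/128-byte tiers are assumptions for illustration, not read out of the driver:

    #include <stdint.h>

    /* Illustrative only: tier sizes assumed, not taken from i40iw. */
    #define QP_WQE_MIN_SIZE 32
    #define QP_WQE_MAX_SIZE 128

    /* Map an SGE/fragment count to the smallest fixed WQE size that
     * can hold it; return -1 for an unsupported count. */
    static int fragcnt_to_wqesize_sq(uint32_t frag_cnt, uint8_t *wqe_size)
    {
        switch (frag_cnt) {
        case 0:
        case 1:
            *wqe_size = QP_WQE_MIN_SIZE;  /* header + 1 fragment */
            break;
        case 2:
        case 3:
            *wqe_size = 64;
            break;
        case 4:
        case 5:
            *wqe_size = 96;
            break;
        case 6:
        case 7:
            *wqe_size = QP_WQE_MAX_SIZE;
            break;
        default:
            return -1;                    /* too many fragments  */
        }
        return 0;
    }
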
i40iw_user.h
328 u8 wqe_size; member
408 u8 wqe_size,
423 enum i40iw_status_code i40iw_fragcnt_to_wqesize_sq(u32 frag_cnt, u8 *wqe_size);
424 enum i40iw_status_code i40iw_fragcnt_to_wqesize_rq(u32 frag_cnt, u8 *wqe_size);
426 u8 *wqe_size);
/Linux-v5.10/drivers/infiniband/hw/qedr/
qedr_hsi_rdma.h
310 u8 wqe_size; member
338 u8 wqe_size; member
374 u8 wqe_size; member
420 u8 wqe_size; member
475 u8 wqe_size; member
498 u8 wqe_size; member
548 u8 wqe_size; member
602 u8 wqe_size; member
628 u8 wqe_size; member
663 u8 wqe_size; member
[all …]
verbs.c
3268 struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_inline_data() argument
3305 (*wqe_size)++; in qedr_prepare_sq_inline_data()
3353 static u32 qedr_prepare_sq_sges(struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_sges() argument
3368 if (wqe_size) in qedr_prepare_sq_sges()
3369 *wqe_size += wr->num_sge; in qedr_prepare_sq_sges()
3390 return qedr_prepare_sq_inline_data(dev, qp, &rwqe->wqe_size, wr, in qedr_prepare_sq_rdma_data()
3394 return qedr_prepare_sq_sges(qp, &rwqe->wqe_size, wr); in qedr_prepare_sq_rdma_data()
3409 return qedr_prepare_sq_inline_data(dev, qp, &swqe->wqe_size, wr, in qedr_prepare_sq_send_data()
3413 return qedr_prepare_sq_sges(qp, &swqe->wqe_size, wr); in qedr_prepare_sq_send_data()
3566 swqe->wqe_size = 2; in __qedr_post_send()
[all …]
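Note that in the qedr hits, wqe_size is a u8 element count rather than a byte count: __qedr_post_send() starts a send at wqe_size = 2 (the header segments) and qedr_prepare_sq_sges() adds wr->num_sge on top. A tiny sketch of that counting, with nothing qedr-specific beyond what the snippets show:

    #include <stdint.h>

    /* Count WQE segments the way the qedr snippets do: a fixed header
     * contribution plus one segment per scatter/gather entry. */
    static uint8_t sq_wqe_segments(uint8_t hdr_segments, uint32_t num_sge)
    {
        return (uint8_t)(hdr_segments + num_sge);  /* e.g. 2 + num_sge */
    }
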
qedr.h
433 u8 wqe_size; member
444 u8 wqe_size; member
/Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h
73 static inline void *mlx5e_fetch_wqe(struct mlx5_wq_cyc *wq, u16 pi, size_t wqe_size) in mlx5e_fetch_wqe() argument
78 memset(wqe, 0, wqe_size); in mlx5e_fetch_wqe()
402 static inline u16 mlx5e_stop_room_for_wqe(u16 wqe_size) in mlx5e_stop_room_for_wqe() argument
415 if (__builtin_constant_p(wqe_size)) in mlx5e_stop_room_for_wqe()
416 BUILD_BUG_ON(wqe_size > MLX5_SEND_WQE_MAX_WQEBBS); in mlx5e_stop_room_for_wqe()
418 WARN_ON_ONCE(wqe_size > MLX5_SEND_WQE_MAX_WQEBBS); in mlx5e_stop_room_for_wqe()
420 return wqe_size * 2 - 1; in mlx5e_stop_room_for_wqe()
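The wqe_size * 2 - 1 returned by mlx5e_stop_room_for_wqe() is worth unpacking. The usual reading (my interpretation, not stated in the snippet): a WQE must occupy contiguous WQEBBs in the cyclic queue, so posting an N-WQEBB WQE may first require up to N - 1 NOP WQEBBs of padding to reach the ring start, hence 2N - 1 WQEBBs of guaranteed room:

    #include <assert.h>
    #include <stdint.h>

    /* Worst case for an N-WQEBB WQE in a cyclic queue that forbids
     * wrap-around: (N - 1) NOP WQEBBs of padding + the N-WQEBB WQE. */
    static uint16_t stop_room_for_wqe(uint16_t wqe_size_in_wqebbs)
    {
        return wqe_size_in_wqebbs * 2 - 1;
    }

    int main(void)
    {
        /* A 4-WQEBB WQE arriving 1 WQEBB before the ring end needs
         * 3 NOPs + 4 WQEBBs = 7 = 4 * 2 - 1. */
        assert(stop_room_for_wqe(4) == 7);
        return 0;
    }
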
/Linux-v5.10/drivers/infiniband/sw/rxe/
rxe_qp.c
197 int wqe_size; in rxe_qp_init_req() local
216 wqe_size = max_t(int, init->cap.max_send_sge * sizeof(struct ib_sge), in rxe_qp_init_req()
219 wqe_size / sizeof(struct ib_sge); in rxe_qp_init_req()
220 qp->sq.max_inline = init->cap.max_inline_data = wqe_size; in rxe_qp_init_req()
221 wqe_size += sizeof(struct rxe_send_wqe); in rxe_qp_init_req()
223 qp->sq.queue = rxe_queue_init(rxe, &qp->sq.max_wr, wqe_size); in rxe_qp_init_req()
264 int wqe_size; in rxe_qp_init_resp() local
270 wqe_size = rcv_wqe_size(qp->rq.max_sge); in rxe_qp_init_resp()
273 qp_num(qp), qp->rq.max_wr, qp->rq.max_sge, wqe_size); in rxe_qp_init_resp()
277 wqe_size); in rxe_qp_init_resp()
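rxe_qp_init_req() above sizes a send WQE as the larger of the SGE array and the inline-data buffer, plus the fixed struct rxe_send_wqe header, so max_inline_data falls out of the same allocation. A worked sketch with the struct sizes stubbed in as assumptions:

    #include <stdio.h>

    #define MAX(a, b) ((a) > (b) ? (a) : (b))

    int main(void)
    {
        /* Hypothetical sizes; the real ones come from the rxe structs. */
        unsigned int sizeof_ib_sge = 16;     /* addr + length + lkey */
        unsigned int sizeof_send_wqe = 128;  /* assumed header size  */
        unsigned int max_send_sge = 4;
        unsigned int max_inline = 96;

        /* The payload area must hold either the SGE list or inline
         * data, whichever is larger, as in rxe_qp_init_req(). */
        unsigned int payload = MAX(max_send_sge * sizeof_ib_sge, max_inline);
        unsigned int wqe_size = payload + sizeof_send_wqe;

        printf("payload=%u wqe_size=%u\n", payload, wqe_size); /* 96, 224 */
        return 0;
    }
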
/Linux-v5.10/drivers/infiniband/hw/bnxt_re/
qplib_fp.h
93 u16 wqe_size; member
252 u16 wqe_size; member
567 return (que->wqe_size * que->max_wqe) / sizeof(struct sq_sge); in bnxt_qplib_get_depth()
582 static inline u32 bnxt_qplib_set_rq_max_slot(u32 wqe_size) in bnxt_qplib_set_rq_max_slot() argument
584 return (wqe_size / sizeof(struct sq_sge)); in bnxt_qplib_set_rq_max_slot()
ib_verbs.c
857 u16 wqe_size, calc_ils; in bnxt_re_get_wqe_size() local
859 wqe_size = bnxt_re_get_swqe_size(nsge); in bnxt_re_get_wqe_size()
862 wqe_size = max_t(u16, calc_ils, wqe_size); in bnxt_re_get_wqe_size()
863 wqe_size = ALIGN(wqe_size, sizeof(struct sq_send_hdr)); in bnxt_re_get_wqe_size()
865 return wqe_size; in bnxt_re_get_wqe_size()
885 sq->wqe_size = bnxt_re_get_wqe_size(ilsize, sq->max_sge); in bnxt_re_setup_swqe_size()
886 if (sq->wqe_size > bnxt_re_get_swqe_size(dev_attr->max_qp_sges)) in bnxt_re_setup_swqe_size()
891 if (sq->wqe_size < bnxt_re_get_swqe_size(dev_attr->max_qp_sges) && in bnxt_re_setup_swqe_size()
893 sq->wqe_size = bnxt_re_get_swqe_size(dev_attr->max_qp_sges); in bnxt_re_setup_swqe_size()
896 qplqp->max_inline_data = sq->wqe_size - in bnxt_re_setup_swqe_size()
[all …]
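bnxt_re_get_wqe_size() above takes the larger of the SGE-based WQE size and the inline-data size (calc_ils), then aligns the result to sizeof(struct sq_send_hdr); the qplib_fp.h helpers later convert such byte sizes into sq_sge-sized slots. A sketch with the header and SGE sizes assumed for illustration:

    #include <stdint.h>
    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        /* Assumed sizes; the driver uses sq_send_hdr and sq_sge
         * from roce_hsi.h. */
        uint16_t hdr_size = 16, sge_size = 16;
        uint16_t nsge = 3, ilsize = 64;

        /* Mirror bnxt_re_get_wqe_size(): header + SGEs vs. header +
         * inline data, whichever is larger, aligned to the header. */
        uint16_t sge_based = hdr_size + nsge * sge_size;   /* 64 */
        uint16_t calc_ils  = hdr_size + ilsize;            /* 80 */
        uint16_t wqe_size  = sge_based > calc_ils ? sge_based : calc_ils;

        wqe_size = ALIGN(wqe_size, hdr_size);              /* 80 */
        printf("wqe_size=%u slots=%u\n", wqe_size, wqe_size / sge_size);
        return 0;
    }
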
roce_hsi.h
194 u8 wqe_size; member
217 u8 wqe_size; member
237 u8 wqe_size; member
286 u8 wqe_size; member
309 u8 wqe_size; member
324 u8 wqe_size; member
521 u8 wqe_size; member
537 u8 wqe_size; member
/Linux-v5.10/drivers/infiniband/hw/ocrdma/
ocrdma_verbs.c
366 dev->attr.wqe_size) : 0; in _ocrdma_alloc_pd()
498 resp.wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
500 resp.dpp_wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
1921 const struct ib_send_wr *wr, u32 wqe_size) in ocrdma_build_inline_sges() argument
1942 wqe_size += roundup(hdr->total_len, OCRDMA_WQE_ALIGN_BYTES); in ocrdma_build_inline_sges()
1944 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1949 wqe_size += (wr->num_sge * sizeof(struct ocrdma_sge)); in ocrdma_build_inline_sges()
1951 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1954 hdr->cw |= ((wqe_size / OCRDMA_WQE_STRIDE) << OCRDMA_WQE_SIZE_SHIFT); in ocrdma_build_inline_sges()
1963 u32 wqe_size = sizeof(*hdr); in ocrdma_build_send() local
[all …]
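ocrdma_build_inline_sges() accumulates wqe_size in bytes (header plus inline data rounded to OCRDMA_WQE_ALIGN_BYTES, or one ocrdma_sge per fragment) and then publishes it in the control word as a stride count: (wqe_size / OCRDMA_WQE_STRIDE) << OCRDMA_WQE_SIZE_SHIFT. A sketch of that encoding with the two constants assumed:

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed values; the real constants live in the ocrdma headers. */
    #define WQE_STRIDE      8u   /* bytes per WQE size unit          */
    #define WQE_SIZE_SHIFT  16u  /* position of the size field in cw */

    int main(void)
    {
        uint32_t hdr = 32, sges = 2, sge_size = 16;

        /* Accumulate the byte size as ocrdma_build_inline_sges()
         * does, then fold it into the control word as a stride
         * count: 64 / 8 = 8, shifted into place. */
        uint32_t wqe_size = hdr + sges * sge_size;            /* 64 */
        uint32_t cw = (wqe_size / WQE_STRIDE) << WQE_SIZE_SHIFT;

        printf("wqe_size=%u cw=0x%x\n", wqe_size, cw);   /* 0x80000 */
        return 0;
    }
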
/Linux-v5.10/drivers/infiniband/hw/mlx5/
qp.c
261 size_t wqe_size = 1 << wq->wqe_shift; in mlx5_ib_read_wqe_rq() local
263 if (buflen < wqe_size) in mlx5_ib_read_wqe_rq()
293 size_t wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_read_wqe_srq() local
295 if (buflen < wqe_size) in mlx5_ib_read_wqe_srq()
354 int wqe_size; in set_rq_size() local
385 wqe_size = in set_rq_size()
388 wqe_size += cap->max_recv_sge * sizeof(struct mlx5_wqe_data_seg); in set_rq_size()
389 wqe_size = roundup_pow_of_two(wqe_size); in set_rq_size()
390 wq_size = roundup_pow_of_two(cap->max_recv_wr) * wqe_size; in set_rq_size()
392 qp->rq.wqe_cnt = wq_size / wqe_size; in set_rq_size()
[all …]
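set_rq_size() above rounds the per-WQE byte size up to a power of two, rounds the requested WR count up to a power of two, and derives the final wqe_cnt from the resulting buffer size; storing the size as a shift (wqe_shift, read back as 1 << wq->wqe_shift in the other hits) depends on exactly this rounding. The arithmetic, worked with hypothetical caps:

    #include <stdint.h>
    #include <stdio.h>

    /* Round v up to the next power of two (v > 0), standing in for
     * the kernel's roundup_pow_of_two(). */
    static uint32_t roundup_pow2(uint32_t v)
    {
        uint32_t p = 1;
        while (p < v)
            p <<= 1;
        return p;
    }

    int main(void)
    {
        /* Hypothetical caps: 3 receive SGEs of 16 bytes, 100 WRs. */
        uint32_t max_recv_sge = 3, seg_size = 16, max_recv_wr = 100;

        /* Mirror set_rq_size(): 3 * 16 = 48 -> 64-byte WQEs,
         * 100 WRs -> 128, so the RQ holds 8192 / 64 = 128 WQEs. */
        uint32_t wqe_size = roundup_pow2(max_recv_sge * seg_size);
        uint32_t wq_size  = roundup_pow2(max_recv_wr) * wqe_size;
        uint32_t wqe_cnt  = wq_size / wqe_size;

        printf("wqe_size=%u wq_size=%u wqe_cnt=%u\n",
               wqe_size, wq_size, wqe_cnt);
        return 0;
    }
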
odp.c
1191 int wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_mr_responder_pfault_handler_srq() local
1193 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler_srq()
1198 *wqe_end = *wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler_srq()
1210 int wqe_size = 1 << wq->wqe_shift; in mlx5_ib_mr_responder_pfault_handler_rq() local
1217 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler_rq()
1222 *wqe_end = wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler_rq()
wr.c
652 int wqe_size; in set_sig_data_segment() local
683 wqe_size = ALIGN(sizeof(*data_klm), 64); in set_sig_data_segment()
729 wqe_size = ALIGN(sizeof(*sblock_ctrl) + sizeof(*data_sentry) + in set_sig_data_segment()
733 *seg += wqe_size; in set_sig_data_segment()
734 *size += wqe_size / 16; in set_sig_data_segment()
/Linux-v5.10/include/uapi/rdma/
ocrdma-abi.h
55 __u32 wqe_size; member
ib_user_verbs.h
840 __u32 wqe_size; member
859 __u32 wqe_size; member
872 __u32 wqe_size; member
/Linux-v5.10/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_qp.c
147 qp->rq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_rq_wqe_hdr) + in pvrdma_set_rq_size()
150 qp->npages_recv = (qp->rq.wqe_cnt * qp->rq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_rq_size()
172 qp->sq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_sq_wqe_hdr) + in pvrdma_set_sq_size()
177 (qp->sq.wqe_cnt * qp->sq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_sq_size()
645 qp->sq.offset + n * qp->sq.wqe_size); in get_sq_wqe()
651 qp->rq.offset + n * qp->rq.wqe_size); in get_rq_wqe()
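pvrdma_set_rq_size() and pvrdma_set_sq_size() round wqe_size up to a power of two and size the backing buffer in whole pages with a round-up division; get_sq_wqe()/get_rq_wqe() then locate WQE n at a fixed n * wqe_size stride. The arithmetic, worked with hypothetical numbers:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096u

    int main(void)
    {
        /* Hypothetical ring: 256 WQEs, each rounded up to 128 bytes. */
        uint32_t wqe_cnt = 256, wqe_size = 128;

        /* Round-up division as in pvrdma_set_rq_size():
         * 256 * 128 = 32768 bytes -> 8 pages. */
        uint32_t npages = (wqe_cnt * wqe_size + PAGE_SIZE - 1) / PAGE_SIZE;

        /* Fixed stride as in get_sq_wqe(): WQE 5 sits at byte 640. */
        uint32_t n = 5;
        printf("npages=%u wqe[%u] at +%u\n", npages, n, n * wqe_size);
        return 0;
    }
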
pvrdma.h
155 int wqe_size; member
170 int wqe_size; member
/Linux-v5.10/drivers/infiniband/core/
uverbs_cmd.c
1999 wqes = uverbs_request_next_ptr(&iter, cmd.wqe_size * cmd.wr_count); in ib_uverbs_post_send()
2010 user_wr = kmalloc(cmd.wqe_size, GFP_KERNEL); in ib_uverbs_post_send()
2024 if (copy_from_user(user_wr, wqes + i * cmd.wqe_size, in ib_uverbs_post_send()
2025 cmd.wqe_size)) { in ib_uverbs_post_send()
2176 u32 wqe_size, u32 sge_count) in ib_uverbs_unmarshall_recv() argument
2186 if (wqe_size < sizeof (struct ib_uverbs_recv_wr)) in ib_uverbs_unmarshall_recv()
2189 wqes = uverbs_request_next_ptr(iter, wqe_size * wr_count); in ib_uverbs_unmarshall_recv()
2200 user_wr = kmalloc(wqe_size, GFP_KERNEL); in ib_uverbs_unmarshall_recv()
2207 if (copy_from_user(user_wr, wqes + i * wqe_size, in ib_uverbs_unmarshall_recv()
2208 wqe_size)) { in ib_uverbs_unmarshall_recv()
[all …]
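Here wqe_size is a user-supplied stride: user space declares one size for all work requests in the array, the kernel rejects anything smaller than the known fixed prefix, and then walks the array at that stride, bouncing each element through a kmalloc'd buffer. A self-contained sketch of the pattern, with memcpy standing in for copy_from_user() and the struct layout assumed:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    /* Minimal stand-in for the fixed prefix every user WR carries. */
    struct user_recv_wr {
        uint64_t wr_id;
        uint32_t num_sge;
        uint32_t reserved;
    };

    /* Sketch of the ib_uverbs_unmarshall_recv() pattern. */
    static int unmarshall_recv(const uint8_t *wqes, uint32_t wr_count,
                               uint32_t wqe_size)
    {
        if (wqe_size < sizeof(struct user_recv_wr))
            return -1;                   /* -EINVAL in the kernel */

        void *user_wr = malloc(wqe_size);
        if (!user_wr)
            return -1;

        for (uint32_t i = 0; i < wr_count; i++) {
            /* Element i starts at i * wqe_size; bytes beyond the
             * prefix hold the per-WR SGE list. */
            memcpy(user_wr, wqes + (size_t)i * wqe_size, wqe_size);
            /* ... translate user_wr into a kernel ib_recv_wr ... */
        }
        free(user_wr);
        return 0;
    }
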