Home
last modified time | relevance | path

Searched refs: wqe_size (Results 1 – 25 of 29) sorted by relevance

12

/Linux-v4.19/drivers/net/ethernet/huawei/hinic/
Dhinic_hw_qp.h156 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size,
160 unsigned int wqe_size, u16 *prod_idx);
164 unsigned int wqe_size);
168 unsigned int wqe_size, u16 *cons_idx);
172 unsigned int *wqe_size, u16 *cons_idx);
174 void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size);
180 unsigned int wqe_size, u16 *prod_idx);
186 unsigned int wqe_size,
190 unsigned int wqe_size,
195 unsigned int wqe_size);
Dhinic_hw_qp.c582 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size, in hinic_sq_write_db() argument
588 prod_idx += ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_sq_write_db()
604 unsigned int wqe_size, u16 *prod_idx) in hinic_sq_get_wqe() argument
606 struct hinic_hw_wqe *hw_wqe = hinic_get_wqe(sq->wq, wqe_size, in hinic_sq_get_wqe()
625 struct sk_buff *skb, unsigned int wqe_size) in hinic_sq_write_wqe() argument
632 hinic_cpu_to_be32(sq_wqe, wqe_size); in hinic_sq_write_wqe()
634 hinic_write_wqe(sq->wq, hw_wqe, wqe_size); in hinic_sq_write_wqe()
649 unsigned int *wqe_size, u16 *cons_idx) in hinic_sq_read_wqebb() argument
669 *wqe_size = sizeof(*ctrl) + sizeof(sq_wqe->task); in hinic_sq_read_wqebb()
670 *wqe_size += SECT_SIZE_FROM_8BYTES(buf_sect_len); in hinic_sq_read_wqebb()
[all …]
Dhinic_tx.c184 unsigned int wqe_size; in hinic_xmit_frame() local
211 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_xmit_frame()
213 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_xmit_frame()
220 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_xmit_frame()
232 wqe_size = 0; in hinic_xmit_frame()
239 hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size); in hinic_xmit_frame()
244 hinic_sq_write_db(txq->sq, prod_idx, wqe_size, 0); in hinic_xmit_frame()
281 unsigned int wqe_size; in free_all_tx_skbs() local
286 while ((sq_wqe = hinic_sq_read_wqebb(sq, &skb, &wqe_size, &ci))) { in free_all_tx_skbs()
287 sq_wqe = hinic_sq_read_wqe(sq, &skb, wqe_size, &ci); in free_all_tx_skbs()
[all …]
Dhinic_hw_wq.h104 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,
107 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size);
109 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,
115 unsigned int wqe_size);
Dhinic_hw_wq.c736 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_get_wqe() argument
744 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_get_wqe()
782 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_put_wqe() argument
784 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_put_wqe()
799 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_read_wqe() argument
802 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_read_wqe()
863 unsigned int wqe_size) in hinic_write_wqe() argument
873 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_write_wqe()
Dhinic_hw_cmdq.c169 unsigned int wqe_size = 0; in cmdq_wqe_size_from_bdlen() local
173 wqe_size = WQE_LCMD_SIZE; in cmdq_wqe_size_from_bdlen()
176 wqe_size = WQE_SCMD_SIZE; in cmdq_wqe_size_from_bdlen()
180 return wqe_size; in cmdq_wqe_size_from_bdlen()
544 unsigned int bufdesc_len, wqe_size; in clear_wqe_complete_bit() local
548 wqe_size = cmdq_wqe_size_from_bdlen(bufdesc_len); in clear_wqe_complete_bit()
549 if (wqe_size == WQE_LCMD_SIZE) { in clear_wqe_complete_bit()
/Linux-v4.19/drivers/infiniband/hw/i40iw/
Di40iw_uk.c60 qp->sq_wrtrk_array[wqe_idx].wqe_size = I40IW_QP_WQE_MIN_SIZE; in i40iw_nop_1()
136 u8 wqe_size, in i40iw_qp_get_next_send_wqe() argument
155 if ((offset + wqe_size) > I40IW_QP_WQE_MAX_SIZE) { in i40iw_qp_get_next_send_wqe()
169 if (((*wqe_idx & 3) == 1) && (wqe_size == I40IW_WQE_SIZE_64)) { in i40iw_qp_get_next_send_wqe()
179 wqe_size / I40IW_QP_WQE_MIN_SIZE, ret_code); in i40iw_qp_get_next_send_wqe()
195 qp->sq_wrtrk_array[*wqe_idx].wqe_size = wqe_size; in i40iw_qp_get_next_send_wqe()
256 u8 wqe_size; in i40iw_rdma_write() local
270 ret_code = i40iw_fragcnt_to_wqesize_sq(op_info->num_lo_sges, &wqe_size); in i40iw_rdma_write()
274 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, wqe_size, total_size, info->wr_id); in i40iw_rdma_write()
324 u8 wqe_size; in i40iw_rdma_read() local
[all …]
Di40iw_user.h328 u8 wqe_size; member
408 u8 wqe_size,
423 enum i40iw_status_code i40iw_fragcnt_to_wqesize_sq(u32 frag_cnt, u8 *wqe_size);
424 enum i40iw_status_code i40iw_fragcnt_to_wqesize_rq(u32 frag_cnt, u8 *wqe_size);
426 u8 *wqe_size);
/Linux-v4.19/drivers/infiniband/hw/qedr/
Dqedr_hsi_rdma.h310 u8 wqe_size; member
338 u8 wqe_size; member
374 u8 wqe_size; member
420 u8 wqe_size; member
475 u8 wqe_size; member
498 u8 wqe_size; member
548 u8 wqe_size; member
602 u8 wqe_size; member
628 u8 wqe_size; member
663 u8 wqe_size; member
[all …]
Dverbs.c3040 struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_inline_data() argument
3077 (*wqe_size)++; in qedr_prepare_sq_inline_data()
3125 static u32 qedr_prepare_sq_sges(struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_sges() argument
3140 if (wqe_size) in qedr_prepare_sq_sges()
3141 *wqe_size += wr->num_sge; in qedr_prepare_sq_sges()
3162 return qedr_prepare_sq_inline_data(dev, qp, &rwqe->wqe_size, wr, in qedr_prepare_sq_rdma_data()
3166 return qedr_prepare_sq_sges(qp, &rwqe->wqe_size, wr); in qedr_prepare_sq_rdma_data()
3181 return qedr_prepare_sq_inline_data(dev, qp, &swqe->wqe_size, wr, in qedr_prepare_sq_send_data()
3185 return qedr_prepare_sq_sges(qp, &swqe->wqe_size, wr); in qedr_prepare_sq_send_data()
3338 swqe->wqe_size = 2; in __qedr_post_send()
[all …]
Dqedr.h422 u8 wqe_size; member
433 u8 wqe_size; member
/Linux-v4.19/drivers/infiniband/hw/ocrdma/
Docrdma_verbs.c398 dev->attr.wqe_size) : 0; in _ocrdma_alloc_pd()
541 resp.wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
543 resp.dpp_wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
2003 const struct ib_send_wr *wr, u32 wqe_size) in ocrdma_build_inline_sges() argument
2024 wqe_size += roundup(hdr->total_len, OCRDMA_WQE_ALIGN_BYTES); in ocrdma_build_inline_sges()
2026 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
2031 wqe_size += (wr->num_sge * sizeof(struct ocrdma_sge)); in ocrdma_build_inline_sges()
2033 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
2036 hdr->cw |= ((wqe_size / OCRDMA_WQE_STRIDE) << OCRDMA_WQE_SIZE_SHIFT); in ocrdma_build_inline_sges()
2045 u32 wqe_size = sizeof(*hdr); in ocrdma_build_send() local
[all …]
Docrdma.h111 u32 wqe_size; member
Docrdma_hw.c1201 attr->wqe_size = ((rsp->wqe_rqe_stride_max_dpp_cqs & in ocrdma_get_attr()
1210 attr->wqe_size - (sizeof(struct ocrdma_hdr_wqe) + in ocrdma_get_attr()
2203 dev->attr.wqe_size, &hw_pages, &hw_page_size); in ocrdma_set_create_qp_sq_cmd()
2217 qp->sq.entry_size = dev->attr.wqe_size; in ocrdma_set_create_qp_sq_cmd()
2234 cmd->wqe_rqe_size |= (dev->attr.wqe_size << in ocrdma_set_create_qp_sq_cmd()
/Linux-v4.19/drivers/infiniband/sw/rxe/
Drxe_qp.c223 int wqe_size; in rxe_qp_init_req() local
234 wqe_size = max_t(int, sizeof(struct rxe_send_wqe) + in rxe_qp_init_req()
241 wqe_size); in rxe_qp_init_req()
282 int wqe_size; in rxe_qp_init_resp() local
288 wqe_size = rcv_wqe_size(qp->rq.max_sge); in rxe_qp_init_resp()
291 qp_num(qp), qp->rq.max_wr, qp->rq.max_sge, wqe_size); in rxe_qp_init_resp()
295 wqe_size); in rxe_qp_init_resp()
/Linux-v4.19/include/uapi/rdma/
Docrdma-abi.h55 __u32 wqe_size; member
Dib_user_verbs.h802 __u32 wqe_size; member
821 __u32 wqe_size; member
834 __u32 wqe_size; member
/Linux-v4.19/drivers/infiniband/hw/vmw_pvrdma/
Dpvrdma_qp.c144 qp->rq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_rq_wqe_hdr) + in pvrdma_set_rq_size()
147 qp->npages_recv = (qp->rq.wqe_cnt * qp->rq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_rq_size()
169 qp->sq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_sq_wqe_hdr) + in pvrdma_set_sq_size()
174 (qp->sq.wqe_cnt * qp->sq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_sq_size()
593 qp->sq.offset + n * qp->sq.wqe_size); in get_sq_wqe()
599 qp->rq.offset + n * qp->rq.wqe_size); in get_rq_wqe()
Dpvrdma.h155 int wqe_size; member
170 int wqe_size; member
/Linux-v4.19/drivers/infiniband/hw/mlx5/
Dqp.c247 int wqe_size; in set_rq_size() local
271 wqe_size = qp->wq_sig ? sizeof(struct mlx5_wqe_signature_seg) : 0; in set_rq_size()
272 wqe_size += cap->max_recv_sge * sizeof(struct mlx5_wqe_data_seg); in set_rq_size()
273 wqe_size = roundup_pow_of_two(wqe_size); in set_rq_size()
274 wq_size = roundup_pow_of_two(cap->max_recv_wr) * wqe_size; in set_rq_size()
276 qp->rq.wqe_cnt = wq_size / wqe_size; in set_rq_size()
277 if (wqe_size > MLX5_CAP_GEN(dev->mdev, max_wqe_sz_rq)) { in set_rq_size()
279 wqe_size, in set_rq_size()
284 qp->rq.wqe_shift = ilog2(wqe_size); in set_rq_size()
367 static int get_send_sge(struct ib_qp_init_attr *attr, int wqe_size) in get_send_sge() argument
[all …]
Dodp.c973 int wqe_size = 1 << wq->wqe_shift; in mlx5_ib_mr_responder_pfault_handler() local
985 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler()
1003 *wqe_end = *wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler()
/Linux-v4.19/drivers/infiniband/core/
Duverbs_cmd.c2218 if (in_len < sizeof cmd + cmd.wqe_size * cmd.wr_count + in ib_uverbs_post_send()
2222 if (cmd.wqe_size < sizeof (struct ib_uverbs_send_wr)) in ib_uverbs_post_send()
2225 user_wr = kmalloc(cmd.wqe_size, GFP_KERNEL); in ib_uverbs_post_send()
2238 buf + sizeof cmd + i * cmd.wqe_size, in ib_uverbs_post_send()
2239 cmd.wqe_size)) { in ib_uverbs_post_send()
2348 cmd.wr_count * cmd.wqe_size + in ib_uverbs_post_send()
2392 u32 wqe_size) in ib_uverbs_unmarshall_recv() argument
2400 if (in_len < wqe_size * wr_count + in ib_uverbs_unmarshall_recv()
2404 if (wqe_size < sizeof (struct ib_uverbs_recv_wr)) in ib_uverbs_unmarshall_recv()
2407 user_wr = kmalloc(wqe_size, GFP_KERNEL); in ib_uverbs_unmarshall_recv()
[all …]
/Linux-v4.19/drivers/net/ethernet/ibm/ehea/
Dehea_qmr.c378 int nr_pages, int wqe_size, int act_nr_sges, in ehea_qp_alloc_register() argument
385 ret = hw_queue_ctor(hw_queue, nr_pages, EHEA_PAGESIZE, wqe_size); in ehea_qp_alloc_register()
/Linux-v4.19/drivers/infiniband/hw/bnxt_re/
Droce_hsi.h167 u8 wqe_size; member
196 u8 wqe_size; member
253 u8 wqe_size; member
399 u8 wqe_size; member
Dqplib_fp.c688 srqe->wqe_size = wqe->num_sge + in bnxt_qplib_post_srq_recv()
1598 sqe->wqe_size = wqe_size16 + in bnxt_qplib_post_send()
1617 sqe->wqe_size = wqe_size16 + in bnxt_qplib_post_send()
1649 sqe->wqe_size = wqe_size16 + in bnxt_qplib_post_send()
1845 rqe->wqe_size = wqe->num_sge + in bnxt_qplib_post_recv()
1851 rqe->wqe_size++; in bnxt_qplib_post_recv()

12