| /Linux-v5.4/drivers/infiniband/hw/mlx5/ |
| D | cq.c | 637 int cqe_size) in alloc_cq_frag_buf() argument
| D | cq.c | 640 u8 log_wq_stride = 6 + (cqe_size == 128 ? 1 : 0); in alloc_cq_frag_buf()
| D | cq.c | 641 u8 log_wq_sz = ilog2(cqe_size); in alloc_cq_frag_buf()
| D | cq.c | 645 nent * cqe_size, in alloc_cq_frag_buf()
| D | cq.c | 653 buf->cqe_size = cqe_size; in alloc_cq_frag_buf()
| D | cq.c | 683 int *cqe_size, int *index, int *inlen) in create_cq_user() argument
| D | cq.c | 706 if (ucmd.cqe_size != 64 && ucmd.cqe_size != 128) in create_cq_user()
| D | cq.c | 709 *cqe_size = ucmd.cqe_size; in create_cq_user()
| D | cq.c | 712 ib_umem_get(udata, ucmd.buf_addr, entries * ucmd.cqe_size, in create_cq_user()
| D | cq.c | 726 ucmd.buf_addr, entries * ucmd.cqe_size, npages, page_shift, ncont); in create_cq_user()
| D | cq.c | [all …]
|
| D | mlx5_ib.h | 453 int cqe_size; member
| D | mlx5_ib.h | 517 int cqe_size; member
|
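The mlx5 hits above share one pattern: user-supplied CQE sizes are accepted only as 64 or 128 bytes, the fragment-buffer stride is log2 6 or 7 accordingly, and the total buffer is nent * cqe_size. Below is a minimal userspace sketch of that arithmetic; the helper names, the standalone program, and the entry count are illustrative, not taken from the driver.

```c
#include <stdio.h>

/* mlx5 accepts only 64- or 128-byte CQEs from user space. */
static int cqe_size_is_valid(int cqe_size)
{
	return cqe_size == 64 || cqe_size == 128;
}

/*
 * log2 of the per-entry stride: 6 (64 bytes) normally, 7 (128 bytes)
 * for 128-byte CQEs -- the same expression as
 * "6 + (cqe_size == 128 ? 1 : 0)" in the hit above.
 */
static unsigned int cq_log_stride(int cqe_size)
{
	return 6 + (cqe_size == 128 ? 1 : 0);
}

int main(void)
{
	const int sizes[] = { 32, 64, 128 };
	const int nent = 256;	/* example entry count */

	for (unsigned int i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++) {
		int sz = sizes[i];

		if (!cqe_size_is_valid(sz)) {
			printf("cqe_size=%d: rejected\n", sz);
			continue;
		}
		printf("cqe_size=%d: log_stride=%u, buffer=%d bytes for %d entries\n",
		       sz, cq_log_stride(sz), nent * sz, nent);
	}
	return 0;
}
```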
| /Linux-v5.4/drivers/net/ethernet/mellanox/mlx4/ |
| D | cq.c | 290 static int mlx4_init_user_cqes(void *buf, int entries, int cqe_size) in mlx4_init_user_cqes() argument
| D | cq.c | 292 int entries_per_copy = PAGE_SIZE / cqe_size; in mlx4_init_user_cqes()
| D | cq.c | 317 err = copy_to_user((void __user *)buf, init_ents, entries * cqe_size) ? in mlx4_init_user_cqes()
| D | cq.c | 329 int cqe_size) in mlx4_init_kernel_cqes() argument
| D | cq.c | 334 memset(buf->direct.buf, 0xcc, entries * cqe_size); in mlx4_init_kernel_cqes()
| D | cq.c | 394 dev->caps.cqe_size); in mlx4_cq_alloc()
| D | cq.c | 399 dev->caps.cqe_size); in mlx4_cq_alloc()
|
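The mlx4_init_user_cqes() hits above suggest a chunked initialization: one page worth of "init" CQEs is prepared once and copied out entries_per_copy = PAGE_SIZE / cqe_size entries at a time. A userspace sketch of that loop follows; all names are illustrative, plain memcpy stands in for copy_to_user(), and only the PAGE_SIZE / cqe_size split mirrors the driver snippet.

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SIZE 4096

static int init_cqes(unsigned char *buf, int entries, int cqe_size)
{
	int entries_per_copy = PAGE_SIZE / cqe_size;
	unsigned char *init_ents;

	init_ents = malloc(PAGE_SIZE);
	if (!init_ents)
		return -1;

	/* Pattern standing in for the hardware "invalid/ownership" byte. */
	memset(init_ents, 0xcc, PAGE_SIZE);

	for (int i = 0; i < entries; i += entries_per_copy) {
		int chunk = entries - i < entries_per_copy ?
			    entries - i : entries_per_copy;

		/* The driver uses copy_to_user() here; memcpy in the sketch. */
		memcpy(buf + (size_t)i * cqe_size, init_ents,
		       (size_t)chunk * cqe_size);
	}

	free(init_ents);
	return 0;
}

int main(void)
{
	int entries = 1024, cqe_size = 32;
	unsigned char *buf = malloc((size_t)entries * cqe_size);

	if (!buf || init_cqes(buf, entries, cqe_size))
		return 1;
	printf("initialised %d CQEs of %d bytes (%d per page)\n",
	       entries, cqe_size, PAGE_SIZE / cqe_size);
	free(buf);
	return 0;
}
```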
| D | fw.h | 202 u16 cqe_size; /* For use only when CQE stride feature enabled */ member
|
| D | en_cq.c | 62 cq->buf_size = cq->size * mdev->dev->caps.cqe_size; in mlx4_en_create_cq()
|
| D | en_tx.c | 413 cqe = mlx4_en_get_cqe(buf, index, priv->cqe_size) + factor; in mlx4_en_process_tx_cq()
| D | en_tx.c | 467 cqe = mlx4_en_get_cqe(buf, index, priv->cqe_size) + factor; in mlx4_en_process_tx_cq()
|
| D | en_rx.c | 692 cqe = mlx4_en_get_cqe(cq->buf, index, priv->cqe_size) + factor; in mlx4_en_process_rx_cq()
| D | en_rx.c | 899 cqe = mlx4_en_get_cqe(cq->buf, index, priv->cqe_size) + factor; in mlx4_en_process_rx_cq()
|
| D | mlx4_en.h | 576 int cqe_size; member
|
| D | fw.c | 1941 dev->caps.cqe_size = 64; in mlx4_INIT_HCA()
| D | fw.c | 1944 dev->caps.cqe_size = 32; in mlx4_INIT_HCA()
| D | fw.c | 1951 dev->caps.cqe_size = cache_line_size(); in mlx4_INIT_HCA()
| D | fw.c | 2175 param->cqe_size = 1 << ((byte_field & in mlx4_QUERY_HCA()
|
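The mlx4_INIT_HCA() hits above show three possible CQE sizes: 64 bytes when the device and the configuration opt in, 32 bytes otherwise, and cache_line_size() when the CQE-stride feature is used. The sketch below only illustrates that selection pattern; the parameter names, their ordering, and the userspace cache_line_size() stub are assumptions, not the driver's logic or flag names.

```c
#include <stdio.h>
#include <stdbool.h>

/* Stand-in for the kernel's cache_line_size() helper. */
static int cache_line_size(void)
{
	return 64;
}

static int pick_cqe_size(bool dev_supports_64b_cqe, bool enable_64b_cqe,
			 bool cqe_stride_enabled)
{
	if (cqe_stride_enabled)
		return cache_line_size();
	if (dev_supports_64b_cqe && enable_64b_cqe)
		return 64;
	return 32;
}

int main(void)
{
	printf("CQE stride enabled -> %d\n", pick_cqe_size(true, true, true));
	printf("64B CQEs enabled   -> %d\n", pick_cqe_size(true, true, false));
	printf("default            -> %d\n", pick_cqe_size(false, false, false));
	return 0;
}
```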
| D | en_netdev.c | 1669 cqe = mlx4_en_get_cqe(cq->buf, j, priv->cqe_size) + in mlx4_en_start_port()
| D | en_netdev.c | 3312 priv->cqe_factor = (mdev->dev->caps.cqe_size == 64) ? 1 : 0; in mlx4_en_init_netdev()
| D | en_netdev.c | 3313 priv->cqe_size = mdev->dev->caps.cqe_size; in mlx4_en_init_netdev()
|
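The en_tx.c, en_rx.c and en_netdev.c hits above all use the same indexing trick: the completion ring is laid out with a stride of caps.cqe_size (32 or 64 bytes), while the fields the driver reads fit in a 32-byte CQE, so with a 64-byte stride a factor of 1 steps over the first half of each slot. A self-contained sketch of that pointer arithmetic follows; the 32-byte struct and all names are simplified stand-ins, not the real struct mlx4_cqe or mlx4_en_get_cqe().

```c
#include <stdio.h>
#include <stdint.h>
#include <string.h>

struct fake_cqe {		/* 32-byte stand-in for struct mlx4_cqe */
	uint8_t data[31];
	uint8_t owner_sr_opcode;
};

static struct fake_cqe *get_cqe(void *buf, int index, int cqe_size)
{
	/* Same shape as the driver helper: base + index * stride. */
	return (struct fake_cqe *)((uint8_t *)buf + (size_t)index * cqe_size);
}

int main(void)
{
	int cqe_size = 64;			/* stride reported by the HCA */
	int factor = (cqe_size == 64) ? 1 : 0;	/* same test as priv->cqe_factor */
	uint8_t ring[8 * 64];
	struct fake_cqe *cqe;

	memset(ring, 0, sizeof(ring));

	/* "+ factor" advances by one 32-byte fake_cqe into the 64-byte slot. */
	cqe = get_cqe(ring, 3, cqe_size) + factor;
	printf("slot 3: consumed CQE starts at byte offset %td\n",
	       (uint8_t *)cqe - ring);
	return 0;
}
```

With a 32-byte stride the factor is 0 and the pointer lands at the start of the slot, which is why the same expression works for both layouts.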
| D | main.c | 1049 dev->caps.cqe_size = 64; in mlx4_slave_cap()
| D | main.c | 1052 dev->caps.cqe_size = 32; in mlx4_slave_cap()
| D | main.c | 1061 dev->caps.cqe_size = hca_param->cqe_size; in mlx4_slave_cap()
|
| /Linux-v5.4/drivers/infiniband/hw/mlx4/ |
| D | cq.c | 105 err = mlx4_buf_alloc(dev->dev, nent * dev->dev->caps.cqe_size, in mlx4_ib_alloc_cq_buf()
| D | cq.c | 111 buf->entry_size = dev->dev->caps.cqe_size; in mlx4_ib_alloc_cq_buf()
| D | cq.c | 143 int cqe_size = dev->dev->caps.cqe_size; in mlx4_ib_get_cq_umem() local
| D | cq.c | 147 *umem = ib_umem_get(udata, buf_addr, cqe * cqe_size, in mlx4_ib_get_cq_umem()
| D | cq.c | 359 int cqe_size = cq->buf.entry_size; in mlx4_ib_cq_resize_copy_cqes() local
| D | cq.c | 360 int cqe_inc = cqe_size == 64 ? 1 : 0; in mlx4_ib_cq_resize_copy_cqes()
| D | cq.c | 369 memcpy(new_cqe, get_cqe(cq, i & cq->ibcq.cqe), cqe_size); in mlx4_ib_cq_resize_copy_cqes()
|
| D | main.c | 1103 resp.cqe_size = dev->dev->caps.cqe_size; in mlx4_ib_alloc_ucontext()
|
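The mlx4_ib_cq_resize_copy_cqes() hits above copy cqe_size bytes per entry from the old ring into the new one, wrapping the old index by masking it against the ring size ("i & cq->ibcq.cqe"). The sketch below reproduces only that copy-with-mask pattern under the assumption that the mask is a power of two minus one; the function, buffers, and start index are illustrative, not driver code.

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static void resize_copy(const unsigned char *old_buf, int old_cqe_mask,
			unsigned char *new_buf, int count,
			int start, int cqe_size)
{
	for (int n = 0; n < count; n++) {
		int i = start + n;
		const unsigned char *old_cqe =
			old_buf + (size_t)(i & old_cqe_mask) * cqe_size;

		memcpy(new_buf + (size_t)n * cqe_size, old_cqe, cqe_size);
	}
}

int main(void)
{
	int cqe_size = 32, old_entries = 8, new_entries = 16;
	unsigned char *old_buf = calloc(old_entries, cqe_size);
	unsigned char *new_buf = calloc(new_entries, cqe_size);

	if (!old_buf || !new_buf)
		return 1;

	/* Tag each old entry so the copy is visible. */
	for (int i = 0; i < old_entries; i++)
		old_buf[(size_t)i * cqe_size] = (unsigned char)i;

	/* Copy 5 entries starting at index 6; the walk wraps past the end. */
	resize_copy(old_buf, old_entries - 1, new_buf, 5, 6, cqe_size);

	for (int n = 0; n < 5; n++)
		printf("new[%d] came from old[%d]\n",
		       n, new_buf[(size_t)n * cqe_size]);

	free(old_buf);
	free(new_buf);
	return 0;
}
```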
| /Linux-v5.4/include/uapi/rdma/ |
| D | mlx5-abi.h | 274 __u32 cqe_size; member
| D | mlx5-abi.h | 287 __u16 cqe_size; member
|
| D | mlx4-abi.h | 71 __u32 cqe_size; member
|
| /Linux-v5.4/drivers/net/ethernet/huawei/hinic/ |
| D | hinic_hw_qp.c | 315 size_t cqe_dma_size, cqe_size; in alloc_rq_cqe() local
| D | hinic_hw_qp.c | 319 cqe_size = wq->q_depth * sizeof(*rq->cqe); in alloc_rq_cqe()
| D | hinic_hw_qp.c | 320 rq->cqe = vzalloc(cqe_size); in alloc_rq_cqe()
|
| /Linux-v5.4/drivers/infiniband/hw/ocrdma/ |
| D | ocrdma_hw.c | 1791 u32 hw_pages, cqe_size, page_size, cqe_count; in ocrdma_mbx_create_cq() local
| D | ocrdma_hw.c | 1804 cqe_size = OCRDMA_DPP_CQE_SIZE; in ocrdma_mbx_create_cq()
| D | ocrdma_hw.c | 1809 cqe_size = sizeof(struct ocrdma_cqe); in ocrdma_mbx_create_cq()
| D | ocrdma_hw.c | 1813 cq->len = roundup(max_hw_cqe * cqe_size, OCRDMA_MIN_Q_PAGE_SIZE); in ocrdma_mbx_create_cq()
| D | ocrdma_hw.c | 1833 cqe_count = cq->len / cqe_size; in ocrdma_mbx_create_cq()
| D | ocrdma_hw.c | 1862 cmd->cmd.pdid_cqecnt = (cq->len / cqe_size); in ocrdma_mbx_create_cq()
| D | ocrdma_hw.c | 1864 cmd->cmd.pdid_cqecnt = (cq->len / cqe_size) - 1; in ocrdma_mbx_create_cq()
|
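The ocrdma_mbx_create_cq() hits above size the queue by rounding max_hw_cqe * cqe_size up to the minimum queue page size and then derive the CQE count back from the rounded byte length. A small sketch of that rounding follows; the page-size constant, roundup helper, and example values are assumptions standing in for OCRDMA_MIN_Q_PAGE_SIZE and the kernel's roundup() macro.

```c
#include <stdio.h>

#define MIN_Q_PAGE_SIZE 4096	/* stand-in for OCRDMA_MIN_Q_PAGE_SIZE */

/* Round x up to the next multiple of align. */
static unsigned int roundup_to(unsigned int x, unsigned int align)
{
	return ((x + align - 1) / align) * align;
}

int main(void)
{
	unsigned int cqe_size = 32;	/* e.g. sizeof(struct ocrdma_cqe) */
	unsigned int max_hw_cqe = 1000;

	unsigned int len = roundup_to(max_hw_cqe * cqe_size, MIN_Q_PAGE_SIZE);
	unsigned int cqe_count = len / cqe_size;

	printf("len = %u bytes, cqe_count = %u (requested %u)\n",
	       len, cqe_count, max_hw_cqe);
	return 0;
}
```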
| /Linux-v5.4/drivers/scsi/bnx2i/ |
| D | bnx2i.h | 661 u32 cqe_size; member
|
| D | bnx2i_hwi.c | 171 if (cq_index > ep->qp.cqe_size * 2) in bnx2i_arm_cq_event_coalescing()
| D | bnx2i_hwi.c | 172 cq_index -= ep->qp.cqe_size * 2; in bnx2i_arm_cq_event_coalescing()
| D | bnx2i_hwi.c | 1122 ep->qp.cqe_size = hba->max_cqes; in bnx2i_alloc_qp_resc()
| D | bnx2i_hwi.c | 2060 if (qp->cqe_exp_seq_sn == (qp->cqe_size * 2 + 1)) in bnx2i_process_new_cqes()
|
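In the bnx2i hits above, cqe_size is an entry count rather than a byte size (it is set from hba->max_cqes), and completion sequence numbers run over a window of twice that count before folding back, as the bnx2i_arm_cq_event_coalescing() and bnx2i_process_new_cqes() snippets show. The helper below only illustrates that wrap arithmetic; the name and the example count are assumptions, not driver code.

```c
#include <stdio.h>

/* Fold a sequence number back into the [1, 2 * cqe_cnt] window. */
static unsigned int wrap_cq_index(unsigned int cq_index, unsigned int cqe_cnt)
{
	if (cq_index > cqe_cnt * 2)
		cq_index -= cqe_cnt * 2;
	return cq_index;
}

int main(void)
{
	unsigned int cqe_cnt = 128;	/* example queue depth */

	printf("%u -> %u\n", 100u, wrap_cq_index(100, cqe_cnt));
	printf("%u -> %u\n", 300u, wrap_cq_index(300, cqe_cnt));	/* wraps */
	return 0;
}
```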
| /Linux-v5.4/drivers/scsi/lpfc/ |
| D | lpfc_sli4.h | 495 uint32_t cqe_size; member
|
| D | lpfc_init.c | 11452 sli4_params->cqe_size = bf_get(cqe_size, &mqe->un.sli4_params); in lpfc_pc_sli4_params_get()
|
| /Linux-v5.4/include/linux/mlx4/ |
| D | device.h | 619 u32 cqe_size; member
|