/Linux-v5.4/drivers/net/ethernet/mellanox/mlx4/

pd.c
    145  int mlx4_uar_alloc(struct mlx4_dev *dev, struct mlx4_uar *uar)    in mlx4_uar_alloc() argument
    149  uar->index = mlx4_bitmap_alloc(&mlx4_priv(dev)->uar_table.bitmap);    in mlx4_uar_alloc()
    150  if (uar->index == -1)    in mlx4_uar_alloc()
    154  offset = uar->index % ((int)pci_resource_len(dev->persist->pdev,    in mlx4_uar_alloc()
    158  offset = uar->index;    in mlx4_uar_alloc()
    159  uar->pfn = (pci_resource_start(dev->persist->pdev, 2) >> PAGE_SHIFT)    in mlx4_uar_alloc()
    161  uar->map = NULL;    in mlx4_uar_alloc()
    166  void mlx4_uar_free(struct mlx4_dev *dev, struct mlx4_uar *uar)    in mlx4_uar_free() argument
    168  mlx4_bitmap_free(&mlx4_priv(dev)->uar_table.bitmap, uar->index, MLX4_USE_RR);    in mlx4_uar_free()
    175  struct mlx4_uar *uar;    in mlx4_bf_alloc() local
    [all …]

cq.c
    342  struct mlx4_mtt *mtt, struct mlx4_uar *uar, u64 db_rec,    in mlx4_cq_alloc() argument
    382  mlx4_to_hw_uar_index(dev, uar->index));    in mlx4_cq_alloc()
    411  cq->uar = uar;    in mlx4_cq_alloc()

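The pd.c hits above show the allocation pattern these drivers share: a slot index comes out of a driver-private bitmap, and the doorbell page frame number is that index added to PCI BAR 2 shifted down by PAGE_SHIFT (mthca_uar.c line 44 and pvrdma_doorbell.c lines 109-110 below show the same arithmetic in full). Below is a minimal user-space model of that arithmetic; every name here (toy_uar, toy_uar_alloc, the BAR address) is invented for illustration and is not the mlx4 API.

    #include <stdint.h>
    #include <stdio.h>

    #define TOY_PAGE_SHIFT 12            /* assume 4 KiB pages */
    #define TOY_NUM_UARS   8

    struct toy_uar {
            int      index;              /* slot taken from the bitmap */
            uint64_t pfn;                /* page frame number later handed to mmap */
    };

    static uint8_t toy_bitmap[TOY_NUM_UARS];   /* stand-in for uar_table.bitmap */

    /* Mirrors the shape of mlx4_uar_alloc()/mthca_uar_alloc(): grab a free
     * slot, then point the UAR at "BAR 2 start + index pages". */
    static int toy_uar_alloc(uint64_t bar2_start, struct toy_uar *uar)
    {
            for (int i = 0; i < TOY_NUM_UARS; i++) {
                    if (!toy_bitmap[i]) {
                            toy_bitmap[i] = 1;
                            uar->index = i;
                            uar->pfn = (bar2_start >> TOY_PAGE_SHIFT) + i;
                            return 0;
                    }
            }
            return -1;                   /* corresponds to the "index == -1" check */
    }

    static void toy_uar_free(struct toy_uar *uar)
    {
            toy_bitmap[uar->index] = 0;  /* mirrors mlx4_bitmap_free()/mthca_free() */
    }

    int main(void)
    {
            struct toy_uar uar;

            if (toy_uar_alloc(0xfe800000ULL, &uar) == 0)
                    printf("index %d -> pfn 0x%llx\n", uar.index,
                           (unsigned long long)uar.pfn);
            toy_uar_free(&uar);
            return 0;
    }

The SR-IOV offset computation that line 154 hints at is truncated in the listing and deliberately left out of this sketch.
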
/Linux-v5.4/drivers/infiniband/hw/hns/

hns_roce_pd.c
    92   int hns_roce_uar_alloc(struct hns_roce_dev *hr_dev, struct hns_roce_uar *uar)    in hns_roce_uar_alloc() argument
    98   ret = hns_roce_bitmap_alloc(&hr_dev->uar_table.bitmap, &uar->logic_idx);    in hns_roce_uar_alloc()
    102  if (uar->logic_idx > 0 && hr_dev->caps.phy_num_uars > 1)    in hns_roce_uar_alloc()
    103  uar->index = (uar->logic_idx - 1) %    in hns_roce_uar_alloc()
    106  uar->index = 0;    in hns_roce_uar_alloc()
    114  uar->pfn = ((res->start) >> PAGE_SHIFT) + uar->index;    in hns_roce_uar_alloc()
    116  uar->pfn = ((pci_resource_start(hr_dev->pci_dev, 2))    in hns_roce_uar_alloc()
    123  void hns_roce_uar_free(struct hns_roce_dev *hr_dev, struct hns_roce_uar *uar)    in hns_roce_uar_free() argument
    125  hns_roce_bitmap_free(&hr_dev->uar_table.bitmap, uar->logic_idx,    in hns_roce_uar_free()

hns_roce_cq.c
    350  struct hns_roce_uar *uar;    in create_kernel_cq() local
    370  uar = &hr_dev->priv_uar;    in create_kernel_cq()
    372  DB_REG_OFFSET * uar->index;    in create_kernel_cq()

hns_roce_main.c
    323  ret = hns_roce_uar_alloc(hr_dev, &context->uar);    in hns_roce_alloc_ucontext()
    339  hns_roce_uar_free(hr_dev, &context->uar);    in hns_roce_alloc_ucontext()
    349  hns_roce_uar_free(to_hr_dev(ibcontext->device), &context->uar);    in hns_roce_dealloc_ucontext()
    360  to_hr_ucontext(context)->uar.pfn,    in hns_roce_mmap()

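The create_kernel_cq() hits above show how the allocated UAR index is consumed on hns_roce: the CQ doorbell register sits DB_REG_OFFSET times the UAR index past the register base. A toy model of that offset calculation follows; the register base and offset values are made up and are not taken from the hns_roce hardware description.

    #include <stdint.h>
    #include <stdio.h>

    #define TOY_DB_REG_OFFSET 0x1000     /* placeholder, not the real hns_roce value */

    /* Mirrors the "DB_REG_OFFSET * uar->index" hit in create_kernel_cq():
     * each UAR owns its own doorbell register window. */
    static uint64_t toy_cq_db_addr(uint64_t reg_base, uint32_t uar_index)
    {
            return reg_base + (uint64_t)TOY_DB_REG_OFFSET * uar_index;
    }

    int main(void)
    {
            for (uint32_t idx = 0; idx < 4; idx++)
                    printf("uar %u -> cq doorbell at 0x%llx\n", idx,
                           (unsigned long long)toy_cq_db_addr(0x20000000ULL, idx));
            return 0;
    }
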
/Linux-v5.4/drivers/infiniband/hw/mthca/

mthca_uar.c
    38   int mthca_uar_alloc(struct mthca_dev *dev, struct mthca_uar *uar)    in mthca_uar_alloc() argument
    40   uar->index = mthca_alloc(&dev->uar_table.alloc);    in mthca_uar_alloc()
    41   if (uar->index == -1)    in mthca_uar_alloc()
    44   uar->pfn = (pci_resource_start(dev->pdev, 2) >> PAGE_SHIFT) + uar->index;    in mthca_uar_alloc()
    49   void mthca_uar_free(struct mthca_dev *dev, struct mthca_uar *uar)    in mthca_uar_free() argument
    51   mthca_free(&dev->uar_table.alloc, uar->index);    in mthca_uar_free()

mthca_provider.c
    321  err = mthca_uar_alloc(to_mdev(ibdev), &context->uar);    in mthca_alloc_ucontext()
    328  mthca_uar_free(to_mdev(ibdev), &context->uar);    in mthca_alloc_ucontext()
    333  mthca_cleanup_user_db_tab(to_mdev(ibdev), &context->uar, context->db_tab);    in mthca_alloc_ucontext()
    334  mthca_uar_free(to_mdev(ibdev), &context->uar);    in mthca_alloc_ucontext()
    345  mthca_cleanup_user_db_tab(to_mdev(context->device), &to_mucontext(context)->uar,    in mthca_dealloc_ucontext()
    347  mthca_uar_free(to_mdev(context->device), &to_mucontext(context)->uar);    in mthca_dealloc_ucontext()
    359  to_mucontext(context)->uar.pfn,    in mthca_mmap_uar()
    423  err = mthca_map_user_db(to_mdev(ibsrq->device), &context->uar,    in mthca_create_srq()
    438  mthca_unmap_user_db(to_mdev(ibsrq->device), &context->uar,    in mthca_create_srq()
    461  mthca_unmap_user_db(to_mdev(srq->device), &context->uar,    in mthca_destroy_srq()
    [all …]

mthca_memfree.h
    165  int mthca_map_user_db(struct mthca_dev *dev, struct mthca_uar *uar,
    167  void mthca_unmap_user_db(struct mthca_dev *dev, struct mthca_uar *uar,
    170  void mthca_cleanup_user_db_tab(struct mthca_dev *dev, struct mthca_uar *uar,

mthca_memfree.c
    439  static u64 mthca_uarc_virt(struct mthca_dev *dev, struct mthca_uar *uar, int page)    in mthca_uarc_virt() argument
    442  uar->index * dev->uar_table.uarc_size +    in mthca_uarc_virt()
    446  int mthca_map_user_db(struct mthca_dev *dev, struct mthca_uar *uar,    in mthca_map_user_db() argument
    490  mthca_uarc_virt(dev, uar, i));    in mthca_map_user_db()
    505  void mthca_unmap_user_db(struct mthca_dev *dev, struct mthca_uar *uar,    in mthca_unmap_user_db() argument
    547  void mthca_cleanup_user_db_tab(struct mthca_dev *dev, struct mthca_uar *uar,    in mthca_cleanup_user_db_tab() argument
    557  mthca_UNMAP_ICM(dev, mthca_uarc_virt(dev, uar, i), 1);    in mthca_cleanup_user_db_tab()

mthca_srq.c
    54   __be32 uar;    member
    113  context->uar = cpu_to_be32(ucontext->uar.index);    in mthca_tavor_init_srq_context()
    115  context->uar = cpu_to_be32(dev->driver_uar.index);    in mthca_tavor_init_srq_context()
    141  context->logstride_usrpage |= cpu_to_be32(ucontext->uar.index);    in mthca_arbel_init_srq_context()

mthca_provider.h
    66   struct mthca_uar uar;    member

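The mthca_provider.c hits trace the lifetime of a user context's UAR: allocated at line 321, unwound at line 328 on an early failure, and torn down together with the doorbell table at lines 333-334 and 345-347, always in reverse order of setup. Below is a compact user-space sketch of that unwind discipline; all toy_* names are invented, and only the ordering mirrors what the listing shows of mthca_alloc_ucontext().

    #include <stdio.h>
    #include <stdlib.h>

    struct toy_uar    { int index; };
    struct toy_db_tab { int npages; };

    static int  toy_uar_alloc(struct toy_uar *uar)       { uar->index = 1; return 0; }
    static void toy_uar_free(struct toy_uar *uar)        { uar->index = -1; }
    static struct toy_db_tab *toy_db_tab_alloc(void)     { return calloc(1, sizeof(struct toy_db_tab)); }
    static void toy_db_tab_cleanup(struct toy_db_tab *t) { free(t); }
    static int  toy_register_context(void)               { return -1; /* force the error path */ }

    /* Setup is uar -> db_tab -> registration; teardown runs in the opposite
     * order, which is why the failure path cleans the doorbell table before
     * returning the UAR index, mirroring the order visible above. */
    static int toy_alloc_ucontext(struct toy_uar *uar, struct toy_db_tab **db_tab)
    {
            int err;

            err = toy_uar_alloc(uar);
            if (err)
                    return err;

            *db_tab = toy_db_tab_alloc();
            if (!*db_tab) {
                    err = -1;
                    goto err_uar;
            }

            err = toy_register_context();
            if (err)
                    goto err_db_tab;

            return 0;

    err_db_tab:
            toy_db_tab_cleanup(*db_tab);
    err_uar:
            toy_uar_free(uar);
            return err;
    }

    int main(void)
    {
            struct toy_uar uar;
            struct toy_db_tab *db_tab;

            printf("alloc_ucontext returned %d\n", toy_alloc_ucontext(&uar, &db_tab));
            return 0;
    }
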
/Linux-v5.4/drivers/infiniband/hw/vmw_pvrdma/

pvrdma_doorbell.c
    83   int pvrdma_uar_alloc(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar)    in pvrdma_uar_alloc() argument
    108  uar->index = obj;    in pvrdma_uar_alloc()
    109  uar->pfn = (pci_resource_start(dev->pdev, PVRDMA_PCI_RESOURCE_UAR) >>    in pvrdma_uar_alloc()
    110  PAGE_SHIFT) + uar->index;    in pvrdma_uar_alloc()
    115  void pvrdma_uar_free(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar)    in pvrdma_uar_free() argument
    121  obj = uar->index & (tbl->max - 1);    in pvrdma_uar_free()

pvrdma_verbs.c
    329  ret = pvrdma_uar_alloc(vdev, &context->uar);    in pvrdma_alloc_ucontext()
    335  cmd->pfn = context->uar.pfn;    in pvrdma_alloc_ucontext()
    337  cmd->pfn64 = context->uar.pfn;    in pvrdma_alloc_ucontext()
    353  pvrdma_uar_free(vdev, &context->uar);    in pvrdma_alloc_ucontext()
    361  pvrdma_uar_free(vdev, &context->uar);    in pvrdma_alloc_ucontext()
    385  pvrdma_uar_free(to_vdev(ibcontext->device), &context->uar);    in pvrdma_dealloc_ucontext()
    413  if (io_remap_pfn_range(vma, start, context->uar.pfn, size,    in pvrdma_mmap()

pvrdma.h
    90   struct pvrdma_uar_map *uar;    member
    123  struct pvrdma_uar_map uar;    member
    531  int pvrdma_uar_alloc(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar);
    532  void pvrdma_uar_free(struct pvrdma_dev *dev, struct pvrdma_uar_map *uar);

pvrdma_cq.c
    200  cq->uar = &context->uar;    in pvrdma_create_cq()

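pvrdma_verbs.c shows the consumer side of the PFN computed in pvrdma_doorbell.c: the UAR page is handed to the device when the ucontext is created (lines 335/337) and exposed to user space from the driver's mmap hook (line 413). Below is a sketch of such an mmap handler, assuming kernel context; my_uar, my_ucontext and my_mmap_uar are hypothetical stand-ins rather than pvrdma symbols, and only the io_remap_pfn_range() call reflects the hit above.

    #include <linux/mm.h>
    #include <linux/errno.h>

    struct my_uar      { unsigned long pfn; };
    struct my_ucontext { struct my_uar uar; };

    /* Map exactly one doorbell page, uncached, at vma->vm_start. */
    static int my_mmap_uar(struct my_ucontext *ctx, struct vm_area_struct *vma)
    {
            if (vma->vm_end - vma->vm_start != PAGE_SIZE)
                    return -EINVAL;

            vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
            if (io_remap_pfn_range(vma, vma->vm_start, ctx->uar.pfn,
                                   PAGE_SIZE, vma->vm_page_prot))
                    return -EAGAIN;

            return 0;
    }
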
/Linux-v5.4/drivers/infiniband/hw/efa/

efa_admin_cmds_defs.h
    150  u16 uar;    member
    460  u16 uar;    member
    729  u16 uar;    member
    739  u16 uar;    member

efa_com_cmd.c
    39   create_qp_cmd.uar = params->uarn;    in efa_com_create_qp()
    168  create_cmd.uar = params->uarn;    in efa_com_create_cq()
    700  result->uarn = resp.uar;    in efa_com_alloc_uar()
    714  cmd.uar = params->uarn;    in efa_com_dealloc_uar()
    724  cmd.uar, err);    in efa_com_dealloc_uar()

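On EFA the UAR is just a number carried in admin commands: efa_com_alloc_uar() pulls it out of the device response (line 700) and every later create-QP/CQ or dealloc command writes it back into the command struct (lines 39, 168, 714). A small user-space model of that plumbing follows; the struct layouts are invented and only the uar field mirrors the listing.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    struct toy_alloc_uar_resp { uint16_t uar; };
    struct toy_create_cq_cmd  { uint16_t uar; uint16_t cq_depth; };

    /* Models the shape of efa_com_create_cq(): the UAR number returned by the
     * alloc-UAR admin command is copied into each subsequent create command. */
    static void toy_fill_create_cq(uint16_t uarn, struct toy_create_cq_cmd *cmd)
    {
            memset(cmd, 0, sizeof(*cmd));
            cmd->uar = uarn;
            cmd->cq_depth = 256;         /* arbitrary example value */
    }

    int main(void)
    {
            struct toy_alloc_uar_resp resp = { .uar = 3 };   /* pretend device reply */
            struct toy_create_cq_cmd cmd;

            toy_fill_create_cq(resp.uar, &cmd);
            printf("create_cq will run on uar %u\n", cmd.uar);
            return 0;
    }
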
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/steering/

dr_send.c
    46   struct mlx5_uars_page *uar;    member
    164  MLX5_SET(qpc, qpc, uar_page, attr->uar->index);    in dr_create_rc_qp()
    190  dr_qp->uar = attr->uar;    in dr_create_rc_qp()
    220  mlx5_write64(ctrl, dr_qp->uar->map + MLX5_BF_OFFSET);    in dr_cmd_notify_hw()
    691  struct mlx5_uars_page *uar,    in dr_create_cq() argument
    742  MLX5_SET(cqc, cqc, uar_page, uar->index);    in dr_create_cq()
    765  cq->mcq.uar = uar;    in dr_create_cq()
    861  dmn->send_ring->cq = dr_create_cq(dmn->mdev, dmn->uar, cq_size);    in mlx5dr_send_ring_alloc()
    869  init_attr.uar = dmn->uar;    in mlx5dr_send_ring_alloc()

dr_domain.c
    66   dmn->uar = mlx5_get_uars_page(dmn->mdev);    in dr_domain_init_resources()
    67   if (!dmn->uar) {    in dr_domain_init_resources()
    100  mlx5_put_uars_page(dmn->mdev, dmn->uar);    in dr_domain_init_resources()
    112  mlx5_put_uars_page(dmn->mdev, dmn->uar);    in dr_domain_uninit_resources()

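dr_domain.c takes a reference on the driver-wide UARs page with mlx5_get_uars_page() and releases it with mlx5_put_uars_page() on both the error and teardown paths (lines 100 and 112); dr_send.c then feeds that page's index into the QPC/CQC and its mapping into the doorbell write. Here is a sketch of the get/put pairing, assuming kernel context and a hypothetical my_ctx holder; note that conn.c below tests the return value with IS_ERR() while dr_domain.c line 67 uses a NULL check.

    #include <linux/err.h>
    #include <linux/mlx5/driver.h>

    struct my_ctx {
            struct mlx5_core_dev  *mdev;
            struct mlx5_uars_page *uar;
    };

    /* Hold the shared UARs page for the lifetime of my_ctx. */
    static int my_ctx_init(struct my_ctx *ctx)
    {
            ctx->uar = mlx5_get_uars_page(ctx->mdev);
            if (IS_ERR(ctx->uar))
                    return PTR_ERR(ctx->uar);
            return 0;
    }

    static void my_ctx_cleanup(struct my_ctx *ctx)
    {
            mlx5_put_uars_page(ctx->mdev, ctx->uar);   /* balance the get */
    }
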
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/fpga/

conn.c
    138   mlx5_write64(wqe, conn->fdev->conn_res.uar->map + MLX5_BF_OFFSET);    in mlx5_fpga_conn_notify_hw()
    362   conn->fdev->conn_res.uar->map, conn->cq.wq.cc);    in mlx5_fpga_conn_arm_cq()
    475   MLX5_SET(cqc, cqc, uar_page, fdev->conn_res.uar->index);    in mlx5_fpga_conn_create_cq()
    498   conn->cq.mcq.uar = fdev->conn_res.uar;    in mlx5_fpga_conn_create_cq()
    582   MLX5_SET(qpc, qpc, uar_page, fdev->conn_res.uar->index);    in mlx5_fpga_conn_create_qp()
    1007  fdev->conn_res.uar = mlx5_get_uars_page(fdev->mdev);    in mlx5_fpga_conn_device_init()
    1008  if (IS_ERR(fdev->conn_res.uar)) {    in mlx5_fpga_conn_device_init()
    1009  err = PTR_ERR(fdev->conn_res.uar);    in mlx5_fpga_conn_device_init()
    1014  fdev->conn_res.uar->index);    in mlx5_fpga_conn_device_init()
    1036  mlx5_put_uars_page(fdev->mdev, fdev->conn_res.uar);    in mlx5_fpga_conn_device_init()
    [all …]

core.h
    58   struct mlx5_uars_page *uar;    member

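conn.c rings the doorbell the same way dr_send.c does: after posting a WQE it writes the first 8 bytes of the control segment to the BlueFlame offset of the mapped UAR page (lines 138 and 220 in the two files). The sketch below shows that single step, assuming kernel context; ring_doorbell() is a hypothetical wrapper, and memory-barrier handling by the caller is assumed, as in the drivers above.

    #include <linux/mlx5/driver.h>
    #include <linux/mlx5/doorbell.h>

    /* Kick the hardware: write the first two big-endian words of the control
     * segment to the BlueFlame area of the UAR page the QP was created against. */
    static void ring_doorbell(struct mlx5_uars_page *uar, __be32 *ctrl)
    {
            mlx5_write64(ctrl, uar->map + MLX5_BF_OFFSET);
    }
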
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/

cq.c
    138  cq->uar = dev->priv.uar;    in mlx5_core_create_cq()

uar.c
    49   *uarn = MLX5_GET(alloc_uar_out, out, uar);    in mlx5_cmd_alloc_uar()
    60   MLX5_SET(dealloc_uar_in, in, uar, uarn);    in mlx5_cmd_free_uar()

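uar.c is the firmware round trip underneath the mlx5 users above: ALLOC_UAR returns a UAR number in the "uar" field of its output mailbox, and DEALLOC_UAR takes the same number back. A sketch of the likely shape of that command follows, assuming kernel context; sketch_alloc_uar is not the driver's function name, and only the MLX5_GET of alloc_uar_out.uar is taken from the hit at line 49.

    #include <linux/mlx5/driver.h>
    #include <linux/mlx5/mlx5_ifc.h>

    static int sketch_alloc_uar(struct mlx5_core_dev *dev, u32 *uarn)
    {
            u32 in[MLX5_ST_SZ_DW(alloc_uar_in)] = {};
            u32 out[MLX5_ST_SZ_DW(alloc_uar_out)] = {};
            int err;

            MLX5_SET(alloc_uar_in, in, opcode, MLX5_CMD_OP_ALLOC_UAR);
            err = mlx5_cmd_exec(dev, in, sizeof(in), out, sizeof(out));
            if (!err)
                    *uarn = MLX5_GET(alloc_uar_out, out, uar);
            return err;
    }
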
/Linux-v5.4/drivers/infiniband/hw/mlx4/

cq.c
    183  struct mlx4_uar *uar;    in mlx4_ib_create_cq() local
    223  uar = &context->uar;    in mlx4_ib_create_cq()
    241  uar = &dev->priv_uar;    in mlx4_ib_create_cq()
    248  err = mlx4_cq_alloc(dev->dev, entries, &cq->buf.mtt, uar, cq->db.dma,    in mlx4_ib_create_cq()

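The mlx4_ib_create_cq() hits show where the two UAR flavors meet: a CQ created for a user process takes the doorbell page from that process's ucontext (line 223), while a kernel-owned CQ falls back to the driver's private UAR (line 241), and either one is what mlx4_cq_alloc() programs into the hardware context (line 248, matching the net/mlx4 cq.c hits at the top). A toy model of that selection, with all types invented:

    #include <stdio.h>
    #include <stddef.h>

    struct toy_uar      { int index; };
    struct toy_ucontext { struct toy_uar uar; };
    struct toy_dev      { struct toy_uar priv_uar; };

    /* User-created CQs ring doorbells through the ucontext's UAR; kernel CQs
     * use the driver's private UAR. */
    static struct toy_uar *toy_pick_cq_uar(struct toy_dev *dev,
                                           struct toy_ucontext *uctx)
    {
            return uctx ? &uctx->uar : &dev->priv_uar;
    }

    int main(void)
    {
            struct toy_dev dev = { .priv_uar = { .index = 0 } };
            struct toy_ucontext uctx = { .uar = { .index = 7 } };

            printf("user cq -> uar %d, kernel cq -> uar %d\n",
                   toy_pick_cq_uar(&dev, &uctx)->index,
                   toy_pick_cq_uar(&dev, NULL)->index);
            return 0;
    }
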
/Linux-v5.4/include/linux/mlx5/

cq.h
    45   struct mlx5_uars_page *uar;    member