| /Linux-v5.4/net/rds/ |
| D | ib_frmr.c |
      36  rds_transition_frwr_state(struct rds_ib_mr *ibmr,    in rds_transition_frwr_state() argument
      40  if (cmpxchg(&ibmr->u.frmr.fr_state,    in rds_transition_frwr_state()
      47  atomic_dec(&ibmr->ic->i_fastreg_inuse_count);    in rds_transition_frwr_state()
      57  struct rds_ib_mr *ibmr = NULL;    in rds_ib_alloc_frmr() local
      66  ibmr = rds_ib_try_reuse_ibmr(pool);    in rds_ib_alloc_frmr()
      67  if (ibmr)    in rds_ib_alloc_frmr()
      68  return ibmr;    in rds_ib_alloc_frmr()
      70  ibmr = kzalloc_node(sizeof(*ibmr), GFP_KERNEL,    in rds_ib_alloc_frmr()
      72  if (!ibmr) {    in rds_ib_alloc_frmr()
      77  frmr = &ibmr->u.frmr;    in rds_ib_alloc_frmr()
      [all …]
|
| D | ib_fmr.c |
      38  struct rds_ib_mr *ibmr = NULL;    in rds_ib_alloc_fmr() local
      58  ibmr = rds_ib_try_reuse_ibmr(pool);    in rds_ib_alloc_fmr()
      59  if (ibmr)    in rds_ib_alloc_fmr()
      60  return ibmr;    in rds_ib_alloc_fmr()
      62  ibmr = kzalloc_node(sizeof(*ibmr), GFP_KERNEL,    in rds_ib_alloc_fmr()
      64  if (!ibmr) {    in rds_ib_alloc_fmr()
      69  fmr = &ibmr->u.fmr;    in rds_ib_alloc_fmr()
      83  ibmr->pool = pool;    in rds_ib_alloc_fmr()
      89  return ibmr;    in rds_ib_alloc_fmr()
      92  kfree(ibmr);    in rds_ib_alloc_fmr()
      [all …]
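
Both RDS allocators above follow the same allocate-or-reuse shape: pull a recycled MR off the per-device pool first, and only fall back to a fresh NUMA-local allocation when the pool has nothing to give. Below is a minimal sketch of that shape, not the kernel code itself; it assumes the RDS-internal definitions in net/rds (here included as "ib_mr.h"), and the `nid` parameter and the ERR_PTR error path are illustrative assumptions.

```c
/* Sketch of the allocate-or-reuse pattern in rds_ib_alloc_frmr()/rds_ib_alloc_fmr().
 * Assumes a kernel build context and the RDS-internal header for struct rds_ib_mr;
 * 'nid' and the ERR_PTR error path are illustrative assumptions. */
#include <linux/slab.h>
#include <linux/err.h>
#include "ib_mr.h"

static struct rds_ib_mr *rds_mr_get(struct rds_ib_mr_pool *pool, int nid)
{
	struct rds_ib_mr *ibmr;

	ibmr = rds_ib_try_reuse_ibmr(pool);	/* recycle a pooled MR when possible */
	if (ibmr)
		return ibmr;

	ibmr = kzalloc_node(sizeof(*ibmr), GFP_KERNEL, nid);
	if (!ibmr)
		return ERR_PTR(-ENOMEM);

	ibmr->pool = pool;			/* remember the owning pool, as ib_fmr.c does */
	return ibmr;
}
```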
|
| D | ib_rdma.c |
      193  struct rds_ib_mr *ibmr = NULL;    in rds_ib_reuse_mr() local
      201  ibmr = llist_entry(ret, struct rds_ib_mr, llnode);    in rds_ib_reuse_mr()
      208  return ibmr;    in rds_ib_reuse_mr()
      213  struct rds_ib_mr *ibmr = trans_private;    in rds_ib_sync_mr() local
      214  struct rds_ib_device *rds_ibdev = ibmr->device;    in rds_ib_sync_mr()
      218  ib_dma_sync_sg_for_cpu(rds_ibdev->dev, ibmr->sg,    in rds_ib_sync_mr()
      219  ibmr->sg_dma_len, DMA_BIDIRECTIONAL);    in rds_ib_sync_mr()
      222  ib_dma_sync_sg_for_device(rds_ibdev->dev, ibmr->sg,    in rds_ib_sync_mr()
      223  ibmr->sg_dma_len, DMA_BIDIRECTIONAL);    in rds_ib_sync_mr()
      228  void __rds_ib_teardown_mr(struct rds_ib_mr *ibmr)    in __rds_ib_teardown_mr() argument
      [all …]
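
rds_ib_sync_mr() above is essentially a direction switch over the core DMA sync helpers: the MR's scatterlist is synced toward the CPU before RDS reads RDMA'd data and back toward the device before the HCA may touch it again. A sketch of that switch, assuming the rds_ib_mr fields visible in the hits (device, sg, sg_dma_len):

```c
/* Sketch of the direction switch in rds_ib_sync_mr(); assumes the RDS-internal
 * rds_ib_mr / rds_ib_device layout shown in the hits above. */
#include <rdma/ib_verbs.h>
#include "ib_mr.h"

static void rds_mr_sync(struct rds_ib_mr *ibmr, bool for_cpu)
{
	struct rds_ib_device *rds_ibdev = ibmr->device;

	if (for_cpu)
		ib_dma_sync_sg_for_cpu(rds_ibdev->dev, ibmr->sg,
				       ibmr->sg_dma_len, DMA_BIDIRECTIONAL);
	else
		ib_dma_sync_sg_for_device(rds_ibdev->dev, ibmr->sg,
					  ibmr->sg_dma_len, DMA_BIDIRECTIONAL);
}
```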
|
| /Linux-v5.4/drivers/infiniband/hw/vmw_pvrdma/ |
| D | pvrdma_mr.c |
      94  mr->ibmr.lkey = resp->lkey;    in pvrdma_get_dma_mr()
      95  mr->ibmr.rkey = resp->rkey;    in pvrdma_get_dma_mr()
      97  return &mr->ibmr;    in pvrdma_get_dma_mr()
      182  mr->ibmr.lkey = resp->lkey;    in pvrdma_reg_user_mr()
      183  mr->ibmr.rkey = resp->rkey;    in pvrdma_reg_user_mr()
      185  return &mr->ibmr;    in pvrdma_reg_user_mr()
      254  mr->ibmr.lkey = resp->lkey;    in pvrdma_alloc_mr()
      255  mr->ibmr.rkey = resp->rkey;    in pvrdma_alloc_mr()
      259  return &mr->ibmr;    in pvrdma_alloc_mr()
      276  int pvrdma_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata)    in pvrdma_dereg_mr() argument
      [all …]
|
| D | pvrdma.h |
      141  struct ib_mr ibmr;    member
      284  static inline struct pvrdma_user_mr *to_vmr(struct ib_mr *ibmr)    in to_vmr() argument
      286  return container_of(ibmr, struct pvrdma_user_mr, ibmr);    in to_vmr()
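
The pvrdma.h hit shows the wrapper idiom that every driver in this listing repeats (rdmavt, mthca, usnic, cxgb3/4, rxe, ocrdma, qedr, efa): the driver's private MR type embeds a struct ib_mr, hands &priv->ibmr back to the IB core, and recovers the private struct with container_of(). A generic sketch with made-up names, not kernel symbols:

```c
/* Generic sketch of the embed-and-container_of idiom; 'my_mr' and to_my_mr()
 * are illustrative names, not kernel symbols. */
#include <rdma/ib_verbs.h>

struct my_mr {
	struct ib_mr	ibmr;	/* embedded core MR; &my_mr->ibmr is what the ib core sees */
	void		*priv;	/* driver-private state (illustrative) */
};

static inline struct my_mr *to_my_mr(struct ib_mr *ibmr)
{
	return container_of(ibmr, struct my_mr, ibmr);
}
```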
|
| /Linux-v5.4/drivers/infiniband/sw/rdmavt/ |
| D | mr.h |
      58  struct ib_mr ibmr;    member
      68  static inline struct rvt_mr *to_imr(struct ib_mr *ibmr)    in to_imr() argument
      70  return container_of(ibmr, struct rvt_mr, ibmr);    in to_imr()
      81  int rvt_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata);
      84  int rvt_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
|
| D | trace_mr.h |
      186  TP_PROTO(struct ib_mr *ibmr, int sg_nents, unsigned int *sg_offset),
      187  TP_ARGS(ibmr, sg_nents, sg_offset),
      189  RDI_DEV_ENTRY(ib_to_rvt(to_imr(ibmr)->mr.pd->device))
      198  RDI_DEV_ASSIGN(ib_to_rvt(to_imr(ibmr)->mr.pd->device))
      199  __entry->ibmr_iova = ibmr->iova;
      200  __entry->iova = to_imr(ibmr)->mr.iova;
      201  __entry->user_base = to_imr(ibmr)->mr.user_base;
      202  __entry->ibmr_length = to_imr(ibmr)->mr.length;
|
| D | mr.c |
      302  mr->ibmr.lkey = mr->mr.lkey;    in __rvt_alloc_mr()
      303  mr->ibmr.rkey = mr->mr.lkey;    in __rvt_alloc_mr()
      359  ret = &mr->ibmr;    in rvt_get_dma_mr()
      431  return &mr->ibmr;    in rvt_reg_user_mr()
      553  int rvt_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata)    in rvt_dereg_mr() argument
      555  struct rvt_mr *mr = to_imr(ibmr);    in rvt_dereg_mr()
      591  return &mr->ibmr;    in rvt_alloc_mr()
      601  static int rvt_set_page(struct ib_mr *ibmr, u64 addr)    in rvt_set_page() argument
      603  struct rvt_mr *mr = to_imr(ibmr);    in rvt_set_page()
      632  int rvt_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,    in rvt_map_mr_sg() argument
      [all …]
|
| /Linux-v5.4/drivers/infiniband/hw/mlx5/ |
| D | mr.c |
      722  mr->ibmr.lkey = mr->mmkey.key;    in mlx5_ib_get_dma_mr()
      723  mr->ibmr.rkey = mr->mmkey.key;    in mlx5_ib_get_dma_mr()
      726  return &mr->ibmr;    in mlx5_ib_get_dma_mr()
      874  mr->ibmr.pd = pd;    in alloc_mr_from_cache()
      995  wr.pd = mr->ibmr.pd;    in mlx5_ib_update_xlt()
      1050  static struct mlx5_ib_mr *reg_create(struct ib_mr *ibmr, struct ib_pd *pd,    in reg_create() argument
      1065  mr = ibmr ? to_mmr(ibmr) : kzalloc(sizeof(*mr), GFP_KERNEL);    in reg_create()
      1069  mr->ibmr.pd = pd;    in reg_create()
      1130  if (!ibmr)    in reg_create()
      1141  mr->ibmr.lkey = mr->mmkey.key;    in set_mr_fields()
      [all …]
|
| /Linux-v5.4/drivers/infiniband/hw/mlx4/ |
| D | mr.c |
      76  mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;    in mlx4_ib_get_dma_mr()
      79  return &mr->ibmr;    in mlx4_ib_get_dma_mr()
      440  mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;    in mlx4_ib_reg_user_mr()
      441  mr->ibmr.length = length;    in mlx4_ib_reg_user_mr()
      442  mr->ibmr.iova = virt_addr;    in mlx4_ib_reg_user_mr()
      443  mr->ibmr.page_size = 1U << shift;    in mlx4_ib_reg_user_mr()
      445  return &mr->ibmr;    in mlx4_ib_reg_user_mr()
      588  struct ib_device *device = mr->ibmr.device;    in mlx4_free_priv_pages()
      597  int mlx4_ib_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata)    in mlx4_ib_dereg_mr() argument
      599  struct mlx4_ib_mr *mr = to_mmr(ibmr);    in mlx4_ib_dereg_mr()
      [all …]
|
| /Linux-v5.4/drivers/infiniband/hw/mthca/ |
| D | mthca_provider.h |
      74  struct ib_mr ibmr;    member
      80  struct ib_fmr ibmr;    member
      304  static inline struct mthca_fmr *to_mfmr(struct ib_fmr *ibmr)    in to_mfmr() argument
      306  return container_of(ibmr, struct mthca_fmr, ibmr);    in to_mfmr()
      309  static inline struct mthca_mr *to_mmr(struct ib_mr *ibmr)    in to_mmr() argument
      311  return container_of(ibmr, struct mthca_mr, ibmr);    in to_mmr()
|
| D | mthca_mr.c |
      444  mr->ibmr.rkey = mr->ibmr.lkey = hw_index_to_key(dev, key);    in mthca_mr_alloc()
      481  mthca_dbg(dev, "Dumping MPT entry %08x:\n", mr->ibmr.lkey);    in mthca_mr_alloc()
      558  key_to_hw_index(dev, mr->ibmr.lkey) &    in mthca_free_mr()
      563  mthca_free_region(dev, mr->ibmr.lkey);    in mthca_free_mr()
      594  mr->ibmr.rkey = mr->ibmr.lkey = hw_index_to_key(dev, key);    in mthca_fmr_alloc()
      644  mthca_dbg(dev, "Dumping MPT entry %08x:\n", mr->ibmr.lkey);    in mthca_fmr_alloc()
      683  mthca_free_region(dev, fmr->ibmr.lkey);    in mthca_free_fmr()
      732  key = tavor_key_to_hw_index(fmr->ibmr.lkey);    in mthca_tavor_map_phys_fmr()
      734  fmr->ibmr.lkey = fmr->ibmr.rkey = tavor_hw_index_to_key(key);    in mthca_tavor_map_phys_fmr()
      772  key = arbel_key_to_hw_index(fmr->ibmr.lkey);    in mthca_arbel_map_phys_fmr()
      [all …]
|
| /Linux-v5.4/drivers/infiniband/hw/usnic/ |
| D | usnic_ib.h |
      69  struct ib_mr ibmr;    member
      125  struct usnic_ib_mr *to_umr(struct ib_mr *ibmr)    in to_umr() argument
      127  return container_of(ibmr, struct usnic_ib_mr, ibmr);    in to_umr()
|
| /Linux-v5.4/net/sunrpc/xprtrdma/ |
| D | frwr_ops.c |
      329  struct ib_mr *ibmr;    in frwr_map() local
      360  ibmr = mr->frwr.fr_mr;    in frwr_map()
      361  n = ib_map_mr_sg(ibmr, mr->mr_sg, mr->mr_nents, NULL, PAGE_SIZE);    in frwr_map()
      365  ibmr->iova &= 0x00000000ffffffff;    in frwr_map()
      366  ibmr->iova |= ((u64)be32_to_cpu(xid)) << 32;    in frwr_map()
      367  key = (u8)(ibmr->rkey & 0x000000FF);    in frwr_map()
      368  ib_update_fast_reg_key(ibmr, ++key);    in frwr_map()
      371  reg_wr->mr = ibmr;    in frwr_map()
      372  reg_wr->key = ibmr->rkey;    in frwr_map()
      377  mr->mr_handle = ibmr->rkey;    in frwr_map()
      [all …]
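
frwr_map() above prepares a fast-registration work request: it maps the scatterlist into the MR with ib_map_mr_sg(), folds the RPC XID into the upper 32 bits of the advertised iova, rotates the low 8 key bits with ib_update_fast_reg_key(), and fills the IB_WR_REG_MR descriptor. A condensed sketch of those steps against the in-kernel verbs API; this is not the xprtrdma code itself, and the `xid` parameter and the -EIO fallback are assumptions.

```c
/* Condensed sketch of the mapping and key-rotation steps visible in frwr_map();
 * not the xprtrdma implementation. 'xid' and the -EIO fallback are assumptions. */
#include <rdma/ib_verbs.h>

static int frwr_style_map(struct ib_mr *ibmr, struct scatterlist *sg,
			  int nents, __be32 xid, struct ib_reg_wr *reg_wr)
{
	u8 key;
	int n;

	n = ib_map_mr_sg(ibmr, sg, nents, NULL, PAGE_SIZE);
	if (n != nents)
		return n < 0 ? n : -EIO;

	/* Tag the upper 32 bits of the advertised offset with the RPC XID,
	 * as frwr_map() does. */
	ibmr->iova &= 0x00000000ffffffff;
	ibmr->iova |= ((u64)be32_to_cpu(xid)) << 32;

	/* Rotate the low 8 key bits so each registration carries a fresh rkey. */
	key = (u8)(ibmr->rkey & 0x000000FF);
	ib_update_fast_reg_key(ibmr, ++key);

	reg_wr->mr = ibmr;
	reg_wr->key = ibmr->rkey;
	return 0;
}
```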
|
| /Linux-v5.4/drivers/infiniband/hw/hns/ |
| D | hns_roce_mr.c |
      1006  mr->ibmr.rkey = mr->ibmr.lkey = mr->key;    in hns_roce_get_dma_mr()
      1009  return &mr->ibmr;    in hns_roce_get_dma_mr()
      1193  mr->ibmr.rkey = mr->ibmr.lkey = mr->key;    in hns_roce_reg_user_mr()
      1195  return &mr->ibmr;    in hns_roce_reg_user_mr()
      1208  static int rereg_mr_trans(struct ib_mr *ibmr, int flags,    in rereg_mr_trans() argument
      1214  struct hns_roce_dev *hr_dev = to_hr_dev(ibmr->device);    in rereg_mr_trans()
      1215  struct hns_roce_mr *mr = to_hr_mr(ibmr);    in rereg_mr_trans()
      1285  int hns_roce_rereg_user_mr(struct ib_mr *ibmr, int flags, u64 start, u64 length,    in hns_roce_rereg_user_mr() argument
      1289  struct hns_roce_dev *hr_dev = to_hr_dev(ibmr->device);    in hns_roce_rereg_user_mr()
      1290  struct hns_roce_mr *mr = to_hr_mr(ibmr);    in hns_roce_rereg_user_mr()
      [all …]
|
| /Linux-v5.4/drivers/infiniband/hw/cxgb4/ |
| D | mem.c |
      400  mhp->ibmr.rkey = mhp->ibmr.lkey = stag;    in finish_mem_reg()
      401  mhp->ibmr.length = mhp->attr.len;    in finish_mem_reg()
      402  mhp->ibmr.iova = mhp->attr.va_fbo;    in finish_mem_reg()
      403  mhp->ibmr.page_size = 1U << (mhp->attr.page_size + 12);    in finish_mem_reg()
      495  return &mhp->ibmr;    in c4iw_get_dma_mr()
      599  return &mhp->ibmr;    in c4iw_reg_user_mr()
      748  mhp->ibmr.rkey = mhp->ibmr.lkey = stag;    in c4iw_alloc_mr()
      755  return &(mhp->ibmr);    in c4iw_alloc_mr()
      773  static int c4iw_set_page(struct ib_mr *ibmr, u64 addr)    in c4iw_set_page() argument
      775  struct c4iw_mr *mhp = to_c4iw_mr(ibmr);    in c4iw_set_page()
      [all …]
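
finish_mem_reg() above shows the bookkeeping a driver performs once the hardware steering tag exists: the same stag is published as both lkey and rkey, and the core-visible length, iova and page_size are filled in. A tiny sketch of that step; the parameter names are illustrative, only the struct ib_mr fields come from the hits.

```c
/* Sketch of finish_mem_reg()-style bookkeeping on the core struct ib_mr;
 * parameter names are illustrative. */
#include <rdma/ib_verbs.h>

static void publish_mr_fields(struct ib_mr *ibmr, u32 stag, u64 len,
			      u64 va, unsigned int page_shift)
{
	ibmr->lkey = stag;		/* local key, placed in sge->lkey by ULPs */
	ibmr->rkey = stag;		/* remote key advertised to peers */
	ibmr->length = len;
	ibmr->iova = va;
	ibmr->page_size = 1U << page_shift;
}
```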
|
| /Linux-v5.4/drivers/infiniband/sw/rxe/ |
| D | rxe_verbs.c |
      915  return &mr->ibmr;    in rxe_get_dma_mr()
      951  return &mr->ibmr;    in rxe_reg_user_mr()
      961  static int rxe_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata)    in rxe_dereg_mr() argument
      963  struct rxe_mem *mr = to_rmr(ibmr);    in rxe_dereg_mr()
      997  return &mr->ibmr;    in rxe_alloc_mr()
      1007  static int rxe_set_page(struct ib_mr *ibmr, u64 addr)    in rxe_set_page() argument
      1009  struct rxe_mem *mr = to_rmr(ibmr);    in rxe_set_page()
      1020  buf->size = ibmr->page_size;    in rxe_set_page()
      1026  static int rxe_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,    in rxe_map_mr_sg() argument
      1029  struct rxe_mem *mr = to_rmr(ibmr);    in rxe_map_mr_sg()
      [all …]
|
| /Linux-v5.4/drivers/infiniband/hw/cxgb3/ |
| D | iwch_provider.h |
      75  struct ib_mr ibmr;    member
      86  static inline struct iwch_mr *to_iwch_mr(struct ib_mr *ibmr)    in to_iwch_mr() argument
      88  return container_of(ibmr, struct iwch_mr, ibmr);    in to_iwch_mr()
|
| D | iwch_mem.c |
      50  mhp->ibmr.rkey = mhp->ibmr.lkey = stag;    in iwch_finish_mem_reg()
|
| D | iwch_provider.c |
      423  return &mhp->ibmr;    in iwch_get_dma_mr()
      514  iwch_dereg_mr(&mhp->ibmr, udata);    in iwch_reg_user_mr()
      520  return &mhp->ibmr;    in iwch_reg_user_mr()
      623  mhp->ibmr.rkey = mhp->ibmr.lkey = stag;    in iwch_alloc_mr()
      629  return &(mhp->ibmr);    in iwch_alloc_mr()
      643  static int iwch_set_page(struct ib_mr *ibmr, u64 addr)    in iwch_set_page() argument
      645  struct iwch_mr *mhp = to_iwch_mr(ibmr);    in iwch_set_page()
      655  static int iwch_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,    in iwch_map_mr_sg() argument
      658  struct iwch_mr *mhp = to_iwch_mr(ibmr);    in iwch_map_mr_sg()
      662  return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, iwch_set_page);    in iwch_map_mr_sg()
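
iwch_map_mr_sg() above, like rvt_map_mr_sg(), rxe_map_mr_sg(), c4iw_set_page()'s caller and i40iw_map_mr_sg() in the other entries, delegates the scatterlist walk to ib_sg_to_pages(), which invokes the driver's set_page callback once per MR page. A self-contained sketch of that pairing; the demo_mr wrapper and its fixed-size page array are illustrative, not any real driver's layout.

```c
/* Sketch of the .map_mr_sg / set_page pairing used by the drivers above;
 * demo_mr and its fixed-size page array are illustrative only. */
#include <linux/kernel.h>
#include <rdma/ib_verbs.h>

struct demo_mr {
	struct ib_mr	ibmr;		/* embedded core MR */
	u64		pages[128];	/* DMA addresses recorded by the callback */
	u32		npages;
};

static int demo_set_page(struct ib_mr *ibmr, u64 addr)
{
	struct demo_mr *mr = container_of(ibmr, struct demo_mr, ibmr);

	if (mr->npages >= ARRAY_SIZE(mr->pages))
		return -ENOMEM;		/* more pages than this MR can describe */

	mr->pages[mr->npages++] = addr;
	return 0;
}

static int demo_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
			  int sg_nents, unsigned int *sg_offset)
{
	struct demo_mr *mr = container_of(ibmr, struct demo_mr, ibmr);

	mr->npages = 0;
	return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, demo_set_page);
}
```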
|
| /Linux-v5.4/drivers/infiniband/hw/i40iw/ |
| D | i40iw_verbs.c |
      1512  struct i40iw_pd *iwpd = to_iwpd(iwmr->ibmr.pd);    in i40iw_hw_alloc_stag()
      1573  iwmr->ibmr.rkey = stag;    in i40iw_alloc_mr()
      1574  iwmr->ibmr.lkey = stag;    in i40iw_alloc_mr()
      1575  iwmr->ibmr.pd = pd;    in i40iw_alloc_mr()
      1576  iwmr->ibmr.device = pd->device;    in i40iw_alloc_mr()
      1595  return &iwmr->ibmr;    in i40iw_alloc_mr()
      1610  static int i40iw_set_page(struct ib_mr *ibmr, u64 addr)    in i40iw_set_page() argument
      1612  struct i40iw_mr *iwmr = to_iwmr(ibmr);    in i40iw_set_page()
      1631  static int i40iw_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,    in i40iw_map_mr_sg() argument
      1634  struct i40iw_mr *iwmr = to_iwmr(ibmr);    in i40iw_map_mr_sg()
      [all …]
|
| /Linux-v5.4/drivers/infiniband/hw/ocrdma/ |
| D | ocrdma.h |
      194  struct ib_mr ibmr;    member
      483  static inline struct ocrdma_mr *get_ocrdma_mr(struct ib_mr *ibmr)    in get_ocrdma_mr() argument
      485  return container_of(ibmr, struct ocrdma_mr, ibmr);    in get_ocrdma_mr()
|
| /Linux-v5.4/drivers/infiniband/hw/qedr/ |
| D | qedr.h |
      466  struct ib_mr ibmr;    member
      568  static inline struct qedr_mr *get_qedr_mr(struct ib_mr *ibmr)    in get_qedr_mr() argument
      570  return container_of(ibmr, struct qedr_mr, ibmr);    in get_qedr_mr()
|
| /Linux-v5.4/drivers/infiniband/hw/efa/ |
| D | efa.h |
      85  struct ib_mr ibmr;    member
      143  int efa_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata);
|
| D | efa_verbs.c |
      130  static inline struct efa_mr *to_emr(struct ib_mr *ibmr)    in to_emr() argument
      132  return container_of(ibmr, struct efa_mr, ibmr);    in to_emr()
      1478  mr->ibmr.lkey = result.l_key;    in efa_reg_mr()
      1479  mr->ibmr.rkey = result.r_key;    in efa_reg_mr()
      1480  mr->ibmr.length = length;    in efa_reg_mr()
      1481  ibdev_dbg(&dev->ibdev, "Registered mr[%d]\n", mr->ibmr.lkey);    in efa_reg_mr()
      1483  return &mr->ibmr;    in efa_reg_mr()
      1494  int efa_dereg_mr(struct ib_mr *ibmr, struct ib_udata *udata)    in efa_dereg_mr() argument
      1496  struct efa_dev *dev = to_edev(ibmr->device);    in efa_dereg_mr()
      1498  struct efa_mr *mr = to_emr(ibmr);    in efa_dereg_mr()
      [all …]
|