Searched refs: umem_dmabuf (Results 1 – 5 of 5) sorted by relevance
/Linux-v5.15/drivers/infiniband/core/
  umem_dmabuf.c
    12   int ib_umem_dmabuf_map_pages(struct ib_umem_dmabuf *umem_dmabuf)
    21   dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);
    23   if (umem_dmabuf->sgt)
    26   sgt = dma_buf_map_attachment(umem_dmabuf->attach, DMA_BIDIRECTIONAL);
    32   start = ALIGN_DOWN(umem_dmabuf->umem.address, PAGE_SIZE);
    33   end = ALIGN(umem_dmabuf->umem.address + umem_dmabuf->umem.length,
    41   umem_dmabuf->first_sg = sg;
    42   umem_dmabuf->first_sg_offset = offset;
    50   umem_dmabuf->last_sg = sg;
    51   umem_dmabuf->last_sg_trim = trim;
    [all …]
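The hits above show ib_umem_dmabuf_map_pages() mapping the whole dma-buf attachment and then trimming the first and last scatterlist entries so that only the page-aligned umem byte range stays mapped. Below is a minimal standalone sketch (plain userspace C, not kernel code) of that offset/trim arithmetic: the sample address, length, and segment lengths are made up for illustration, while the start/end rounding mirrors lines 32–33.

/*
 * Sketch of the first_sg_offset / last_sg_trim arithmetic hinted at by
 * umem_dmabuf.c lines 32-51. Not kernel code; values are illustrative.
 */
#include <stdio.h>

#define PAGE_SIZE        4096UL
#define ALIGN_DOWN(x, a) ((x) & ~((a) - 1))
#define ALIGN(x, a)      (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
        unsigned long address = 0x3000 + 100;     /* stands in for umem.address */
        unsigned long length  = 2 * PAGE_SIZE;    /* stands in for umem.length  */
        unsigned long seg_len[] = { 4 * PAGE_SIZE, 4 * PAGE_SIZE }; /* dma-buf segments */
        unsigned long start = ALIGN_DOWN(address, PAGE_SIZE);
        unsigned long end   = ALIGN(address + length, PAGE_SIZE);
        unsigned long cur = 0;

        for (unsigned int i = 0; i < sizeof(seg_len) / sizeof(seg_len[0]); i++) {
                unsigned long len = seg_len[i];

                /* Segment containing the rounded-down start: skip the lead-in bytes. */
                if (cur <= start && start < cur + len)
                        printf("first_sg = seg %u, first_sg_offset = %lu\n",
                               i, start - cur);
                /* Segment containing the rounded-up end: drop the trailing bytes. */
                if (cur < end && end <= cur + len)
                        printf("last_sg = seg %u, last_sg_trim = %lu\n",
                               i, cur + len - end);
                cur += len;
        }
        return 0;
}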
  Makefile
    43   ib_uverbs-$(CONFIG_INFINIBAND_USER_MEM) += umem.o umem_dmabuf.o
/Linux-v5.15/include/rdma/ |
  ib_umem.h
    142  int ib_umem_dmabuf_map_pages(struct ib_umem_dmabuf *umem_dmabuf);
    143  void ib_umem_dmabuf_unmap_pages(struct ib_umem_dmabuf *umem_dmabuf);
    144  void ib_umem_dmabuf_release(struct ib_umem_dmabuf *umem_dmabuf);
    182  static inline int ib_umem_dmabuf_map_pages(struct ib_umem_dmabuf *umem_dmabuf)
    186  static inline void ib_umem_dmabuf_unmap_pages(struct ib_umem_dmabuf *umem_dmabuf) { }
    187  static inline void ib_umem_dmabuf_release(struct ib_umem_dmabuf *umem_dmabuf) { }
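The header hits show only the three prototypes plus the inline no-op stubs used when CONFIG_INFINIBAND_USER_MEM is disabled. For orientation, here is a reconstruction of struct ib_umem_dmabuf inferred solely from the fields these search results touch; it is not copied from the v5.15 header and may be incomplete, so treat the field comments as assumptions.

/*
 * Reconstructed sketch of struct ib_umem_dmabuf, inferred from the fields
 * referenced in these hits; see include/rdma/ib_umem.h in the tree for the
 * authoritative definition.
 */
struct ib_umem_dmabuf {
        struct ib_umem umem;               /* address, length, iova used by the hits */
        struct dma_buf_attachment *attach; /* dynamic dma-buf attachment */
        struct sg_table *sgt;              /* current DMA mapping, NULL when unmapped */
        struct scatterlist *first_sg;      /* first entry, shifted by first_sg_offset */
        struct scatterlist *last_sg;       /* last entry, shortened by last_sg_trim */
        unsigned long first_sg_offset;
        unsigned long last_sg_trim;
        void *private;                     /* driver cookie (mlx5 stores its mr here) */
};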
/Linux-v5.15/drivers/infiniband/hw/mlx5/ |
  odp.c
    692  struct ib_umem_dmabuf *umem_dmabuf = to_ib_umem_dmabuf(mr->umem);
    700  dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);
    701  err = ib_umem_dmabuf_map_pages(umem_dmabuf);
    703  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);
    707  page_size = mlx5_umem_find_best_pgsz(&umem_dmabuf->umem, mkc,
    709  umem_dmabuf->umem.iova);
    711  ib_umem_dmabuf_unmap_pages(umem_dmabuf);
    716  dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);
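The odp.c hits outline the page-fault path for a dma-buf MR: take the dma-buf reservation lock, (re)map the pages, program the device translation table, and unlock. A hedged sketch of that call pattern follows; my_driver_program_mr() is a hypothetical stand-in for the mlx5-specific step (page-size selection and XLT update), while the dma_resv_* and ib_umem_dmabuf_* calls are the ones visible in the hits above.

#include <linux/dma-resv.h>
#include <rdma/ib_umem.h>

/*
 * Hypothetical device-specific step: a real driver would pick a page size
 * and update its translation tables here (mlx5 uses mlx5_umem_find_best_pgsz
 * and its XLT helpers, per the hits at lines 707-709).
 */
static int my_driver_program_mr(struct ib_umem_dmabuf *umem_dmabuf)
{
        return 0;
}

/* Sketch of the locking discipline shown at odp.c lines 700-716. */
static int my_pagefault_dmabuf_mr(struct ib_umem_dmabuf *umem_dmabuf)
{
        int err;

        /* The reservation lock serializes against the exporter's move_notify. */
        dma_resv_lock(umem_dmabuf->attach->dmabuf->resv, NULL);

        /* (Re)build the DMA scatter list for the umem's byte range. */
        err = ib_umem_dmabuf_map_pages(umem_dmabuf);
        if (err)
                goto out_unlock;

        err = my_driver_program_mr(umem_dmabuf);
        if (err)
                ib_umem_dmabuf_unmap_pages(umem_dmabuf);

out_unlock:
        dma_resv_unlock(umem_dmabuf->attach->dmabuf->resv);
        return err;
}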
  mr.c
    1576  struct ib_umem_dmabuf *umem_dmabuf = attach->importer_priv;
    1577  struct mlx5_ib_mr *mr = umem_dmabuf->private;
    1579  dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);
    1581  if (!umem_dmabuf->sgt)
    1585  ib_umem_dmabuf_unmap_pages(umem_dmabuf);
    1600  struct ib_umem_dmabuf *umem_dmabuf;
    1615  umem_dmabuf = ib_umem_dmabuf_get(&dev->ib_dev, offset, length, fd,
    1618  if (IS_ERR(umem_dmabuf)) {
    1620  PTR_ERR(umem_dmabuf));
    1621  return ERR_CAST(umem_dmabuf);
    [all …]
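The mr.c hits show the importer side: a dma-buf move_notify callback that tears down the old mapping under the reservation lock, and a registration path that hands those ops to ib_umem_dmabuf_get() (the core sets attach->importer_priv to the umem, which is why line 1576 can recover it). Below is a hedged sketch of that wiring; my_invalidate_hw_mapping() is a hypothetical stand-in for the driver-specific invalidation (mlx5 revokes its mkey there), and the ib_umem_dmabuf_get() argument order follows the hit at line 1615.

#include <linux/dma-buf.h>
#include <linux/dma-resv.h>
#include <rdma/ib_umem.h>

/* Hypothetical: point the HW MR at dummy pages / fence outstanding DMA. */
static void my_invalidate_hw_mapping(struct ib_umem_dmabuf *umem_dmabuf)
{
}

/* move_notify callback, mirroring the pattern at mr.c lines 1576-1585. */
static void my_dmabuf_invalidate_cb(struct dma_buf_attachment *attach)
{
        struct ib_umem_dmabuf *umem_dmabuf = attach->importer_priv;

        dma_resv_assert_held(umem_dmabuf->attach->dmabuf->resv);

        /* Nothing mapped yet, nothing to invalidate. */
        if (!umem_dmabuf->sgt)
                return;

        my_invalidate_hw_mapping(umem_dmabuf);
        ib_umem_dmabuf_unmap_pages(umem_dmabuf);
}

static struct dma_buf_attach_ops my_dmabuf_attach_ops = {
        .allow_peer2peer = true,
        .move_notify = my_dmabuf_invalidate_cb,
};

/* Registration: attach to the dma-buf fd and get a dynamic umem. */
static struct ib_umem_dmabuf *my_reg_dmabuf(struct ib_device *ibdev,
                                            unsigned long offset, size_t length,
                                            int fd, int access)
{
        return ib_umem_dmabuf_get(ibdev, offset, length, fd, access,
                                  &my_dmabuf_attach_ops);
}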