References to to_mpd() in Linux v4.19:

/Linux-v4.19/drivers/infiniband/hw/mlx4/
  mr.c
      67  err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0,  in mlx4_ib_get_dma_mr()
     428  err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, virt_addr, length,  in mlx4_ib_reg_user_mr()
     482  to_mpd(pd)->pdn);  in mlx4_ib_rereg_user_mr()
     627  err = mlx4_mw_alloc(dev->dev, to_mpd(pd)->pdn,  in mlx4_ib_alloc_mw()
     675  err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, 0, 0, 0,  in mlx4_ib_alloc_mr()
     715  err = mlx4_fmr_alloc(dev->dev, to_mpd(pd)->pdn, convert_access(acc),  in mlx4_ib_fmr_alloc()
  ah.c
      49  ah->av.ib.port_pd = cpu_to_be32(to_mpd(pd)->pdn |  in create_ib_ah()
     120  ah->av.eth.port_pd = cpu_to_be32(to_mpd(pd)->pdn |  in create_iboe_ah()
  srq.c
     186  err = mlx4_srq_alloc(dev->dev, to_mpd(pd)->pdn, cqn, xrcdn, &srq->mtt,  in mlx4_ib_create_srq()
  mlx4_ib.h
     654  static inline struct mlx4_ib_pd *to_mpd(struct ib_pd *ibpd)  in to_mpd() function
          (definition site; a sketch of the accessor follows this directory's listing)
  qp.c
    1278  return to_mpd(to_mxrcd(qp->ibqp.xrcd)->pd);  in get_pd()
    1280  return to_mpd(qp->ibqp.pd);  in get_pd()
    2073  pd = to_mpd(ibwq->pd);  in __mlx4_ib_modify_qp()
  main.c
    1311  mlx4_pd_free(to_mdev(pd->device)->dev, to_mpd(pd)->pdn);  in mlx4_ib_dealloc_pd()
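Every mlx4 reference above funnels through the accessor defined at mlx4_ib.h:654: the core IB stack hands the driver a generic struct ib_pd, and to_mpd() recovers the driver-private struct mlx4_ib_pd that embeds it, usually just to read the hardware PD number pdn. The listing truncates the function body, so the following is only a hedged, self-contained sketch of the usual container_of() pattern; the struct layouts are simplified stand-ins, not the real definitions from ib_verbs.h and mlx4_ib.h.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of() from <linux/kernel.h>. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

/* Simplified stand-ins for the real structs (assumption: the real layouts in
 * include/rdma/ib_verbs.h and drivers/infiniband/hw/mlx4/mlx4_ib.h carry many
 * more fields). */
struct ib_pd {
        void *device;
};

struct mlx4_ib_pd {
        struct ib_pd ibpd;      /* embedded core object */
        uint32_t pdn;           /* hardware protection-domain number */
};

/* Same shape as the helper reported at mlx4_ib.h:654; the body is assumed to
 * be the standard container_of() downcast from the embedded ibpd member. */
static inline struct mlx4_ib_pd *to_mpd(struct ib_pd *ibpd)
{
        return container_of(ibpd, struct mlx4_ib_pd, ibpd);
}

int main(void)
{
        struct mlx4_ib_pd mpd = { .pdn = 42 };
        struct ib_pd *pd = &mpd.ibpd;   /* what the IB core hands back to the driver */

        /* Callers such as mlx4_ib_get_dma_mr() and mlx4_ib_create_srq() in the
         * listing perform exactly this dereference to reach the PD number. */
        printf("pdn = %u\n", (unsigned int)to_mpd(pd)->pdn);
        return 0;
}

The same embed-and-downcast design backs the mthca and mlx5 entries below; only the containing type changes.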
/Linux-v4.19/drivers/infiniband/hw/mthca/
  mthca_provider.c
     407  mthca_pd_free(to_mdev(pd->device), to_mpd(pd));  in mthca_dealloc_pd()
     425  err = mthca_create_ah(to_mdev(pd->device), to_mpd(pd), ah_attr, ah);  in mthca_ah_create()
     477  err = mthca_alloc_srq(to_mdev(pd->device), to_mpd(pd),  in mthca_create_srq()
     573  err = mthca_alloc_qp(to_mdev(pd->device), to_mpd(pd),  in mthca_create_qp()
     608  err = mthca_alloc_sqp(to_mdev(pd->device), to_mpd(pd),  in mthca_create_qp()
     892  to_mpd(pd)->pd_num,  in mthca_get_dma_mr()
     985  err = mthca_mr_alloc(dev, to_mpd(pd)->pd_num, shift, virt, length,  in mthca_reg_user_mr()
    1027  err = mthca_fmr_alloc(to_mdev(pd->device), to_mpd(pd)->pd_num,  in mthca_alloc_fmr()
  mthca_provider.h
     314  static inline struct mthca_pd *to_mpd(struct ib_pd *ibpd)  in to_mpd() function
          (definition site; a sketch follows this directory's listing)
  mthca_qp.c
     705  qp_context->pd = cpu_to_be32(to_mpd(ibqp->pd)->pd_num);  in __mthca_modify_qp()
    1480  atomic_dec(&(to_mpd(qp->ibqp.pd)->sqp_count));  in mthca_free_qp()
    1550  data->lkey = cpu_to_be32(to_mpd(sqp->qp.ibqp.pd)->ntmr.ibmr.lkey);  in build_mlx_header()
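mthca_provider.h:314 is the mthca flavour of the same accessor. As the call sites above show, mthca usually passes the whole struct mthca_pd * down to its core layer (mthca_pd_free(), mthca_alloc_qp(), ...) and only sometimes reaches for a field such as pd_num or the embedded ntmr. The body is again truncated in the listing; a minimal kernel-context sketch, assuming the standard container_of() downcast:

/* Hedged sketch of the accessor at mthca_provider.h:314.  In kernel context it
 * needs <linux/kernel.h> for container_of() and mthca_provider.h for
 * struct mthca_pd, which embeds its struct ib_pd as the ibpd member. */
static inline struct mthca_pd *to_mpd(struct ib_pd *ibpd)
{
        return container_of(ibpd, struct mthca_pd, ibpd);
}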
/Linux-v4.19/drivers/infiniband/hw/mlx5/
  mr.c
     807  MLX5_SET(mkc, mkc, pd, to_mpd(pd)->pdn);  in mlx5_ib_get_dma_mr()
     958  mr->mmkey.pd = to_mpd(pd)->pdn;  in alloc_mr_from_cache()
    1179  MLX5_SET(mkc, mkc, pd, to_mpd(pd)->pdn);  in reg_create()
    1258  MLX5_SET(mkc, mkc, pd, to_mpd(pd)->pdn);  in mlx5_ib_get_memic_mr()
    1325  mr = mlx5_ib_alloc_implicit_mr(to_mpd(pd), access_flags);  in mlx5_ib_reg_user_mr()
    1521  mr->mmkey.pd = to_mpd(pd)->pdn;  in mlx5_ib_rereg_user_mr()
    1700  MLX5_SET(mkc, mkc, pd, to_mpd(pd)->pdn);  in mlx5_ib_alloc_mr()
    1733  err = mlx5_core_create_psv(dev->mdev, to_mpd(pd)->pdn,  in mlx5_ib_alloc_mr()
    1831  MLX5_SET(mkc, mkc, pd, to_mpd(pd)->pdn);  in mlx5_ib_alloc_mw()
  srq.c
     327  in.pd = to_mpd(pd)->pdn;  in mlx5_ib_create_srq()
  qp.c
    1795  MLX5_SET(qpc, qpc, pd, to_mpd(pd ? pd : devr->p0)->pdn);  in create_qp_common()
    2019  return to_mpd(qp->ibqp.pd);  in get_pd()
    2197  MLX5_SET(dctc, dctc, pd, to_mpd(pd)->pdn);  in mlx5_ib_create_dct()
    3069  context->flags_pd = cpu_to_be32(pd ? pd->pdn : to_mpd(dev->devr.p0)->pdn);  in __mlx5_ib_modify_qp()
    3796  seg->flags_pd = cpu_to_be32(to_mpd(umrwr->pd)->pdn);  in set_reg_mkey_segment()
    4230  struct mlx5_ib_pd *pd = to_mpd(qp->ibqp.pd);  in set_reg_wr()
    5301  MLX5_SET(wq, wq, pd, to_mpd(pd)->pdn);  in create_rq()
  mlx5_ib.h
     946  static inline struct mlx5_ib_pd *to_mpd(struct ib_pd *ibpd)  in to_mpd() function
          (definition site; a sketch follows this directory's listing)
  main.c
    2347  struct mlx5_ib_pd *mpd = to_mpd(pd);  in mlx5_ib_dealloc_pd()
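mlx5_ib.h:946 completes the set. The mlx5 callers above almost always pull out pdn and program it into a firmware context (MLX5_SET(..., pd, ...) in mr.c and qp.c, in.pd in srq.c), while mlx5_ib_dealloc_pd() keeps the whole struct mlx5_ib_pd * around. Assuming the same container_of() body that the listing truncates, the accessor is a one-liner:

/* Hedged sketch of the accessor at mlx5_ib.h:946.  In kernel context it needs
 * <linux/kernel.h> for container_of() and mlx5_ib.h for struct mlx5_ib_pd,
 * which embeds its struct ib_pd as the ibpd member alongside pdn. */
static inline struct mlx5_ib_pd *to_mpd(struct ib_pd *ibpd)
{
        return container_of(ibpd, struct mlx5_ib_pd, ibpd);
}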