Searched refs:pages_per_mr (Results 1 – 5 of 5) sorted by relevance
drivers/infiniband/core/rw.c
 89 u32 pages_per_mr = rdma_rw_fr_page_list_len(qp->pd->device, in rdma_rw_init_one_mr() local
 91 u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_one_mr()
123 u32 pages_per_mr = rdma_rw_fr_page_list_len(qp->pd->device, in rdma_rw_init_mr_wrs() local
127 ctx->nr_ops = (sg_cnt + pages_per_mr - 1) / pages_per_mr; in rdma_rw_init_mr_wrs()
136 u32 nents = min(sg_cnt, pages_per_mr); in rdma_rw_init_mr_wrs()
365 u32 pages_per_mr = rdma_rw_fr_page_list_len(qp->pd->device, in rdma_rw_ctx_signature_init() local
370 if (sg_cnt > pages_per_mr || prot_sg_cnt > pages_per_mr) { in rdma_rw_ctx_signature_init()
372 sg_cnt, prot_sg_cnt, pages_per_mr); in rdma_rw_ctx_signature_init()
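The rw.c hits above all revolve around one piece of arithmetic: a scatterlist of sg_cnt entries is split into nr_ops registrations of at most pages_per_mr entries each (the ceiling division at line 127, the min() at lines 91/136). The userspace sketch below reproduces only that arithmetic with made-up values; it does not call any RDMA API, and sg_cnt/pages_per_mr here are illustrative assumptions.

```c
/*
 * Minimal userspace sketch (not kernel code) of the chunking logic the
 * rw.c results hint at: sg_cnt scatterlist entries are covered by
 * nr_ops memory registrations, each taking at most pages_per_mr entries.
 */
#include <stdio.h>

int main(void)
{
	unsigned int sg_cnt = 300;        /* assumed total SG entries */
	unsigned int pages_per_mr = 128;  /* assumed per-MR page-list limit */

	/* same ceiling division as the line 127 hit above */
	unsigned int nr_ops = (sg_cnt + pages_per_mr - 1) / pages_per_mr;

	printf("nr_ops = %u\n", nr_ops);

	for (unsigned int i = 0; i < nr_ops; i++) {
		unsigned int remaining = sg_cnt - i * pages_per_mr;
		/* per-op nents is capped at pages_per_mr, as at lines 91/136 */
		unsigned int nents = remaining < pages_per_mr ?
				     remaining : pages_per_mr;

		printf("op %u: nents = %u\n", i, nents);
	}
	return 0;
}
```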
drivers/infiniband/ulp/iser/iscsi_iser.h
492 unsigned short pages_per_mr; member

drivers/infiniband/ulp/iser/iser_initiator.c
254 iser_conn->pages_per_mr)) in iser_alloc_rx_descriptors()

drivers/infiniband/ulp/iser/iser_verbs.c
683 iser_conn->pages_per_mr = in iser_calc_scsi_params()
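iser_calc_scsi_params() (line 683) fills in the per-connection pages_per_mr member declared at line 492. The sketch below is only a hypothetical illustration of how such a limit might be derived: clamp the pages needed for the largest supported I/O against a device-side fast-registration page-list limit. All names and values are invented and this is not the driver's actual calculation.

```c
/*
 * Hypothetical sketch of deriving a per-connection pages_per_mr by
 * clamping the desired I/O size against a device limit.  max_io_size
 * and dev_fr_page_list_len are invented example values.
 */
#include <stdio.h>

#define PAGE_SIZE 4096u

int main(void)
{
	unsigned int max_io_size = 512u * 1024u;   /* assumed 512 KiB max I/O */
	unsigned int dev_fr_page_list_len = 256u;  /* assumed device limit */

	unsigned int wanted = max_io_size / PAGE_SIZE;  /* pages for max I/O */
	unsigned short pages_per_mr = wanted < dev_fr_page_list_len ?
				      wanted : dev_fr_page_list_len;

	printf("pages_per_mr = %hu\n", pages_per_mr);
	return 0;
}
```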
drivers/nvme/host/rdma.c
440 int ret, pages_per_mr; in nvme_rdma_create_queue_ib() local
487 pages_per_mr = nvme_rdma_get_max_fr_pages(ibdev) + 1;
491 pages_per_mr, 0); in nvme_rdma_create_queue_ib()
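The nvme-rdma hit at line 487 sizes the MR page lists at nvme_rdma_get_max_fr_pages(ibdev) + 1. The sketch below shows the alignment arithmetic I take that extra entry to allow for: a buffer that does not start page aligned can straddle one more page than its length alone implies. This is my reading of the "+ 1", not a quote of the driver's rationale, and PAGE_SIZE here is just an assumed 4 KiB.

```c
/*
 * Sketch of the page-count arithmetic behind the "+ 1": a misaligned
 * buffer spans one extra page, so the page list needs one extra entry.
 */
#include <stdio.h>

#define PAGE_SIZE 4096UL

static unsigned long pages_spanned(unsigned long offset, unsigned long len)
{
	/* first byte rounded down to a page, last byte rounded down, inclusive */
	unsigned long first = offset / PAGE_SIZE;
	unsigned long last  = (offset + len - 1) / PAGE_SIZE;

	return last - first + 1;
}

int main(void)
{
	/* aligned buffer: 8 pages of data need exactly 8 page-list entries */
	printf("%lu\n", pages_spanned(0, 8 * PAGE_SIZE));
	/* misaligned buffer: the same amount of data straddles 9 pages */
	printf("%lu\n", pages_spanned(512, 8 * PAGE_SIZE));
	return 0;
}
```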