Searched refs:hr_qp (Results 1 – 4 of 4) sorted by relevance

/Linux-v5.4/drivers/infiniband/hw/hns/
hns_roce_qp.c
68 static void hns_roce_ib_qp_event(struct hns_roce_qp *hr_qp, in hns_roce_ib_qp_event() argument
72 struct ib_qp *ibqp = &hr_qp->ibqp; in hns_roce_ib_qp_event()
104 type, hr_qp->qpn); in hns_roce_ib_qp_event()
143 struct hns_roce_qp *hr_qp) in hns_roce_gsi_qp_alloc() argument
151 hr_qp->qpn = qpn; in hns_roce_gsi_qp_alloc()
152 atomic_set(&hr_qp->refcount, 1); in hns_roce_gsi_qp_alloc()
153 init_completion(&hr_qp->free); in hns_roce_gsi_qp_alloc()
155 ret = xa_err(xa_store_irq(xa, hr_qp->qpn & (hr_dev->caps.num_qps - 1), in hns_roce_gsi_qp_alloc()
156 hr_qp, GFP_KERNEL)); in hns_roce_gsi_qp_alloc()
164 struct hns_roce_qp *hr_qp) in hns_roce_qp_alloc() argument
[all …]
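
The allocation path above stores the QP in an xarray keyed by the QPN masked to the QP table size, after setting an initial refcount and initializing a completion that teardown waits on. A minimal userspace sketch of that masked-index store/lookup pattern, assuming the table size is a power of two; NUM_QPS, struct qp_entry and qp_table are hypothetical stand-ins, not driver code (the real driver uses an xarray and atomic_t):

/*
 * Illustrative only: a userspace stand-in for the masked-index QP table
 * seen in hns_roce_gsi_qp_alloc(). NUM_QPS and struct qp_entry are
 * hypothetical; the real driver keys an xarray the same way.
 */
#include <stdio.h>

#define NUM_QPS 256			/* assumed power of two, as the mask requires */

struct qp_entry {
	unsigned long qpn;
	int refcount;			/* real code uses atomic_t plus a completion */
};

static struct qp_entry *qp_table[NUM_QPS];

/* Store a QP under its number; index = qpn & (NUM_QPS - 1). */
static int qp_store(struct qp_entry *qp)
{
	unsigned long idx = qp->qpn & (NUM_QPS - 1);

	if (qp_table[idx])
		return -1;		/* slot already taken */
	qp->refcount = 1;		/* first reference, dropped on destroy */
	qp_table[idx] = qp;
	return 0;
}

/* Look a QP back up by number. */
static struct qp_entry *qp_lookup(unsigned long qpn)
{
	return qp_table[qpn & (NUM_QPS - 1)];
}

int main(void)
{
	struct qp_entry gsi = { .qpn = 1 };

	if (qp_store(&gsi))
		return 1;
	printf("qpn %lu found: %s\n", gsi.qpn,
	       qp_lookup(1) == &gsi ? "yes" : "no");
	return 0;
}
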
hns_roce_hw_v1.c
359 struct hns_roce_qp *hr_qp = to_hr_qp(ibqp); in hns_roce_v1_post_recv() local
365 spin_lock_irqsave(&hr_qp->rq.lock, flags); in hns_roce_v1_post_recv()
366 ind = hr_qp->rq.head & (hr_qp->rq.wqe_cnt - 1); in hns_roce_v1_post_recv()
369 if (hns_roce_wq_overflow(&hr_qp->rq, nreq, in hns_roce_v1_post_recv()
370 hr_qp->ibqp.recv_cq)) { in hns_roce_v1_post_recv()
376 if (unlikely(wr->num_sge > hr_qp->rq.max_gs)) { in hns_roce_v1_post_recv()
378 wr->num_sge, hr_qp->rq.max_gs); in hns_roce_v1_post_recv()
384 ctrl = get_recv_wqe(hr_qp, ind); in hns_roce_v1_post_recv()
396 hr_qp->rq.wrid[ind] = wr->wr_id; in hns_roce_v1_post_recv()
398 ind = (ind + 1) & (hr_qp->rq.wqe_cnt - 1); in hns_roce_v1_post_recv()
[all …]
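
The v1 receive path above treats the receive queue as a power-of-two ring: the slot index is the head masked by wqe_cnt - 1, each posted request records its wr_id and advances the index, and a post is rejected when the queue would overflow or a work request carries more SGEs than rq.max_gs allows. A rough userspace sketch of that indexing, with illustrative names (WQE_CNT, MAX_GS, struct recv_ring) rather than the driver's:

/*
 * Illustrative only: the head-masked ring indexing used by the post_recv
 * paths above. The WQE count must be a power of two for the mask to work.
 */
#include <stdio.h>

#define WQE_CNT 8			/* assumed power of two, like rq.wqe_cnt */
#define MAX_GS  2			/* per-WQE SGE limit, like rq.max_gs */

struct recv_ring {
	unsigned int head;		/* producer position */
	unsigned int tail;		/* consumer position */
	unsigned long wrid[WQE_CNT];	/* caller cookies, like rq.wrid[] */
};

/* Post one receive request; returns -1 on overflow or too many SGEs. */
static int post_recv(struct recv_ring *rq, unsigned long wr_id, int num_sge)
{
	unsigned int ind = rq->head & (WQE_CNT - 1);

	if (rq->head - rq->tail >= WQE_CNT)	/* ring full */
		return -1;
	if (num_sge > MAX_GS)			/* too many scatter entries */
		return -1;

	rq->wrid[ind] = wr_id;			/* remember the caller's id */
	rq->head++;				/* next post lands one slot on */
	return 0;
}

int main(void)
{
	struct recv_ring rq = { 0 };
	int i;

	for (i = 0; i < 10; i++)
		printf("post %d -> %d\n", i, post_recv(&rq, i, 1));
	return 0;
}
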
hns_roce_hw_v2.c
618 struct hns_roce_qp *hr_qp = to_hr_qp(ibqp); in hns_roce_v2_post_recv() local
631 spin_lock_irqsave(&hr_qp->rq.lock, flags); in hns_roce_v2_post_recv()
632 ind = hr_qp->rq.head & (hr_qp->rq.wqe_cnt - 1); in hns_roce_v2_post_recv()
634 if (hr_qp->state == IB_QPS_RESET) { in hns_roce_v2_post_recv()
635 spin_unlock_irqrestore(&hr_qp->rq.lock, flags); in hns_roce_v2_post_recv()
641 if (hns_roce_wq_overflow(&hr_qp->rq, nreq, in hns_roce_v2_post_recv()
642 hr_qp->ibqp.recv_cq)) { in hns_roce_v2_post_recv()
648 if (unlikely(wr->num_sge > hr_qp->rq.max_gs)) { in hns_roce_v2_post_recv()
650 wr->num_sge, hr_qp->rq.max_gs); in hns_roce_v2_post_recv()
656 wqe = get_recv_wqe(hr_qp, ind); in hns_roce_v2_post_recv()
[all …]
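
The v2 path adds one guard the v1 excerpt lacks: under rq.lock it refuses to post while the QP is still in the RESET state. A hedged, self-contained sketch of that check, using a pthread mutex and stand-in types (enum qp_state, struct qp) in place of the driver's spinlock and ib_qp_state:

/*
 * Illustrative only: the reset-state guard taken under the RQ lock in
 * hns_roce_v2_post_recv(). The enum and struct here are stand-ins; the
 * driver uses ib_qp_state and a spinlock rather than a pthread mutex.
 */
#include <pthread.h>
#include <stdio.h>

enum qp_state { QPS_RESET, QPS_RTR, QPS_RTS };

struct qp {
	enum qp_state state;
	pthread_mutex_t rq_lock;
};

/* Post only if the QP has left RESET; the check happens under the lock. */
static int post_recv(struct qp *qp)
{
	int ret = 0;

	pthread_mutex_lock(&qp->rq_lock);
	if (qp->state == QPS_RESET)
		ret = -1;		/* caller sees the post rejected */
	/* ... ring indexing as in the v1 sketch would go here ... */
	pthread_mutex_unlock(&qp->rq_lock);
	return ret;
}

int main(void)
{
	struct qp qp = { .state = QPS_RESET,
			 .rq_lock = PTHREAD_MUTEX_INITIALIZER };

	printf("post in RESET: %d\n", post_recv(&qp));
	qp.state = QPS_RTS;
	printf("post in RTS:   %d\n", post_recv(&qp));
	return 0;
}
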
hns_roce_device.h
699 struct hns_roce_qp hr_qp; member
969 struct hns_roce_qp *hr_qp);
1095 static inline struct hns_roce_sqp *hr_to_hr_sqp(struct hns_roce_qp *hr_qp) in hr_to_hr_sqp() argument
1097 return container_of(hr_qp, struct hns_roce_sqp, hr_qp); in hr_to_hr_sqp()
1243 void *get_recv_wqe(struct hns_roce_qp *hr_qp, int n);
1244 void *get_send_wqe(struct hns_roce_qp *hr_qp, int n);
1245 void *get_send_extend_sge(struct hns_roce_qp *hr_qp, int n);
1253 void hns_roce_qp_remove(struct hns_roce_dev *hr_dev, struct hns_roce_qp *hr_qp);
1254 void hns_roce_qp_free(struct hns_roce_dev *hr_dev, struct hns_roce_qp *hr_qp);
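
hr_to_hr_sqp() above is the usual container_of idiom: struct hns_roce_sqp embeds a struct hns_roce_qp member named hr_qp, so a pointer to the embedded member can be walked back to the outer structure. A standalone sketch of the same idiom; the offsetof-based macro mirrors the kernel's container_of, and struct sqp/struct qp are illustrative stand-ins for hns_roce_sqp/hns_roce_qp:

/*
 * Illustrative only: the container_of idiom behind hr_to_hr_sqp().
 * The kernel provides container_of(); this userspace copy uses offsetof
 * the same way.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct qp {
	unsigned long qpn;
};

struct sqp {
	int port;
	struct qp hr_qp;		/* embedded member, as in hns_roce_sqp */
};

/* Recover the outer sqp from a pointer to its embedded qp. */
static struct sqp *to_sqp(struct qp *qp)
{
	return container_of(qp, struct sqp, hr_qp);
}

int main(void)
{
	struct sqp s = { .port = 1, .hr_qp = { .qpn = 2 } };

	printf("outer recovered: %s\n",
	       to_sqp(&s.hr_qp) == &s ? "yes" : "no");
	return 0;
}
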