Lines matching refs: wqe_req (Microsoft MANA Ethernet driver, drivers/net/ethernet/microsoft/mana/mana_en.c)

99 tp->wqe_req.sgl[0].address = ash->dma_handle[0]; in mana_map_skb()
100 tp->wqe_req.sgl[0].mem_key = gd->gpa_mkey; in mana_map_skb()
101 tp->wqe_req.sgl[0].size = ash->size[0]; in mana_map_skb()
114 tp->wqe_req.sgl[i + 1].address = ash->dma_handle[i + 1]; in mana_map_skb()
115 tp->wqe_req.sgl[i + 1].mem_key = gd->gpa_mkey; in mana_map_skb()
116 tp->wqe_req.sgl[i + 1].size = ash->size[i + 1]; in mana_map_skb()
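The six mana_map_skb() hits above fill consecutive entries of the TX work request's scatter/gather list: SGE 0 describes the skb's linear data and SGEs 1..nr_frags the page fragments, each tagged with the device's GPA memory key. A minimal sketch of that path, assuming the driver's mana.h/gdma.h types and the standard DMA-mapping helpers; only the sgl[] assignments are taken verbatim from the listing:

#include <linux/skbuff.h>
#include <linux/dma-mapping.h>
#include "mana.h"	/* assumed driver header: gdma_dev, mana_tx_package, mana_skb_head */

/* Sketch, not verbatim driver code. */
static int mana_map_skb_sketch(struct sk_buff *skb, struct gdma_dev *gd,
			       struct device *dev, struct mana_skb_head *ash,
			       struct mana_tx_package *tp)
{
	dma_addr_t da;
	int i;

	/* SGE 0 covers the linear part of the skb. */
	da = dma_map_single(dev, skb->data, skb_headlen(skb), DMA_TO_DEVICE);
	if (dma_mapping_error(dev, da))
		return -ENOMEM;

	ash->dma_handle[0] = da;
	ash->size[0] = skb_headlen(skb);

	tp->wqe_req.sgl[0].address = ash->dma_handle[0];
	tp->wqe_req.sgl[0].mem_key = gd->gpa_mkey;	/* GPA memory key of the device */
	tp->wqe_req.sgl[0].size = ash->size[0];

	/* SGEs 1..nr_frags cover the paged fragments. */
	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

		da = skb_frag_dma_map(dev, frag, 0, skb_frag_size(frag),
				      DMA_TO_DEVICE);
		if (dma_mapping_error(dev, da))
			return -ENOMEM;	/* the real driver also unmaps earlier SGEs */

		ash->dma_handle[i + 1] = da;
		ash->size[i + 1] = skb_frag_size(frag);

		tp->wqe_req.sgl[i + 1].address = ash->dma_handle[i + 1];
		tp->wqe_req.sgl[i + 1].mem_key = gd->gpa_mkey;
		tp->wqe_req.sgl[i + 1].size = ash->size[i + 1];
	}

	return 0;
}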
170 pkg.wqe_req.inline_oob_size = sizeof(struct mana_tx_short_oob); in mana_start_xmit()
172 pkg.wqe_req.inline_oob_size = sizeof(struct mana_tx_oob); in mana_start_xmit()
174 pkg.wqe_req.inline_oob_data = &pkg.tx_oob; in mana_start_xmit()
175 pkg.wqe_req.flags = 0; in mana_start_xmit()
176 pkg.wqe_req.client_data_unit = 0; in mana_start_xmit()
178 pkg.wqe_req.num_sge = 1 + skb_shinfo(skb)->nr_frags; in mana_start_xmit()
179 WARN_ON_ONCE(pkg.wqe_req.num_sge > 30); in mana_start_xmit()
181 if (pkg.wqe_req.num_sge <= ARRAY_SIZE(pkg.sgl_array)) { in mana_start_xmit()
182 pkg.wqe_req.sgl = pkg.sgl_array; in mana_start_xmit()
184 pkg.sgl_ptr = kmalloc_array(pkg.wqe_req.num_sge, in mana_start_xmit()
190 pkg.wqe_req.sgl = pkg.sgl_ptr; in mana_start_xmit()
206 pkg.wqe_req.client_data_unit = skb_shinfo(skb)->gso_size; in mana_start_xmit()
207 pkg.wqe_req.flags = GDMA_WR_OOB_IN_SGL | GDMA_WR_PAD_BY_SGE0; in mana_start_xmit()
252 err = mana_gd_post_work_request(gdma_sq, &pkg.wqe_req, in mana_start_xmit()
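The mana_start_xmit() hits show the rest of the TX wqe_req being assembled: the inline OOB is either the short or the full mana_tx_oob, num_sge is one SGE for the linear data plus one per fragment, the SGL comes from the embedded sgl_array unless it overflows into a kmalloc_array() buffer, and GSO packets additionally set client_data_unit to gso_size with GDMA_WR_OOB_IN_SGL | GDMA_WR_PAD_BY_SGE0. A condensed sketch; struct mana_tx_package, struct gdma_sge and the third argument of mana_gd_post_work_request() are assumptions rather than part of the listing:

/* Sketch, not the actual mana_start_xmit() body. */
static int mana_xmit_build_wqe_sketch(struct sk_buff *skb,
				      struct mana_tx_package *pkg,
				      struct gdma_queue *gdma_sq,
				      bool use_long_oob, bool is_gso)
{
	struct gdma_posted_wqe_info wqe_info;	/* assumed third parameter of the post call */

	/* Short OOB unless the packet needs the full TX OOB (e.g. LSO/checksum). */
	if (!use_long_oob)
		pkg->wqe_req.inline_oob_size = sizeof(struct mana_tx_short_oob);
	else
		pkg->wqe_req.inline_oob_size = sizeof(struct mana_tx_oob);

	pkg->wqe_req.inline_oob_data = &pkg->tx_oob;
	pkg->wqe_req.flags = 0;
	pkg->wqe_req.client_data_unit = 0;

	/* One SGE for the linear data plus one per page fragment. */
	pkg->wqe_req.num_sge = 1 + skb_shinfo(skb)->nr_frags;
	WARN_ON_ONCE(pkg->wqe_req.num_sge > 30);

	if (pkg->wqe_req.num_sge <= ARRAY_SIZE(pkg->sgl_array)) {
		pkg->wqe_req.sgl = pkg->sgl_array;
	} else {
		pkg->sgl_ptr = kmalloc_array(pkg->wqe_req.num_sge,
					     sizeof(struct gdma_sge),
					     GFP_ATOMIC);
		if (!pkg->sgl_ptr)
			return -ENOMEM;
		pkg->wqe_req.sgl = pkg->sgl_ptr;
	}

	if (is_gso) {
		/* For LSO the OOB rides in the SGL and SGE 0 provides padding. */
		pkg->wqe_req.client_data_unit = skb_shinfo(skb)->gso_size;
		pkg->wqe_req.flags = GDMA_WR_OOB_IN_SGL | GDMA_WR_PAD_BY_SGE0;
	}

	return mana_gd_post_work_request(gdma_sq, &pkg->wqe_req, &wqe_info);
}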
1075 err = mana_gd_post_and_ring(rxq->gdma_rq, &recv_buf_oob->wqe_req, in mana_post_pkt_rxq()
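The single mana_post_pkt_rxq() hit is the RX recycle path: once a buffer has been consumed, its pre-built wqe_req is posted back to the receive queue and the doorbell rung. A sketch under the assumption that the rxq keeps its receive-buffer OOBs in a simple ring (buf_index, num_rx_buf, rx_oobs[] and wqe_inf are guessed names); only the mana_gd_post_and_ring() call is from the listing:

/* Sketch of the RX repost path. */
static void mana_post_pkt_rxq_sketch(struct mana_rxq *rxq)
{
	struct mana_recv_buf_oob *recv_buf_oob;
	u32 curr_index;
	int err;

	/* Recycle the next receive-buffer OOB in ring order (assumed bookkeeping). */
	curr_index = rxq->buf_index++;
	if (rxq->buf_index == rxq->num_rx_buf)
		rxq->buf_index = 0;

	recv_buf_oob = &rxq->rx_oobs[curr_index];

	/* The wqe_req was prepared once at allocation time (see the
	 * mana_alloc_rx_wqe() hits below) and is simply posted again.
	 */
	err = mana_gd_post_and_ring(rxq->gdma_rq, &recv_buf_oob->wqe_req,
				    &recv_buf_oob->wqe_inf);
	WARN_ON_ONCE(err);
}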
1608 rx_oob->wqe_req.sgl = rx_oob->sgl; in mana_alloc_rx_wqe()
1609 rx_oob->wqe_req.num_sge = rx_oob->num_sge; in mana_alloc_rx_wqe()
1610 rx_oob->wqe_req.inline_oob_size = 0; in mana_alloc_rx_wqe()
1611 rx_oob->wqe_req.inline_oob_data = NULL; in mana_alloc_rx_wqe()
1612 rx_oob->wqe_req.flags = 0; in mana_alloc_rx_wqe()
1613 rx_oob->wqe_req.client_data_unit = 0; in mana_alloc_rx_wqe()
1632 err = mana_gd_post_and_ring(rxq->gdma_rq, &rx_oob->wqe_req, in mana_push_wqe()
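The mana_alloc_rx_wqe() and mana_push_wqe() hits cover the initial RX setup: every receive buffer gets a one-SGE wqe_req with no inline OOB, no flags and no client_data_unit, and the whole batch is then posted with mana_gd_post_and_ring(). A sketch of both steps; the sgl[0] fill-in and the parameter names (buf_dma_addr, datasize, mem_key, wqe_inf) are assumptions, while the wqe_req assignments mirror the listing:

/* Sketch: prepare one RX WQE request for a pre-mapped receive buffer. */
static void mana_init_rx_wqe_sketch(struct mana_recv_buf_oob *rx_oob,
				    dma_addr_t buf_dma_addr, u32 datasize,
				    u32 mem_key)
{
	/* One SGE pointing at the receive buffer (names assumed). */
	rx_oob->num_sge = 1;
	rx_oob->sgl[0].address = buf_dma_addr;
	rx_oob->sgl[0].mem_key = mem_key;
	rx_oob->sgl[0].size = datasize;

	/* RX WQEs carry no inline OOB and no special flags. */
	rx_oob->wqe_req.sgl = rx_oob->sgl;
	rx_oob->wqe_req.num_sge = rx_oob->num_sge;
	rx_oob->wqe_req.inline_oob_size = 0;
	rx_oob->wqe_req.inline_oob_data = NULL;
	rx_oob->wqe_req.flags = 0;
	rx_oob->wqe_req.client_data_unit = 0;
}

/* Sketch: post every prepared RX WQE and ring the queue doorbell. */
static int mana_push_wqe_sketch(struct mana_rxq *rxq)
{
	u32 buf_idx;
	int err;

	for (buf_idx = 0; buf_idx < rxq->num_rx_buf; buf_idx++) {
		struct mana_recv_buf_oob *rx_oob = &rxq->rx_oobs[buf_idx];

		err = mana_gd_post_and_ring(rxq->gdma_rq, &rx_oob->wqe_req,
					    &rx_oob->wqe_inf);
		if (err)
			return err;
	}

	return 0;
}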