Lines Matching full:txp (identifier cross-reference; every hit below is in the hfi1 IPoIB transmit path)
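All of the hits below read or fill a single struct ipoib_txparms argument that carries the per-packet transmit parameters through the hfi1 IPoIB TX path. A reconstructed field sketch, inferred only from the members these hits dereference, follows; the ordering, the exact integer types, and the flow union layout are assumptions, not the verbatim driver definition.

/* Field sketch of struct ipoib_txparms, reconstructed from the hits below.
 * Pointed-to driver types are forward-declared so the sketch stands alone. */
struct hfi1_devdata;                     /* per-device state */
struct rdma_ah_attr;                     /* rdma core address-handle attributes */
struct hfi1_ibport;                      /* per-port IB state */
struct hfi1_ipoib_txq;                   /* per-queue TX state (tx_list, flow, sent_txreqs) */

struct ipoib_txparms {
        struct hfi1_devdata   *dd;       /* hits 238, 268, 603 */
        struct rdma_ah_attr   *ah_attr;  /* hits 298, 604 */
        struct hfi1_ibport    *ibp;      /* hits 297, 323, 605 */
        struct hfi1_ipoib_txq *txq;      /* hits 365, 400, 606 */
        union {                          /* assumed layout; .tx_queue, .sc5 and
                                          * .as_int are the members the hits use */
                struct {
                        unsigned char tx_queue;
                        unsigned char sc5;
                };
                unsigned short as_int;
        } flow;
        unsigned int  dqpn;              /* destination QPN, hits 364, 607 */
        unsigned char hdr_dwords;        /* header length in dwords, hits 272, 283, 318-354 */
        unsigned char entropy;           /* DETH entropy, hits 369, 610 */
};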
236 struct ipoib_txparms *txp) in hfi1_ipoib_build_ulp_payload() argument
238 struct hfi1_devdata *dd = txp->dd; in hfi1_ipoib_build_ulp_payload()
266 struct ipoib_txparms *txp) in hfi1_ipoib_build_tx_desc() argument
268 struct hfi1_devdata *dd = txp->dd; in hfi1_ipoib_build_tx_desc()
272 sizeof(sdma_hdr->pbc) + (txp->hdr_dwords << 2) + tx->skb->len; in hfi1_ipoib_build_tx_desc()
283 sizeof(sdma_hdr->pbc) + (txp->hdr_dwords << 2)); in hfi1_ipoib_build_tx_desc()
288 return hfi1_ipoib_build_ulp_payload(tx, txp); in hfi1_ipoib_build_tx_desc()
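The hfi1_ipoib_build_tx_desc() hits show the total packet length being computed from three pieces: the PBC, the protocol headers (hdr_dwords converted to bytes with << 2), and the skb payload. A minimal user-space sketch of that arithmetic; the 8-byte PBC size and the example lengths are assumptions for the demo.

#include <stdio.h>

int main(void)
{
        unsigned int pbc_bytes  = 8;    /* sizeof(sdma_hdr->pbc) in hit 272; 8 bytes assumed */
        unsigned int hdr_dwords = 7;    /* the 9B LRH + BTH + DETH value seen at hit 318 */
        unsigned int skb_len    = 1500; /* example payload length */

        /* hit 272: total bytes = PBC + headers (dwords to bytes) + payload */
        unsigned int pkt_bytes = pbc_bytes + (hdr_dwords << 2) + skb_len;

        printf("packet bytes = %u\n", pkt_bytes); /* 8 + 28 + 1500 = 1536 */
        return 0;
}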
292 struct ipoib_txparms *txp) in hfi1_ipoib_build_ib_tx_headers() argument
297 struct hfi1_pportdata *ppd = ppd_from_ibp(txp->ibp); in hfi1_ipoib_build_ib_tx_headers()
298 struct rdma_ah_attr *ah_attr = txp->ah_attr; in hfi1_ipoib_build_ib_tx_headers()
318 txp->hdr_dwords = 7; in hfi1_ipoib_build_ib_tx_headers()
322 txp->hdr_dwords += in hfi1_ipoib_build_ib_tx_headers()
323 hfi1_make_grh(txp->ibp, in hfi1_ipoib_build_ib_tx_headers()
326 txp->hdr_dwords - LRH_9B_DWORDS, in hfi1_ipoib_build_ib_tx_headers()
336 lrh0 |= (txp->flow.sc5 & 0xf) << 12; in hfi1_ipoib_build_ib_tx_headers()
354 dwords = txp->hdr_dwords + payload_dwords; in hfi1_ipoib_build_ib_tx_headers()
364 ohdr->bth[1] = cpu_to_be32(txp->dqpn); in hfi1_ipoib_build_ib_tx_headers()
365 ohdr->bth[2] = cpu_to_be32(mask_psn((u32)txp->txq->sent_txreqs)); in hfi1_ipoib_build_ib_tx_headers()
369 ohdr->u.ud.deth[1] = cpu_to_be32((txp->entropy << in hfi1_ipoib_build_ib_tx_headers()
375 ib_is_sc5(txp->flow.sc5) << in hfi1_ipoib_build_ib_tx_headers()
378 sc_to_vlt(priv->dd, txp->flow.sc5), in hfi1_ipoib_build_ib_tx_headers()
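hfi1_ipoib_build_ib_tx_headers() starts a 9B header at 7 dwords (2 for the LRH, 3 for the BTH, 2 for the DETH), lets hfi1_make_grh() grow hdr_dwords when a GRH is needed, and then packs the flow's service class and the running send counter into the header words. The sketch below redoes the two packing steps that are fully visible in the hits; the base lrh0 value and the example inputs are made up, and the 24-bit mask stands in for the driver's mask_psn(), whose definition is not shown here (IB packet sequence numbers are 24 bits wide).

#include <stdio.h>
#include <stdint.h>

#define PSN_MASK 0xFFFFFFu              /* 24-bit IB packet sequence number */

int main(void)
{
        uint16_t lrh0 = 0x0002;         /* example base LRH word 0 (assumed value) */
        uint8_t  sc5  = 0x15;           /* example 5-bit service class */
        uint64_t sent_txreqs = 0x123456789abcdefULL;   /* running send counter, hit 365 */

        /* hit 336: only the low 4 SC bits land in LRH bits 15:12 */
        lrh0 |= (uint16_t)((sc5 & 0xf) << 12);

        /* hit 365: the PSN placed in BTH[2] is the send counter masked to 24 bits */
        uint32_t psn = (uint32_t)sent_txreqs & PSN_MASK;

        printf("lrh0 = 0x%04x, psn = 0x%06x\n", lrh0, (unsigned)psn);
        return 0;
}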
385 struct ipoib_txparms *txp) in hfi1_ipoib_send_dma_common() argument
400 tx->txq = txp->txq; in hfi1_ipoib_send_dma_common()
404 hfi1_ipoib_build_ib_tx_headers(tx, txp); in hfi1_ipoib_send_dma_common()
406 ret = hfi1_ipoib_build_tx_desc(tx, txp); in hfi1_ipoib_send_dma_common()
408 if (txp->txq->flow.as_int != txp->flow.as_int) { in hfi1_ipoib_send_dma_common()
409 txp->txq->flow.tx_queue = txp->flow.tx_queue; in hfi1_ipoib_send_dma_common()
410 txp->txq->flow.sc5 = txp->flow.sc5; in hfi1_ipoib_send_dma_common()
411 txp->txq->sde = in hfi1_ipoib_send_dma_common()
413 txp->flow.tx_queue, in hfi1_ipoib_send_dma_common()
414 txp->flow.sc5); in hfi1_ipoib_send_dma_common()
415 trace_hfi1_flow_switch(txp->txq); in hfi1_ipoib_send_dma_common()
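The hfi1_ipoib_send_dma_common() hits show that each TX queue caches the last { tx_queue, sc5 } pair it used, and that a new packet only triggers re-selection of the queue's SDMA engine (and a trace_hfi1_flow_switch() event) when the packed pair differs from the cached one. A small sketch of that single-compare pattern; the union layout is inferred from the .as_int/.tx_queue/.sc5 accesses, and the engine re-selection call itself is truncated out of the hits, so it only appears as a comment.

#include <stdio.h>
#include <stdint.h>

/* Assumed layout: two selector bytes overlaid with one 16-bit word so a flow
 * change can be detected with a single compare (hit 408). */
union ipoib_flow {
        struct {
                uint8_t tx_queue;
                uint8_t sc5;
        };
        uint16_t as_int;
};

int main(void)
{
        union ipoib_flow cached, next;

        cached.tx_queue = 3;  cached.sc5 = 0x01;   /* what the queue last used */
        next.tx_queue   = 3;  next.sc5   = 0x05;   /* what this packet wants */

        if (cached.as_int != next.as_int) {
                /* hits 409-415: copy the new pair in, re-pick the queue's SDMA
                 * engine from it, and fire trace_hfi1_flow_switch() */
                cached = next;
                printf("flow switch: queue %u, sc5 0x%02x\n",
                       (unsigned)next.tx_queue, (unsigned)next.sc5);
        }
        return 0;
}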
481 struct ipoib_txparms *txp) in hfi1_ipoib_send_dma_single() argument
484 struct hfi1_ipoib_txq *txq = txp->txq; in hfi1_ipoib_send_dma_single()
488 tx = hfi1_ipoib_send_dma_common(dev, skb, txp); in hfi1_ipoib_send_dma_single()
507 ib_is_sc5(txp->flow.sc5)); in hfi1_ipoib_send_dma_single()
527 struct ipoib_txparms *txp) in hfi1_ipoib_send_dma_list() argument
529 struct hfi1_ipoib_txq *txq = txp->txq; in hfi1_ipoib_send_dma_list()
533 if (txq->flow.as_int != txp->flow.as_int) { in hfi1_ipoib_send_dma_list()
545 tx = hfi1_ipoib_send_dma_common(dev, skb, txp); in hfi1_ipoib_send_dma_list()
565 ib_is_sc5(txp->flow.sc5)); in hfi1_ipoib_send_dma_list()
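hfi1_ipoib_send_dma_single() and hfi1_ipoib_send_dma_list() share the same front end: both take the TX queue out of txp and build the request with hfi1_ipoib_send_dma_common() (hits 488 and 545), and the list variant additionally checks the queue's cached flow against the packet's flow before queuing (hit 533). The outline below only mirrors that visible shape: the stand-in types, the submit/append bodies, and what the driver really does on a flow mismatch are assumptions, not driver code.

#include <stdio.h>

struct demo_txq   { unsigned short flow_as_int; int pending; };
struct demo_parms { struct demo_txq *txq; unsigned short flow_as_int; };

static void build_common(struct demo_parms *p)
{
        /* stands in for hfi1_ipoib_send_dma_common(): header + descriptor build */
        (void)p;
}

static void send_single(struct demo_parms *p)
{
        build_common(p);
        printf("single path: submit the request immediately\n");
}

static void send_list(struct demo_parms *p)
{
        if (p->txq->flow_as_int != p->flow_as_int)
                printf("list path: flow changed, handle pending work first\n");
        build_common(p);
        p->txq->pending++;
        printf("list path: appended to tx_list (%d pending)\n", p->txq->pending);
}

int main(void)
{
        struct demo_txq   q = { 0x0103, 0 };
        struct demo_parms p = { &q, 0x0503 };

        send_list(&p);    /* flow differs from the queue's cached flow */
        send_single(&p);
        return 0;
}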
590 struct ipoib_txparms txp; in hfi1_ipoib_send() local
603 txp.dd = priv->dd; in hfi1_ipoib_send()
604 txp.ah_attr = &ibah_to_rvtah(address)->attr; in hfi1_ipoib_send()
605 txp.ibp = to_iport(priv->device, priv->port_num); in hfi1_ipoib_send()
606 txp.txq = &priv->txqs[skb_get_queue_mapping(skb)]; in hfi1_ipoib_send()
607 txp.dqpn = dqpn; in hfi1_ipoib_send()
608 txp.flow.sc5 = txp.ibp->sl_to_sc[rdma_ah_get_sl(txp.ah_attr)]; in hfi1_ipoib_send()
609 txp.flow.tx_queue = (u8)skb_get_queue_mapping(skb); in hfi1_ipoib_send()
610 txp.entropy = hfi1_ipoib_calc_entropy(skb); in hfi1_ipoib_send()
612 if (netdev_xmit_more() || !list_empty(&txp.txq->tx_list)) in hfi1_ipoib_send()
613 return hfi1_ipoib_send_dma_list(dev, skb, &txp); in hfi1_ipoib_send()
615 return hfi1_ipoib_send_dma_single(dev, skb, &txp); in hfi1_ipoib_send()
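hfi1_ipoib_send() fills txp once per skb: device and port pointers, the AH attributes, the TX queue picked by skb_get_queue_mapping(), the destination QPN, the SL-to-SC5 lookup that seeds the flow, and an entropy value. It then chooses the batched path when the stack signals more packets (netdev_xmit_more()) or work is already queued on tx_list, otherwise the single-packet path. A minimal user-space sketch of the two steering decisions; the sl_to_sc table contents and the example inputs are made up for the demo, while in the driver the table lives in the per-port state (hit 608).

#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

int main(void)
{
        uint8_t sl_to_sc[16] = { 0, 1, 2, 3, 4, 5, 6, 7,
                                 8, 9, 10, 11, 12, 13, 14, 15 };
        uint8_t sl       = 3;           /* service level from the AH attributes */
        uint8_t tx_queue = 2;           /* skb_get_queue_mapping() result, hits 606/609 */

        uint8_t sc5 = sl_to_sc[sl];     /* hit 608: flow.sc5 comes from the SL */

        bool xmit_more     = true;      /* stack says more skbs are coming */
        bool list_nonempty = false;     /* txq->tx_list already has work */

        /* hit 612: batch (dma_list path) if more packets are coming or work is
         * already queued, otherwise submit this one now (dma_single path). */
        if (xmit_more || list_nonempty)
                printf("queue %u, sc5 %u: append to tx_list and submit later\n",
                       (unsigned)tx_queue, (unsigned)sc5);
        else
                printf("queue %u, sc5 %u: submit now\n",
                       (unsigned)tx_queue, (unsigned)sc5);

        return 0;
}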