Lines matching refs:tqueue
All hits are in drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c, where tqueue is the per-queue TX state (struct sxgbe_tx_queue) of the Samsung sxgbe Ethernet driver.
638 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in dma_free_tx_skbufs() local
639 tx_free_ring_skbufs(tqueue); in dma_free_tx_skbufs()
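Lines 638-639 are the per-queue teardown: dma_free_tx_skbufs() looks up each TX queue in priv->txq[] and hands it to tx_free_ring_skbufs(). A minimal sketch of that shape, where the SXGBE_TX_QUEUES loop bound and the function signature are assumptions rather than text from the listing:

    static void dma_free_tx_skbufs(struct sxgbe_priv_data *priv)
    {
            int queue_num;

            /* Free the skb ring of every hardware TX queue
             * (loop bound SXGBE_TX_QUEUES is an assumption). */
            for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
                    struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

                    tx_free_ring_skbufs(tqueue);
            }
    }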
733 static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue) in sxgbe_tx_queue_clean() argument
735 struct sxgbe_priv_data *priv = tqueue->priv_ptr; in sxgbe_tx_queue_clean()
738 u8 queue_no = tqueue->queue_no; in sxgbe_tx_queue_clean()
745 while (tqueue->dirty_tx != tqueue->cur_tx) { in sxgbe_tx_queue_clean()
746 unsigned int entry = tqueue->dirty_tx % tx_rsize; in sxgbe_tx_queue_clean()
747 struct sk_buff *skb = tqueue->tx_skbuff[entry]; in sxgbe_tx_queue_clean()
750 p = tqueue->dma_tx + entry; in sxgbe_tx_queue_clean()
758 __func__, tqueue->cur_tx, tqueue->dirty_tx); in sxgbe_tx_queue_clean()
760 if (likely(tqueue->tx_skbuff_dma[entry])) { in sxgbe_tx_queue_clean()
762 tqueue->tx_skbuff_dma[entry], in sxgbe_tx_queue_clean()
765 tqueue->tx_skbuff_dma[entry] = 0; in sxgbe_tx_queue_clean()
770 tqueue->tx_skbuff[entry] = NULL; in sxgbe_tx_queue_clean()
775 tqueue->dirty_tx++; in sxgbe_tx_queue_clean()
780 sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv))) { in sxgbe_tx_queue_clean()
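Lines 733-780 are the TX completion path. sxgbe_tx_queue_clean() walks dirty_tx toward cur_tx, maps each index into the ring with a modulo, unmaps the DMA buffer recorded at transmit time, frees the head skb, and wakes the netdev queue once more than SXGBE_TX_THRESH descriptors are free again. A condensed sketch; ownership checks, locking, and statistics are omitted, and the dma_tx_size field, descriptor type, netdev_get_tx_queue() lookup, and get_tx_len() descriptor op are assumptions modeled on common NIC drivers:

    static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue)
    {
            struct sxgbe_priv_data *priv = tqueue->priv_ptr;
            unsigned int tx_rsize = priv->dma_tx_size;   /* assumed field */
            u8 queue_no = tqueue->queue_no;
            struct netdev_queue *dev_txq;

            dev_txq = netdev_get_tx_queue(priv->dev, queue_no);

            while (tqueue->dirty_tx != tqueue->cur_tx) {
                    unsigned int entry = tqueue->dirty_tx % tx_rsize;
                    struct sk_buff *skb = tqueue->tx_skbuff[entry];
                    struct sxgbe_tx_norm_desc *p = tqueue->dma_tx + entry;

                    /* Release the DMA mapping recorded in sxgbe_xmit(). */
                    if (likely(tqueue->tx_skbuff_dma[entry])) {
                            dma_unmap_single(priv->device,
                                             tqueue->tx_skbuff_dma[entry],
                                             priv->hw->desc->get_tx_len(p),
                                             DMA_TO_DEVICE);
                            tqueue->tx_skbuff_dma[entry] = 0;
                    }

                    /* Fragment slots hold no skb, so the head is freed once. */
                    if (likely(skb)) {
                            dev_kfree_skb(skb);
                            tqueue->tx_skbuff[entry] = NULL;
                    }

                    tqueue->dirty_tx++;
            }

            /* Restart the queue once enough descriptors are reclaimed. */
            if (unlikely(netif_tx_queue_stopped(dev_txq) &&
                         sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv)))
                    netif_tx_wake_queue(dev_txq);
    }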
799 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in sxgbe_tx_all_clean() local
801 sxgbe_tx_queue_clean(tqueue); in sxgbe_tx_all_clean()
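Lines 799-801 are the wrapper around that reclaim: sxgbe_tx_all_clean() simply runs sxgbe_tx_queue_clean() over every TX queue. A sketch, again with the queue-count macro as an assumption:

    static void sxgbe_tx_all_clean(struct sxgbe_priv_data *priv)
    {
            u8 queue_num;

            for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
                    struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

                    sxgbe_tx_queue_clean(tqueue);
            }
    }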
1277 struct sxgbe_tx_queue *tqueue = priv->txq[txq_index]; in sxgbe_xmit() local
1289 if (unlikely(skb_is_gso(skb) && tqueue->prev_mss != cur_mss)) in sxgbe_xmit()
1294 tqueue->hwts_tx_en))) in sxgbe_xmit()
1300 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) { in sxgbe_xmit()
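Lines 1277-1300 are the entry checks of sxgbe_xmit(): a GSO skb whose current MSS differs from the cached prev_mss will need an extra context descriptor, hardware-timestamped skbs take a separate path, and if the ring cannot hold a head descriptor plus one per fragment the queue is stopped and the packet is bounced back to the stack. A sketch of that last check; the dev_txq lookup and the error message are assumptions:

    /* Back-pressure: a head descriptor plus nr_frags fragment
     * descriptors must fit before the ring is touched. */
    if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) {
            if (!netif_tx_queue_stopped(dev_txq)) {
                    netif_tx_stop_queue(dev_txq);
                    netdev_err(dev, "%s: TX ring full while queue awake\n",
                               __func__);
            }
            return NETDEV_TX_BUSY;
    }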
1309 entry = tqueue->cur_tx % tx_rsize; in sxgbe_xmit()
1310 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1317 tqueue->tx_skbuff[entry] = skb; in sxgbe_xmit()
1322 if (unlikely(tqueue->prev_mss != cur_mss)) { in sxgbe_xmit()
1334 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1335 first_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1337 tqueue->prev_mss = cur_mss; in sxgbe_xmit()
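Lines 1309-1337 set up the head of the packet: the slot index is cur_tx modulo the ring size, the head slot takes ownership of the skb for the reclaim path, and a changed MSS first burns one slot on a TSO context descriptor before prev_mss is updated, so back-to-back GSO packets with an unchanged MSS skip the extra descriptor. A sketch of that step; set_tx_ctxt_mss() is a hypothetical stand-in for the driver's context-descriptor op:

    entry = tqueue->cur_tx % tx_rsize;
    tx_desc = tqueue->dma_tx + entry;
    tqueue->tx_skbuff[entry] = skb;         /* head slot owns the skb */

    if (unlikely(tqueue->prev_mss != cur_mss)) {
            /* Write the new MSS into a context descriptor
             * (hypothetical helper), then claim the next slot
             * for the first data descriptor. */
            set_tx_ctxt_mss(tx_desc, cur_mss);
            entry = (++tqueue->cur_tx) % tx_rsize;
            first_desc = tqueue->dma_tx + entry;

            tqueue->prev_mss = cur_mss;
    }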
1356 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1357 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1361 tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01; in sxgbe_xmit()
1362 tqueue->tx_skbuff[entry] = NULL; in sxgbe_xmit()
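Lines 1356-1362 are the fragment loop: every page fragment claims the next ring slot, its DMA address goes into tdes01 and is mirrored into tx_skbuff_dma[] for the unmap at completion time, while tx_skbuff[entry] stays NULL so the reclaim loop frees the head skb exactly once. A sketch; the loop frame and the skb_frag_dma_map() call are assumptions around the listed lines:

    for (frag_num = 0; frag_num < nr_frags; frag_num++) {
            const skb_frag_t *frag = &skb_shinfo(skb)->frags[frag_num];

            entry = (++tqueue->cur_tx) % tx_rsize;
            tx_desc = tqueue->dma_tx + entry;
            tx_desc->tdes01 = skb_frag_dma_map(priv->device, frag, 0,
                                               skb_frag_size(frag),
                                               DMA_TO_DEVICE);

            tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01;
            tqueue->tx_skbuff[entry] = NULL;  /* fragment: nothing to free */
    }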
1380 tqueue->tx_count_frames += nr_frags + 1; in sxgbe_xmit()
1381 if (tqueue->tx_count_frames > tqueue->tx_coal_frames) { in sxgbe_xmit()
1384 mod_timer(&tqueue->txtimer, in sxgbe_xmit()
1385 SXGBE_COAL_TIMER(tqueue->tx_coal_timer)); in sxgbe_xmit()
1387 tqueue->tx_count_frames = 0; in sxgbe_xmit()
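Lines 1380-1387 implement TX interrupt coalescing by frame count: each transmitted packet adds nr_frags + 1 to a per-queue counter, and once the counter passes tx_coal_frames the software completion timer is rearmed and the batch restarts. The descriptor interrupt-bit handling sits in lines the search did not match, so the sketch keeps only what the listing shows:

    tqueue->tx_count_frames += nr_frags + 1;
    if (tqueue->tx_count_frames > tqueue->tx_coal_frames) {
            /* Threshold crossed: rearm the completion timer and
             * start a new batch. The IC-bit update on tx_desc is
             * elided in the cross-reference output. */
            mod_timer(&tqueue->txtimer,
                      SXGBE_COAL_TIMER(tqueue->tx_coal_timer));
            tqueue->tx_count_frames = 0;
    }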
1396 tqueue->cur_tx++; in sxgbe_xmit()
1400 __func__, tqueue->cur_tx % tx_rsize, in sxgbe_xmit()
1401 tqueue->dirty_tx % tx_rsize, entry, in sxgbe_xmit()
1404 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1))) { in sxgbe_xmit()
1413 tqueue->hwts_tx_en)) { in sxgbe_xmit()
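Lines 1396-1413 close out sxgbe_xmit(): cur_tx moves past the last slot used, a debug print dumps the cur/dirty ring indices, the queue is stopped preemptively while MAX_SKB_FRAGS + 1 or fewer slots remain so the next worst-case skb always fits, and the skb is flagged for a hardware timestamp when the socket requested one and hwts_tx_en is set on the queue. A sketch of the two tail checks, using the standard skb_shinfo() tx_flags pattern; the exact statements inside each branch are assumptions:

    /* Stop early so a maximally fragmented skb can still be queued. */
    if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1)))
            netif_tx_stop_queue(dev_txq);

    /* Tell the stack the hardware will stamp this skb on the wire. */
    if (unlikely((skb_shinfo(skb)->tx_flags & SKBTX_HW_TSTAMP) &&
                 tqueue->hwts_tx_en))
            skb_shinfo(skb)->tx_flags |= SKBTX_IN_PROGRESS;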