Lines matching refs: tqueue
(Cross-reference listing: each entry gives the source line number, the matching code, and the enclosing function; "local" and "argument" mark the lines where tqueue is declared.)
635 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in dma_free_tx_skbufs() local
636 tx_free_ring_skbufs(tqueue); in dma_free_tx_skbufs()
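Lines 635-636 sit inside dma_free_tx_skbufs(), which releases the socket buffers of every TX queue before the rings are torn down. A minimal sketch of the enclosing loop, assuming a plain per-queue iteration (the loop form and the SXGBE_TX_QUEUES bound are inferred, not copied from the source):

/* Sketch only: the iteration is inferred from the two lines above. */
static void dma_free_tx_skbufs(struct sxgbe_priv_data *priv)
{
	int queue_num;

	for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
		struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

		tx_free_ring_skbufs(tqueue);	/* line 636 */
	}
}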
730 static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue) in sxgbe_tx_queue_clean() argument
732 struct sxgbe_priv_data *priv = tqueue->priv_ptr; in sxgbe_tx_queue_clean()
735 u8 queue_no = tqueue->queue_no; in sxgbe_tx_queue_clean()
742 while (tqueue->dirty_tx != tqueue->cur_tx) { in sxgbe_tx_queue_clean()
743 unsigned int entry = tqueue->dirty_tx % tx_rsize; in sxgbe_tx_queue_clean()
744 struct sk_buff *skb = tqueue->tx_skbuff[entry]; in sxgbe_tx_queue_clean()
747 p = tqueue->dma_tx + entry; in sxgbe_tx_queue_clean()
755 __func__, tqueue->cur_tx, tqueue->dirty_tx); in sxgbe_tx_queue_clean()
757 if (likely(tqueue->tx_skbuff_dma[entry])) { in sxgbe_tx_queue_clean()
759 tqueue->tx_skbuff_dma[entry], in sxgbe_tx_queue_clean()
762 tqueue->tx_skbuff_dma[entry] = 0; in sxgbe_tx_queue_clean()
767 tqueue->tx_skbuff[entry] = NULL; in sxgbe_tx_queue_clean()
772 tqueue->dirty_tx++; in sxgbe_tx_queue_clean()
777 sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv))) { in sxgbe_tx_queue_clean()
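The loop at lines 742-772 is the completion reclaim: dirty_tx chases cur_tx, both counters free-running and reduced modulo tx_rsize only when indexing the ring, and each completed slot has its DMA mapping released and its skb pointer cleared; line 777 then wakes the queue once enough descriptors are free again. A stand-alone model of that index arithmetic (names and the exact availability convention here are illustrative, not the driver's):

#include <assert.h>

/* Free-running counters; the modulo indexing stays consistent across
 * unsigned wraparound as long as the ring size is a power of two. */
struct ring_state {
	unsigned int cur_tx;	/* next slot the xmit path fills   */
	unsigned int dirty_tx;	/* next slot the reclaim loop scans */
};

static unsigned int tx_avail(const struct ring_state *r, unsigned int tx_rsize)
{
	return tx_rsize - (r->cur_tx - r->dirty_tx);
}

static void reclaim_model(struct ring_state *r, unsigned int tx_rsize)
{
	while (r->dirty_tx != r->cur_tx) {
		unsigned int entry = r->dirty_tx % tx_rsize;

		/* ... unmap ring[entry], free its skb (lines 757-767) ... */
		(void)entry;
		r->dirty_tx++;
	}
	assert(tx_avail(r, tx_rsize) == tx_rsize);	/* ring fully drained */
}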
796 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in sxgbe_tx_all_clean() local
798 sxgbe_tx_queue_clean(tqueue); in sxgbe_tx_all_clean()
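Lines 796-798 show sxgbe_tx_all_clean() applying the per-queue reclaim to every TX queue; a plausible shape of the wrapper (only the two body lines above are verbatim):

static void sxgbe_tx_all_clean(struct sxgbe_priv_data *priv)
{
	u8 queue_num;

	for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
		struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

		sxgbe_tx_queue_clean(tqueue);
	}
}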
1274 struct sxgbe_tx_queue *tqueue = priv->txq[txq_index]; in sxgbe_xmit() local
1286 if (unlikely(skb_is_gso(skb) && tqueue->prev_mss != cur_mss)) in sxgbe_xmit()
1291 tqueue->hwts_tx_en))) in sxgbe_xmit()
1297 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) { in sxgbe_xmit()
1306 entry = tqueue->cur_tx % tx_rsize; in sxgbe_xmit()
1307 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1314 tqueue->tx_skbuff[entry] = skb; in sxgbe_xmit()
1319 if (unlikely(tqueue->prev_mss != cur_mss)) { in sxgbe_xmit()
1331 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1332 first_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1334 tqueue->prev_mss = cur_mss; in sxgbe_xmit()
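Lines 1286 and 1319-1334 show sxgbe_xmit() caching the TSO MSS: a context descriptor has to be programmed only when the MSS differs from the one used for the previous GSO packet, after which cur_tx is bumped past the context slot and prev_mss is updated. A hedged reconstruction of that branch (the descriptor-programming step is elided; the rest follows the lines above):

if (unlikely(skb_is_gso(skb) && tqueue->prev_mss != cur_mss)) {
	/* ... program a TSO context descriptor carrying cur_mss ... */
	entry = (++tqueue->cur_tx) % tx_rsize;	/* skip the context slot */
	first_desc = tqueue->dma_tx + entry;	/* data starts after it  */
	tqueue->prev_mss = cur_mss;		/* line 1334             */
}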
1353 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1354 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1358 tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01; in sxgbe_xmit()
1359 tqueue->tx_skbuff[entry] = NULL; in sxgbe_xmit()
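Lines 1353-1359 are one iteration of the fragment fill: each fragment claims the next ring slot, the DMA address written into the descriptor's tdes01 is mirrored into tx_skbuff_dma[] so the reclaim loop can unmap it later, and tx_skbuff[] stays NULL because only the head slot owns the skb. A sketch of the surrounding loop (the mapping call itself is elided):

for (frag_num = 0; frag_num < nr_frags; frag_num++) {
	entry = (++tqueue->cur_tx) % tx_rsize;
	tx_desc = tqueue->dma_tx + entry;

	/* ... map the fragment and write its DMA address into tdes01 ... */
	tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01;
	tqueue->tx_skbuff[entry] = NULL;	/* skb freed via head slot only */
}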
1377 tqueue->tx_count_frames += nr_frags + 1; in sxgbe_xmit()
1378 if (tqueue->tx_count_frames > tqueue->tx_coal_frames) { in sxgbe_xmit()
1381 mod_timer(&tqueue->txtimer, in sxgbe_xmit()
1382 SXGBE_COAL_TIMER(tqueue->tx_coal_timer)); in sxgbe_xmit()
1384 tqueue->tx_count_frames = 0; in sxgbe_xmit()
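Lines 1377-1384 implement TX interrupt coalescing: the frame counter advances by nr_frags + 1 per packet, and once it exceeds tx_coal_frames the driver re-arms the coalescing timer and zeroes the counter, letting the timer rather than per-packet interrupts drive reclaim. Reassembled from the lines above (the placement of the counter reset inside the branch is inferred):

tqueue->tx_count_frames += nr_frags + 1;
if (tqueue->tx_count_frames > tqueue->tx_coal_frames) {
	/* batch limit hit: fall back to the coalescing timer */
	mod_timer(&tqueue->txtimer,
		  SXGBE_COAL_TIMER(tqueue->tx_coal_timer));
	tqueue->tx_count_frames = 0;
} /* else: per-descriptor interrupt path (not captured by this grep) */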
1393 tqueue->cur_tx++; in sxgbe_xmit()
1397 __func__, tqueue->cur_tx % tx_rsize, in sxgbe_xmit()
1398 tqueue->dirty_tx % tx_rsize, entry, in sxgbe_xmit()
1401 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1))) { in sxgbe_xmit()
1410 tqueue->hwts_tx_en)) { in sxgbe_xmit()
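Lines 1297 and 1401 bracket the backpressure logic: before touching the ring, sxgbe_xmit() bails out if fewer than nr_frags + 1 descriptors are free, and after queuing it stops the subqueue once fewer than MAX_SKB_FRAGS + 1 remain, so the next worst-case skb is always guaranteed to fit; line 1410 additionally gates hardware TX timestamping on tqueue->hwts_tx_en. A sketch of the stop-side check (the netif_tx_stop_queue() call is the conventional choice here, not confirmed by the lines above):

if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1))) {
	/* Too few descriptors for another worst-case packet: pause this
	 * subqueue until the reclaim path frees some (see line 777). */
	netif_tx_stop_queue(netdev_get_tx_queue(dev, txq_index));
}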