Lines matching refs:cur_tx. In the stmmac Ethernet driver, tx_q->cur_tx is the TX descriptor ring's producer index; its consumer counterpart, tx_q->dirty_tx, appears alongside it throughout the matches below.

366 if (tx_q->dirty_tx > tx_q->cur_tx) in stmmac_tx_avail()
367 avail = tx_q->dirty_tx - tx_q->cur_tx - 1; in stmmac_tx_avail()
369 avail = priv->dma_conf.dma_tx_size - tx_q->cur_tx + tx_q->dirty_tx - 1; in stmmac_tx_avail()
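The two branches at 366/369 are the standard free-slot computation for a circular ring: one slot is deliberately sacrificed (the trailing "- 1") so that cur_tx == dirty_tx can only mean "ring empty", never "ring full". A minimal standalone model of that arithmetic; field names mirror the driver's, but this is a sketch, not the kernel code:

#include <assert.h>

/* Standalone model of stmmac_tx_avail()'s index arithmetic.
 * cur   = producer index (tx_q->cur_tx), next slot to be written
 * dirty = consumer index (tx_q->dirty_tx), next slot to be reclaimed */
static unsigned int tx_avail(unsigned int cur, unsigned int dirty,
			     unsigned int ring_size)
{
	if (dirty > cur)	/* consumer is "ahead" after a producer wrap */
		return dirty - cur - 1;
	/* no wrap between them: free space is what is left to the end of
	 * the ring plus what has been reclaimed at the start, minus the
	 * one sentinel slot that keeps full and empty distinguishable */
	return ring_size - cur + dirty - 1;
}

int main(void)
{
	assert(tx_avail(0, 0, 512) == 511);	/* freshly reset: empty */
	assert(tx_avail(511, 0, 512) == 0);	/* producer one behind consumer: full */
	assert(tx_avail(3, 10, 512) == 6);	/* wrapped producer */
	return 0;
}
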
417 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_enable_eee_mode()
2404 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
2477 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_xdp_xmit_zc()
2478 entry = tx_q->cur_tx; in stmmac_xdp_xmit_zc()
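Lines 2477-2478 show the producer step used throughout the file: advance cur_tx with STMMAC_GET_ENTRY, then re-read it into the local entry for the next iteration. Assuming the ring size is a power of two, the wraparound reduces to a mask; the macro body below is that assumption, not a copy of the driver's definition:

/* Assumed shape of the advance-and-wrap helper: with a power-of-two
 * ring size, the modulo collapses to a mask. */
#define GET_ENTRY(x, size)	(((x) + 1) & ((size) - 1))

/* Hypothetical stand-in for the real descriptor-fill work. */
static void fill_descriptor(unsigned int entry) { (void)entry; }

/* Sketch of the loop structure around lines 2404-2478: claim the slot
 * at entry, fill it, publish the advance, re-read for the next pass. */
static void xmit_loop(unsigned int *cur_tx, unsigned int ring_size,
		      unsigned int budget)
{
	unsigned int entry = *cur_tx;

	while (budget--) {
		fill_descriptor(entry);
		*cur_tx = GET_ENTRY(*cur_tx, ring_size);
		entry = *cur_tx;
	}
}
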
2531 while ((entry != tx_q->cur_tx) && count < priv->dma_conf.dma_tx_size) { in stmmac_tx_clean()
2678 if (tx_q->dirty_tx != tx_q->cur_tx) in stmmac_tx_clean()
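Lines 2531 and 2678 are the consumer side. The clean loop walks entry from dirty_tx toward cur_tx, capped at one full ring as a guard against a corrupted index, and the same dirty_tx != cur_tx emptiness test gates EEE entry at line 417 and the rearm decision at line 2678. A sketch of the walk under the same power-of-two assumption:

/* Sketch of the reclaim walk in stmmac_tx_clean(): the consumer index
 * chases the producer index without ever passing it, and the count cap
 * bounds the loop even if an index were corrupted. */
static unsigned int tx_clean(unsigned int *dirty_tx, unsigned int cur_tx,
			     unsigned int ring_size)
{
	unsigned int entry = *dirty_tx;
	unsigned int count = 0;

	while (entry != cur_tx && count < ring_size) {
		/* ...check the hardware ownership bit, stop if still owned;
		 * unmap the DMA buffer; free the skb on its last segment... */
		entry = (entry + 1) & (ring_size - 1);
		count++;
	}

	*dirty_tx = entry;
	return count;	/* reclaimed slots; progress may restart the TX queue */
}
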
3960 p = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_vlan_insert()
3962 p = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_vlan_insert()
3968 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_vlan_insert()
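Lines 3960/3962, and the matching pairs at 4001/4003, 4120/4122, and 4225/4227, all make the same choice: when the queue uses extended (TBS) descriptors, take the basic member embedded in dma_entx[]; otherwise index dma_tx[] directly. A sketch with illustrative stand-in types (the real driver gates this on tx_q->tbs flags and its descriptor layouts differ):

#include <stdint.h>

/* Illustrative stand-ins: the point is only that the extended layout
 * embeds the basic one, so both branches yield a struct dma_desc *. */
struct dma_desc  { uint32_t des0, des1, des2, des3; };
struct dma_edesc { uint32_t des4, des5, des6, des7; struct dma_desc basic; };

struct tx_ring {
	struct dma_desc  *dma_tx;	/* basic descriptor ring */
	struct dma_edesc *dma_entx;	/* extended (TBS) descriptor ring */
	int use_extended;		/* assumption: stands in for the tx_q->tbs test */
	unsigned int cur_tx;
};

static struct dma_desc *current_desc(struct tx_ring *tx_q)
{
	if (tx_q->use_extended)
		return &tx_q->dma_entx[tx_q->cur_tx].basic;
	return &tx_q->dma_tx[tx_q->cur_tx];
}
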
3996 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_allocator()
3998 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_allocator()
4001 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_allocator()
4003 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_allocator()
4041 tx_q->tx_tail_addr = tx_q->dma_tx_phy + (tx_q->cur_tx * desc_size); in stmmac_flush_tx_descriptors()
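Line 4041 derives the doorbell value: the tail pointer written to hardware is a bus address, the ring's DMA base plus the byte offset of the first descriptor not yet posted, so the DMA engine processes descriptors up to that point. The computation is a single multiply-add:

#include <stddef.h>
#include <stdint.h>

/* Sketch of the tail-address computation at line 4041: ring base (a
 * DMA/bus address) plus the byte offset of slot cur_tx. desc_size
 * differs between the basic and extended descriptor layouts. */
static uint64_t tx_tail_addr(uint64_t dma_tx_phy, unsigned int cur_tx,
			     size_t desc_size)
{
	return dma_tx_phy + (uint64_t)cur_tx * desc_size;
}
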
4088 first_tx = tx_q->cur_tx; in stmmac_tso_xmit()
4120 mss_desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4122 mss_desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4126 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, in stmmac_tso_xmit()
4128 WARN_ON(tx_q->tx_skbuff[tx_q->cur_tx]); in stmmac_tso_xmit()
4141 first_entry = tx_q->cur_tx; in stmmac_tso_xmit()
4195 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf = des; in stmmac_tso_xmit()
4196 tx_q->tx_skbuff_dma[tx_q->cur_tx].len = skb_frag_size(frag); in stmmac_tso_xmit()
4197 tx_q->tx_skbuff_dma[tx_q->cur_tx].map_as_page = true; in stmmac_tso_xmit()
4198 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4201 tx_q->tx_skbuff_dma[tx_q->cur_tx].last_segment = true; in stmmac_tso_xmit()
4204 tx_q->tx_skbuff[tx_q->cur_tx] = skb; in stmmac_tso_xmit()
4205 tx_q->tx_skbuff_dma[tx_q->cur_tx].buf_type = STMMAC_TXBUF_T_SKB; in stmmac_tso_xmit()
4208 tx_packets = (tx_q->cur_tx + 1) - first_tx; in stmmac_tso_xmit()
4225 desc = &tx_q->dma_entx[tx_q->cur_tx].basic; in stmmac_tso_xmit()
4227 desc = &tx_q->dma_tx[tx_q->cur_tx]; in stmmac_tso_xmit()
4239 tx_q->cur_tx = STMMAC_GET_ENTRY(tx_q->cur_tx, priv->dma_conf.dma_tx_size); in stmmac_tso_xmit()
4283 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_tso_xmit()
4284 tx_q->cur_tx, first, nfrags); in stmmac_tso_xmit()
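Lines 4195-4205 are the software-side bookkeeping the TSO path keeps in step with the hardware ring: each consumed slot records the DMA address, mapped length, mapping kind, and buffer type; the final fragment is flagged last_segment; and the skb pointer is stored only in the last slot (line 4204) so the clean loop frees it exactly once. A sketch of the per-slot record with illustrative types; the enum names stand in for the driver's STMMAC_TXBUF_T_* constants:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

enum txbuf_type { TXBUF_T_SKB, TXBUF_T_XDP_TX, TXBUF_T_XDP_NDO };

/* Illustrative per-slot record mirroring tx_q->tx_skbuff_dma[]. */
struct tx_slot_dma {
	uint64_t buf;		/* DMA address to unmap at completion */
	size_t len;		/* mapped length */
	bool map_as_page;	/* page mapping (frag) vs linear mapping */
	bool last_segment;	/* completion point for the whole packet */
	enum txbuf_type buf_type;
};

/* Record one page fragment, as at lines 4195-4201. */
static void record_frag(struct tx_slot_dma *slot, uint64_t dma_addr,
			size_t frag_len, bool is_last_frag)
{
	slot->buf = dma_addr;
	slot->len = frag_len;
	slot->map_as_page = true;	/* fragments live in pages */
	slot->buf_type = TXBUF_T_SKB;
	slot->last_segment = is_last_frag;
}
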
4328 first_tx = tx_q->cur_tx; in stmmac_xmit()
4356 entry = tx_q->cur_tx; in stmmac_xmit()
4462 tx_q->cur_tx = entry; in stmmac_xmit()
4467 __func__, tx_q->cur_tx, tx_q->dirty_tx, first_entry, in stmmac_xmit()
4691 unsigned int entry = tx_q->cur_tx; in stmmac_xdp_xmit_xdpf()
4754 tx_q->cur_tx = entry; in stmmac_xdp_xmit_xdpf()
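stmmac_xmit() (lines 4356/4462) and stmmac_xdp_xmit_xdpf() (lines 4691/4754) share a publish-late pattern: snapshot cur_tx into a local entry, build the whole descriptor chain against the copy, and store it back only once the frame is fully described, so a half-built chain is never exposed to the cleanup path. A sketch of the shape:

/* Sketch of the publish-late pattern shared by the xmit paths: work on
 * a local copy of the producer index and store it back only when the
 * complete descriptor chain is ready. */
static int xmit_frame(unsigned int *cur_tx, unsigned int ring_size,
		      unsigned int nr_descs)
{
	unsigned int entry = *cur_tx;	/* snapshot, as at lines 4356/4691 */
	unsigned int i;

	for (i = 0; i < nr_descs; i++) {
		/* ...map buffer i, fill descriptor[entry]... */
		entry = (entry + 1) & (ring_size - 1);
	}

	/* ...set the OWN bit on the first descriptor last, then... */
	*cur_tx = entry;		/* publish, as at lines 4462/4754 */
	return 0;
}
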
7465 tx_q->cur_tx = 0; in stmmac_reset_tx_queue()
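Line 7465 resets the producer index; assuming dirty_tx is zeroed alongside it (a refs:dirty_tx search would confirm), this restores the cur_tx == dirty_tx empty-ring invariant that the availability check at 366/369 depends on. A sketch:

/* Sketch of a TX queue reset: both indices return to the same value,
 * which is exactly the "empty" condition of the availability check. */
struct tx_ring_state {
	unsigned int cur_tx;	/* producer: next slot to fill */
	unsigned int dirty_tx;	/* consumer: next slot to reclaim */
};

static void reset_tx_queue(struct tx_ring_state *tx_q)
{
	tx_q->cur_tx = 0;
	tx_q->dirty_tx = 0;	/* assumption: reset alongside cur_tx */
}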