
Searched refs:netdev_txq (Results 1 – 5 of 5) sorted by relevance

/Linux-v5.10/drivers/net/ethernet/google/gve/
gve_tx.c
155 netdev_tx_reset_queue(tx->netdev_txq); in gve_tx_free_ring()
225 tx->netdev_txq = netdev_get_tx_queue(priv->dev, idx); in gve_tx_alloc_ring()
330 netif_tx_stop_queue(tx->netdev_txq); in gve_maybe_stop_tx()
348 netif_tx_start_queue(tx->netdev_txq); in gve_maybe_stop_tx()
495 netdev_tx_sent_queue(tx->netdev_txq, skb->len); in gve_tx()
501 if (!netif_xmit_stopped(tx->netdev_txq) && netdev_xmit_more()) in gve_tx()
550 netdev_tx_completed_queue(tx->netdev_txq, pkts, bytes); in gve_clean_tx_done()
557 if (try_to_wake && netif_tx_queue_stopped(tx->netdev_txq) && in gve_clean_tx_done()
560 netif_tx_wake_queue(tx->netdev_txq); in gve_clean_tx_done()
gve.h
142 struct netdev_queue *netdev_txq; member
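
Taken together, the gve hits trace the byte-queue-limits (BQL) lifecycle for a cached tx queue pointer: the ring caches netdev_get_tx_queue() at alloc time, accounts every skb with netdev_tx_sent_queue(), defers the doorbell when the stack signals more frames via netdev_xmit_more(), and credits completions before waking. A minimal sketch of the transmit half; struct my_tx_ring, my_xmit() and the doorbell placeholder are hypothetical stand-ins, only the netdev_*/netif_* helpers are real kernel API:

#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Hypothetical ring state; gve keeps the same cached pointer in gve.h:142. */
struct my_tx_ring {
	struct netdev_queue *netdev_txq;	/* from netdev_get_tx_queue() */
};

static netdev_tx_t my_xmit(struct my_tx_ring *tx, struct sk_buff *skb)
{
	/* BQL: account the bytes handed to hardware. */
	netdev_tx_sent_queue(tx->netdev_txq, skb->len);

	/* Batch doorbells: skip the kick if the stack has more frames
	 * queued and the queue was not stopped in the meantime. */
	if (!netif_xmit_stopped(tx->netdev_txq) && netdev_xmit_more())
		return NETDEV_TX_OK;

	/* ... ring the hardware doorbell here (driver specific) ... */
	return NETDEV_TX_OK;
}
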
/Linux-v5.10/drivers/net/ethernet/huawei/hinic/
hinic_tx.c
495 struct netdev_queue *netdev_txq; in hinic_lb_xmit_frame() local
537 netdev_txq = netdev_get_tx_queue(netdev, q_id); in hinic_lb_xmit_frame()
538 if ((!netdev_xmit_more()) || (netif_xmit_stopped(netdev_txq))) in hinic_lb_xmit_frame()
556 struct netdev_queue *netdev_txq; in hinic_xmit_frame() local
626 netdev_txq = netdev_get_tx_queue(netdev, q_id); in hinic_xmit_frame()
627 if ((!netdev_xmit_more()) || (netif_xmit_stopped(netdev_txq))) in hinic_xmit_frame()
702 struct netdev_queue *netdev_txq; in free_tx_poll() local
746 netdev_txq = netdev_get_tx_queue(txq->netdev, qp->q_id); in free_tx_poll()
748 __netif_tx_lock(netdev_txq, smp_processor_id()); in free_tx_poll()
752 __netif_tx_unlock(netdev_txq); in free_tx_poll()
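
The hinic hits apply the same xmit-more doorbell test in both transmit paths, and additionally take the per-queue xmit lock from the completion poll before touching queue state. A sketch of that locking step; my_free_tx_poll() is hypothetical, the netif_tx_* calls are the real helpers used above:

#include <linux/netdevice.h>
#include <linux/smp.h>

/* Wake a stopped queue from the completion path without racing a
 * concurrent ndo_start_xmit running on another CPU. */
static void my_free_tx_poll(struct net_device *netdev, u16 q_id)
{
	struct netdev_queue *netdev_txq = netdev_get_tx_queue(netdev, q_id);

	__netif_tx_lock(netdev_txq, smp_processor_id());
	if (netif_tx_queue_stopped(netdev_txq))
		netif_tx_wake_queue(netdev_txq);
	__netif_tx_unlock(netdev_txq);
}
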
/Linux-v5.10/drivers/net/ethernet/qlogic/qede/
qede_fp.c
441 struct netdev_queue *netdev_txq; in qede_tx_int() local
445 netdev_txq = netdev_get_tx_queue(edev->ndev, txq->ndev_txq_id); in qede_tx_int()
467 netdev_tx_completed_queue(netdev_txq, pkts_compl, bytes_compl); in qede_tx_int()
480 if (unlikely(netif_tx_queue_stopped(netdev_txq))) { in qede_tx_int()
491 __netif_tx_lock(netdev_txq, smp_processor_id()); in qede_tx_int()
493 if ((netif_tx_queue_stopped(netdev_txq)) && in qede_tx_int()
497 netif_tx_wake_queue(netdev_txq); in qede_tx_int()
502 __netif_tx_unlock(netdev_txq); in qede_tx_int()
1494 struct netdev_queue *netdev_txq; in qede_start_xmit() local
1513 netdev_txq = netdev_get_tx_queue(ndev, txq_index); in qede_start_xmit()
[all …]
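
The qede_fp.c completion path combines both patterns: credit BQL first, then wake the queue only after re-checking its stopped state under the xmit lock. A sketch of that sequence; my_tx_int() and its counters are assumed filled in by the caller, everything else is the kernel API shown above:

#include <linux/netdevice.h>
#include <linux/smp.h>

static void my_tx_int(struct net_device *ndev, u16 txq_index,
		      unsigned int pkts_compl, unsigned int bytes_compl)
{
	struct netdev_queue *netdev_txq = netdev_get_tx_queue(ndev, txq_index);

	/* BQL: return completed packets/bytes to the queue budget. */
	netdev_tx_completed_queue(netdev_txq, pkts_compl, bytes_compl);

	if (unlikely(netif_tx_queue_stopped(netdev_txq))) {
		__netif_tx_lock(netdev_txq, smp_processor_id());
		/* The xmit path may have restarted the queue already;
		 * re-check while holding the lock before waking. */
		if (netif_tx_queue_stopped(netdev_txq))
			netif_tx_wake_queue(netdev_txq);
		__netif_tx_unlock(netdev_txq);
	}
}
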
qede_main.c
1683 struct netdev_queue *netdev_txq; in qede_empty_tx_queue() local
1686 netdev_txq = netdev_get_tx_queue(edev->ndev, txq->ndev_txq_id); in qede_empty_tx_queue()
1710 netdev_tx_completed_queue(netdev_txq, pkts_compl, bytes_compl); in qede_empty_tx_queue()
2671 struct netdev_queue *netdev_txq; in qede_is_txq_full() local
2673 netdev_txq = netdev_get_tx_queue(edev->ndev, txq->ndev_txq_id); in qede_is_txq_full()
2674 if (netif_xmit_stopped(netdev_txq)) in qede_is_txq_full()
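
The qede_is_txq_full() hit reduces to the generic stopped test; a one-line sketch, with the ndev/queue-id parameters standing in for the driver's edev/txq fields:

#include <linux/netdevice.h>

static bool my_txq_full(struct net_device *ndev, u16 ndev_txq_id)
{
	return netif_xmit_stopped(netdev_get_tx_queue(ndev, ndev_txq_id));
}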