/Linux-v5.15/drivers/net/ethernet/intel/i40e/ |
D | i40e_xsk.c |
    194  u16 ntu = rx_ring->next_to_use;            in i40e_alloc_rx_buffers_zc() local
    200  rx_desc = I40E_RX_DESC(rx_ring, ntu);      in i40e_alloc_rx_buffers_zc()
    201  bi = i40e_rx_bi(rx_ring, ntu);             in i40e_alloc_rx_buffers_zc()
    215  ntu++;                                     in i40e_alloc_rx_buffers_zc()
    217  if (unlikely(ntu == rx_ring->count)) {     in i40e_alloc_rx_buffers_zc()
    220  ntu = 0;                                   in i40e_alloc_rx_buffers_zc()
    225  if (rx_ring->next_to_use != ntu) {         in i40e_alloc_rx_buffers_zc()
    228  i40e_release_rx_desc(rx_ring, ntu);        in i40e_alloc_rx_buffers_zc()
    420  u16 ntu = xdp_ring->next_to_use;           in i40e_xmit_pkt_batch() local
    429  tx_desc = I40E_TX_DESC(xdp_ring, ntu++);   in i40e_xmit_pkt_batch()
    [all …]
|
D | i40e_adminq.c |
    1007  u16 ntu;                                                       in i40e_clean_arq_element() local
    1023  ntu = rd32(hw, hw->aq.arq.head) & I40E_PF_ARQH_ARQH_MASK;      in i40e_clean_arq_element()
    1024  if (ntu == ntc) {                                              in i40e_clean_arq_element()
    1077  hw->aq.arq.next_to_use = ntu;                                  in i40e_clean_arq_element()
    1083  *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);   in i40e_clean_arq_element()
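The last match above is the wrap-aware pending count: when the hardware head (ntu) has wrapped past the end of the admin receive queue, the ring size is added back in. The same expression appears in iavf_adminq.c and ice_controlq.c further down. A minimal standalone check of the arithmetic (arq_pending is an illustrative helper name, not a driver function):

    #include <stdint.h>
    #include <stdio.h>

    /* Wrap-aware count of admin receive queue entries still pending between
     * next_to_clean (ntc) and the hardware head (ntu) on a ring of 'count'
     * descriptors; mirrors the expression at i40e_adminq.c:1083. */
    static uint16_t arq_pending(uint16_t ntc, uint16_t ntu, uint16_t count)
    {
        return (ntc > ntu ? count : 0) + (ntu - ntc);
    }

    int main(void)
    {
        /* No wrap: the hardware head is ahead of software. */
        printf("%u\n", arq_pending(4, 9, 32));    /* 9 - 4 = 5 */

        /* Wrapped: the head restarted at the beginning of the ring. */
        printf("%u\n", arq_pending(30, 3, 32));   /* 32 + 3 - 30 = 5 */
        return 0;
    }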
|
D | i40e_txrx.c |
    1711  u16 ntu = rx_ring->next_to_use;            in i40e_alloc_rx_buffers() local
    1719  rx_desc = I40E_RX_DESC(rx_ring, ntu);      in i40e_alloc_rx_buffers()
    1720  bi = i40e_rx_bi(rx_ring, ntu);             in i40e_alloc_rx_buffers()
    1739  ntu++;                                     in i40e_alloc_rx_buffers()
    1740  if (unlikely(ntu == rx_ring->count)) {     in i40e_alloc_rx_buffers()
    1743  ntu = 0;                                   in i40e_alloc_rx_buffers()
    1752  if (rx_ring->next_to_use != ntu)           in i40e_alloc_rx_buffers()
    1753  i40e_release_rx_desc(rx_ring, ntu);        in i40e_alloc_rx_buffers()
    1758  if (rx_ring->next_to_use != ntu)           in i40e_alloc_rx_buffers()
    1759  i40e_release_rx_desc(rx_ring, ntu);        in i40e_alloc_rx_buffers()
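Both i40e refill paths above (the zero-copy one in i40e_xsk.c and the regular one here) follow the same next_to_use discipline: advance a local ntu per descriptor, wrap it to 0 at rx_ring->count, and only publish the new value, via i40e_release_rx_desc(), when the index actually moved. The ice and iavf allocators listed below repeat the pattern. A rough userspace sketch of just that index bookkeeping, with ring, refill_ring and release_to_hw as hypothetical stand-ins for the driver's types and tail write:

    #include <stdint.h>
    #include <stdio.h>

    struct ring {
        uint16_t count;        /* number of descriptors in the ring */
        uint16_t next_to_use;  /* first descriptor not yet handed to hardware */
    };

    /* Hypothetical stand-in for i40e_release_rx_desc(): in the driver this is
     * where the tail register is written so hardware sees the new buffers. */
    static void release_to_hw(struct ring *r, uint16_t ntu)
    {
        r->next_to_use = ntu;
        printf("tail moved to %u\n", ntu);
    }

    /* Refill 'cleaned' descriptors: advance a local copy of next_to_use, wrap
     * it at the ring size, and only publish it if it actually moved. */
    static void refill_ring(struct ring *r, uint16_t cleaned)
    {
        uint16_t ntu = r->next_to_use;

        while (cleaned--) {
            /* ...allocate a buffer and program descriptor 'ntu' here... */
            ntu++;
            if (ntu == r->count)       /* wrap around the end of the ring */
                ntu = 0;
        }

        if (r->next_to_use != ntu)     /* skip a pointless tail write */
            release_to_hw(r, ntu);
    }

    int main(void)
    {
        struct ring r = { .count = 8, .next_to_use = 6 };

        refill_ring(&r, 4);            /* visits 6, 7, 0, 1 -> ntu = 2 */
        return 0;
    }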
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns3/ |
D | hns3_trace.h |
    74   __field(int, ntu)
    83   __entry->ntu = ring->next_to_use;
    93   __get_str(devname), __entry->index, __entry->ntu,
    105  __field(int, ntu)
    115  __entry->ntu = ring->next_to_use;
    126  __get_str(devname), __entry->index, __entry->ntu,
|
D | hns3_enet.c |
    965   u32 ntc, ntu;                                in hns3_tx_spare_space() local
    971   ntu = tx_spare->next_to_use;                 in hns3_tx_spare_space()
    973   if (ntc > ntu)                               in hns3_tx_spare_space()
    974   return ntc - ntu - 1;                        in hns3_tx_spare_space()
    979   return max(ntc, tx_spare->len - ntu) - 1;    in hns3_tx_spare_space()
    1089  u32 ntu = tx_spare->next_to_use;             in hns3_tx_spare_alloc() local
    1097  if (ntu + size > tx_spare->len) {            in hns3_tx_spare_alloc()
    1098  *cb_len += (tx_spare->len - ntu);            in hns3_tx_spare_alloc()
    1099  ntu = 0;                                     in hns3_tx_spare_alloc()
    1102  tx_spare->next_to_use = ntu + size;          in hns3_tx_spare_alloc()
    [all …]
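hns3_tx_spare_space() and hns3_tx_spare_alloc() manage the TX spare area as a circular byte buffer: free space is whatever contiguous region remains between next_to_use and next_to_clean, and an allocation that does not fit before the end of the buffer wraps to offset 0, charging the skipped tail to the caller via cb_len. A rough userspace model of that arithmetic under those assumptions (struct spare, spare_space and spare_alloc are illustrative names, not the driver's API):

    #include <stdint.h>
    #include <stdio.h>

    struct spare {
        uint32_t len;            /* total bytes in the spare buffer */
        uint32_t next_to_use;    /* ntu: where the next allocation starts */
        uint32_t next_to_clean;  /* ntc: up to where completed data was reclaimed */
    };

    static uint32_t max_u32(uint32_t a, uint32_t b)
    {
        return a > b ? a : b;
    }

    /* Largest contiguous allocation currently possible, following the
     * two-region logic shown in hns3_tx_spare_space(). */
    static uint32_t spare_space(const struct spare *s)
    {
        uint32_t ntc = s->next_to_clean, ntu = s->next_to_use;

        if (ntc > ntu)                  /* free space is one region: (ntu, ntc) */
            return ntc - ntu - 1;

        /* free space is split into [ntu, len) and [0, ntc); pick the larger part */
        return max_u32(ntc, s->len - ntu) - 1;
    }

    /* Reserve 'size' contiguous bytes; *cb_len additionally accounts for the
     * unusable tail when the allocation has to wrap, as in hns3_tx_spare_alloc(). */
    static uint32_t spare_alloc(struct spare *s, uint32_t size, uint32_t *cb_len)
    {
        uint32_t ntu = s->next_to_use;

        *cb_len = size;
        if (ntu + size > s->len) {      /* does not fit before the end */
            *cb_len += s->len - ntu;    /* the skipped tail is charged too */
            ntu = 0;                    /* restart at the beginning */
        }
        s->next_to_use = ntu + size;
        return ntu;                     /* offset of the reserved region */
    }

    int main(void)
    {
        struct spare s = { .len = 4096, .next_to_use = 3800, .next_to_clean = 1024 };
        uint32_t cb_len, off;

        printf("space: %u\n", spare_space(&s));   /* max(1024, 296) - 1 = 1023 */
        off = spare_alloc(&s, 512, &cb_len);      /* wraps: off 0, cb_len 808 */
        printf("off %u cb_len %u ntu %u\n", off, cb_len, s.next_to_use);
        return 0;
    }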
|
/Linux-v5.15/drivers/net/ethernet/intel/ice/ |
D | ice_xsk.c |
    366  u16 ntu = rx_ring->next_to_use;                                  in ice_alloc_rx_bufs_zc() local
    374  rx_desc = ICE_RX_DESC(rx_ring, ntu);                             in ice_alloc_rx_bufs_zc()
    375  rx_buf = &rx_ring->rx_buf[ntu];                                  in ice_alloc_rx_bufs_zc()
    390  ntu++;                                                           in ice_alloc_rx_bufs_zc()
    392  if (unlikely(ntu == rx_ring->count)) {                           in ice_alloc_rx_bufs_zc()
    395  ntu = 0;                                                         in ice_alloc_rx_bufs_zc()
    399  if (rx_ring->next_to_use != ntu) {                               in ice_alloc_rx_bufs_zc()
    402  ice_release_rx_desc(rx_ring, ntu);                               in ice_alloc_rx_bufs_zc()
    831  u16 ntc = xdp_ring->next_to_clean, ntu = xdp_ring->next_to_use;  in ice_xsk_clean_xdp_ring() local
    834  while (ntc != ntu) {                                             in ice_xsk_clean_xdp_ring()
|
D | ice_controlq.c |
    1169  u16 ntu;                                                         in ice_clean_rq_elem() local
    1184  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);           in ice_clean_rq_elem()
    1186  if (ntu == ntc) {                                                in ice_clean_rq_elem()
    1233  cq->rq.next_to_use = ntu;                                        in ice_clean_rq_elem()
    1239  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);           in ice_clean_rq_elem()
    1240  *pending = (u16)((ntc > ntu ? cq->rq.count : 0) + (ntu - ntc));  in ice_clean_rq_elem()
|
D | ice_txrx.c |
    671  u16 ntu = rx_ring->next_to_use;          in ice_alloc_rx_bufs() local
    680  rx_desc = ICE_RX_DESC(rx_ring, ntu);     in ice_alloc_rx_bufs()
    681  bi = &rx_ring->rx_buf[ntu];              in ice_alloc_rx_bufs()
    701  ntu++;                                   in ice_alloc_rx_bufs()
    702  if (unlikely(ntu == rx_ring->count)) {   in ice_alloc_rx_bufs()
    705  ntu = 0;                                 in ice_alloc_rx_bufs()
    714  if (rx_ring->next_to_use != ntu)         in ice_alloc_rx_bufs()
    715  ice_release_rx_desc(rx_ring, ntu);       in ice_alloc_rx_bufs()
|
D | ice_main.c |
    1373  u16 ntu;                                                 in ice_ctrlq_pending() local
    1375  ntu = (u16)(rd32(hw, cq->rq.head) & cq->rq.head_mask);   in ice_ctrlq_pending()
    1376  return cq->rq.next_to_clean != ntu;                      in ice_ctrlq_pending()
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns3/hns3vf/ |
D | hclgevf_cmd.c |
    19  int ntu = ring->next_to_use;                           in hclgevf_ring_space() local
    22  used = (ntu - ntc + ring->desc_num) % ring->desc_num;  in hclgevf_ring_space()
    30  int ntu = ring->next_to_use;                           in hclgevf_is_valid_csq_clean_head() local
    33  if (ntu > ntc)                                         in hclgevf_is_valid_csq_clean_head()
    34  return head >= ntc && head <= ntu;                     in hclgevf_is_valid_csq_clean_head()
    36  return head >= ntc || head <= ntu;                     in hclgevf_is_valid_csq_clean_head()
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns3/hns3pf/ |
D | hclge_cmd.c |
    18  int ntu = ring->next_to_use;                               in hclge_ring_space() local
    20  int used = (ntu - ntc + ring->desc_num) % ring->desc_num;  in hclge_ring_space()
    27  int ntu = ring->next_to_use;                               in is_valid_csq_clean_head() local
    30  if (ntu > ntc)                                             in is_valid_csq_clean_head()
    31  return head >= ntc && head <= ntu;                         in is_valid_csq_clean_head()
    33  return head >= ntc || head <= ntu;                         in is_valid_csq_clean_head()
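The PF (hclge_cmd.c) and VF (hclgevf_cmd.c) command-queue helpers above use the same two pieces of index math: a modular count of descriptors in flight, and a sanity check that the head index reported by firmware lies inside the software-owned window, where the wrapped case turns the && into an ||. A small standalone sketch of both (ring_used and valid_clean_head are illustrative names, not the driver's helpers):

    #include <stdbool.h>
    #include <stdio.h>

    /* Descriptors currently in flight on a ring with desc_num slots, using the
     * modular distance from hclge_ring_space()/hclgevf_ring_space(). */
    static int ring_used(int ntu, int ntc, int desc_num)
    {
        return (ntu - ntc + desc_num) % desc_num;
    }

    /* Sanity-check the head index reported by hardware: it must lie inside the
     * region software has handed out, i.e. between next_to_clean and
     * next_to_use, taking wraparound into account, as in the
     * is_valid_csq_clean_head() helpers. */
    static bool valid_clean_head(int head, int ntu, int ntc)
    {
        if (ntu > ntc)
            return head >= ntc && head <= ntu;

        return head >= ntc || head <= ntu;
    }

    int main(void)
    {
        /* Wrapped ring: ntc = 28, ntu = 3 on a 32-entry ring. */
        printf("used: %d\n", ring_used(3, 28, 32));           /* (3 - 28 + 32) % 32 = 7 */
        printf("head 30: %d\n", valid_clean_head(30, 3, 28)); /* 1: inside [28..31]+[0..3] */
        printf("head 10: %d\n", valid_clean_head(10, 3, 28)); /* 0: outside the window */
        return 0;
    }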
|
/Linux-v5.15/drivers/net/ethernet/intel/ixgbevf/ |
D | ixgbevf.h |
    289  u16 ntu = ring->next_to_use;                             in ixgbevf_desc_unused() local
    291  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in ixgbevf_desc_unused()
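ixgbevf_desc_unused() above, and the identical ixgbe_desc_unused() and igc_desc_unused() further down, compute free ring slots with one descriptor always held back (the trailing - 1), so next_to_use == next_to_clean can only ever mean an empty ring. A quick standalone check of the formula (desc_unused here is a userspace copy for illustration):

    #include <stdint.h>
    #include <stdio.h>

    /* Free descriptors between next_to_clean (ntc) and next_to_use (ntu) on a
     * ring with 'count' slots; one slot is always kept back (the -1). */
    static uint16_t desc_unused(uint16_t ntc, uint16_t ntu, uint16_t count)
    {
        return ((ntc > ntu) ? 0 : count) + ntc - ntu - 1;
    }

    int main(void)
    {
        printf("%u\n", desc_unused(10, 200, 256));  /* 256 + 10 - 200 - 1 = 65  */
        printf("%u\n", desc_unused(200, 10, 256));  /* 200 - 10 - 1 = 189       */
        printf("%u\n", desc_unused(7, 7, 256));     /* empty ring: 256 - 1 = 255 */
        return 0;
    }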
|
/Linux-v5.15/drivers/net/ethernet/intel/iavf/ |
D | iavf_adminq.c |
    856  u16 ntu;                                                      in iavf_clean_arq_element() local
    872  ntu = rd32(hw, hw->aq.arq.head) & IAVF_VF_ARQH1_ARQH_MASK;    in iavf_clean_arq_element()
    873  if (ntu == ntc) {                                             in iavf_clean_arq_element()
    926  hw->aq.arq.next_to_use = ntu;                                 in iavf_clean_arq_element()
    931  *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc);  in iavf_clean_arq_element()
|
D | iavf_txrx.c |
    881  u16 ntu = rx_ring->next_to_use;          in iavf_alloc_rx_buffers() local
    889  rx_desc = IAVF_RX_DESC(rx_ring, ntu);    in iavf_alloc_rx_buffers()
    890  bi = &rx_ring->rx_bi[ntu];               in iavf_alloc_rx_buffers()
    909  ntu++;                                   in iavf_alloc_rx_buffers()
    910  if (unlikely(ntu == rx_ring->count)) {   in iavf_alloc_rx_buffers()
    913  ntu = 0;                                 in iavf_alloc_rx_buffers()
    922  if (rx_ring->next_to_use != ntu)         in iavf_alloc_rx_buffers()
    923  iavf_release_rx_desc(rx_ring, ntu);      in iavf_alloc_rx_buffers()
    928  if (rx_ring->next_to_use != ntu)         in iavf_alloc_rx_buffers()
    929  iavf_release_rx_desc(rx_ring, ntu);      in iavf_alloc_rx_buffers()
|
/Linux-v5.15/drivers/net/ethernet/intel/ixgbe/ |
D | ixgbe_xsk.c |
    446  u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;  in ixgbe_clean_xdp_tx_irq() local
    456  while (ntc != ntu) {                                           in ixgbe_clean_xdp_tx_irq()
    534  u16 ntc = tx_ring->next_to_clean, ntu = tx_ring->next_to_use;  in ixgbe_xsk_clean_tx_ring() local
    539  while (ntc != ntu) {                                           in ixgbe_xsk_clean_tx_ring()
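Both ixgbe XSK cleanup routines walk the in-flight region of the ring, from next_to_clean up to next_to_use, wrapping at the end; the loop stops when the two indices meet. A bare-bones model of that walk, with clean_tx_ring and free_buffer as hypothetical placeholders for the real per-descriptor cleanup:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical per-descriptor cleanup hook; the real code unmaps DMA and
     * returns XSK frames to the pool. */
    static void free_buffer(uint16_t idx)
    {
        printf("cleaning descriptor %u\n", idx);
    }

    /* Release every in-flight descriptor, mirroring the ntc/ntu walk in the
     * ixgbe XSK cleanup paths: stop when next_to_clean catches next_to_use. */
    static uint16_t clean_tx_ring(uint16_t ntc, uint16_t ntu, uint16_t count)
    {
        while (ntc != ntu) {
            free_buffer(ntc);
            ntc++;
            if (ntc == count)   /* wrap around the end of the ring */
                ntc = 0;
        }
        return ntc;             /* new next_to_clean == next_to_use */
    }

    int main(void)
    {
        clean_tx_ring(14, 2, 16);   /* cleans 14, 15, 0, 1 */
        return 0;
    }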
|
D | ixgbe.h |
    510  u16 ntu = ring->next_to_use;                             in ixgbe_desc_unused() local
    512  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in ixgbe_desc_unused()
|
/Linux-v5.15/drivers/net/ethernet/intel/igc/ |
D | igc.h |
    545  u16 ntu = ring->next_to_use;                             in igc_desc_unused() local
    547  return ((ntc > ntu) ? 0 : ring->count) + ntc - ntu - 1;  in igc_desc_unused()
|
D | igc_main.c |
    2596  u16 ntu = ring->next_to_use;          in igc_xdp_xmit_zc() local
    2620  tx_desc = IGC_TX_DESC(ring, ntu);     in igc_xdp_xmit_zc()
    2625  bi = &ring->tx_buffer_info[ntu];      in igc_xdp_xmit_zc()
    2635  ntu++;                                in igc_xdp_xmit_zc()
    2636  if (ntu == ring->count)               in igc_xdp_xmit_zc()
    2637  ntu = 0;                              in igc_xdp_xmit_zc()
    2640  ring->next_to_use = ntu;              in igc_xdp_xmit_zc()
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns/ |
D | hns_enet.c |
    706  int ntu = ring->next_to_use;                             in hns_desc_unused() local
    708  return ((ntc >= ntu) ? 0 : ring->desc_num) + ntc - ntu;  in hns_desc_unused()
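hns_desc_unused() is the same kind of gap computation as the Intel desc_unused() helpers above, but with >= instead of > and without the reserved slot, so equal indices yield 0 here rather than count - 1. A standalone comparison of the arithmetic (userspace copy for illustration only):

    #include <stdio.h>

    /* The hns variant: >= instead of >, and no -1, so equal indices report 0
     * instead of desc_num - 1 as in the Intel desc_unused() helpers. */
    static int hns_desc_unused(int ntc, int ntu, int desc_num)
    {
        return ((ntc >= ntu) ? 0 : desc_num) + ntc - ntu;
    }

    int main(void)
    {
        printf("%d\n", hns_desc_unused(9, 3, 1024));   /* 9 - 3 = 6            */
        printf("%d\n", hns_desc_unused(3, 9, 1024));   /* 1024 + 3 - 9 = 1018  */
        printf("%d\n", hns_desc_unused(5, 5, 1024));   /* equal indices: 0     */
        return 0;
    }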
|