/Linux-v5.15/drivers/net/ethernet/intel/i40e/

i40e_xsk.c
    194  u16 ntu = rx_ring->next_to_use;  in i40e_alloc_rx_buffers_zc()
    225  if (rx_ring->next_to_use != ntu) {  in i40e_alloc_rx_buffers_zc()
    381  cleaned_count = (next_to_clean - rx_ring->next_to_use - 1) & count_mask;  in i40e_clean_rx_irq_zc()
    390  if (failure || next_to_clean == rx_ring->next_to_use)  in i40e_clean_rx_irq_zc()
    409  tx_desc = I40E_TX_DESC(xdp_ring, xdp_ring->next_to_use++);  in i40e_xmit_pkt()
    420  u16 ntu = xdp_ring->next_to_use;  in i40e_xmit_pkt_batch()
    438  xdp_ring->next_to_use = ntu;  in i40e_xmit_pkt_batch()
    456  u16 ntu = xdp_ring->next_to_use ? xdp_ring->next_to_use - 1 : xdp_ring->count - 1;  in i40e_set_rs_bit()
    480  if (xdp_ring->next_to_use + nb_pkts >= xdp_ring->count) {  in i40e_xmit_zc()
    481  nb_processed = xdp_ring->count - xdp_ring->next_to_use;  in i40e_xmit_zc()
    [all …]
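
The i40e zero-copy receive path above computes its refill budget with a mask
rather than a branch (line 381). A minimal sketch of that idea, assuming a
power-of-two ring size; the struct and function names are illustrative, not
the driver's own:

    #include <stdint.h>

    struct rx_ring_sketch {
        uint16_t count;          /* descriptors in the ring; power of two */
        uint16_t next_to_use;    /* producer index: next slot the driver fills */
        uint16_t next_to_clean;  /* consumer index: next slot to be completed */
    };

    static uint16_t rx_free_slots(const struct rx_ring_sketch *r)
    {
        uint16_t count_mask = r->count - 1;

        /* Unsigned wrap-around plus the mask gives the in-ring distance;
         * one slot stays empty so "full" and "empty" remain distinguishable. */
        return (uint16_t)(r->next_to_clean - r->next_to_use - 1) & count_mask;
    }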
|
i40e_adminq.c
    354  hw->aq.asq.next_to_use = 0;  in i40e_init_asq()
    413  hw->aq.arq.next_to_use = 0;  in i40e_init_arq()
    767  return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use;  in i40e_asq_done()
    815  details = I40E_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use);  in i40e_asq_send_command()
    870  desc_on_ring = I40E_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use);  in i40e_asq_send_command()
    877  dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]);  in i40e_asq_send_command()
    895  (hw->aq.asq.next_to_use)++;  in i40e_asq_send_command()
    896  if (hw->aq.asq.next_to_use == hw->aq.asq.count)  in i40e_asq_send_command()
    897  hw->aq.asq.next_to_use = 0;  in i40e_asq_send_command()
    899  wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use);  in i40e_asq_send_command()
    [all …]
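
The admin-queue send path above shows the usual post sequence: bump
next_to_use, wrap it at the ring size, then write the new value to the
hardware tail register (lines 895-899). A compilable sketch of that sequence;
adminq_ring_sketch, write_tail_reg() and post_descriptor() are placeholders,
not driver APIs:

    #include <stdint.h>
    #include <stdio.h>

    struct adminq_ring_sketch {
        uint16_t count;        /* descriptors in the ring */
        uint16_t next_to_use;  /* software producer index */
    };

    /* Stand-in for the driver's MMIO doorbell write (wr32() above). */
    static void write_tail_reg(uint16_t val)
    {
        printf("tail <- %u\n", (unsigned int)val);
    }

    static void post_descriptor(struct adminq_ring_sketch *ring)
    {
        ring->next_to_use++;
        if (ring->next_to_use == ring->count)
            ring->next_to_use = 0;

        /* Hardware consumes descriptors up to, but not including, tail. */
        write_tail_reg(ring->next_to_use);
    }

    int main(void)
    {
        struct adminq_ring_sketch asq = { .count = 4, .next_to_use = 3 };

        post_descriptor(&asq);  /* wraps: prints "tail <- 0" */
        return 0;
    }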
|
i40e_txrx.c
    30  i = tx_ring->next_to_use;  in i40e_fdir()
    34  tx_ring->next_to_use = (i < tx_ring->count) ? i : 0;  in i40e_fdir()
    119  i = tx_ring->next_to_use;  in i40e_program_fdir_filter()
    124  i = tx_ring->next_to_use;  in i40e_program_fdir_filter()
    128  tx_ring->next_to_use = ((i + 1) < tx_ring->count) ? i + 1 : 0;  in i40e_program_fdir_filter()
    153  writel(tx_ring->next_to_use, tx_ring->tail);  in i40e_program_fdir_filter()
    812  tx_ring->next_to_use = 0;  in i40e_clean_tx_ring()
    860  tail = ring->next_to_use;  in i40e_get_tx_pending()
    1460  tx_ring->next_to_use = 0;  in i40e_setup_tx_descriptors()
    1547  rx_ring->next_to_use = 0;  in i40e_clean_rx_ring()
    [all …]
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns3/hns3vf/

hclgevf_cmd.c
    19  int ntu = ring->next_to_use;  in hclgevf_ring_space()
    30  int ntu = ring->next_to_use;  in hclgevf_is_valid_csq_clean_head()
    51  csq->next_to_use, csq->next_to_clean);  in hclgevf_cmd_csq_clean()
    69  return head == hw->cmq.csq.next_to_use;  in hclgevf_cmd_csq_done()
    191  desc_to_use = &hw->cmq.csq.desc[hw->cmq.csq.next_to_use];  in hclgevf_cmd_copy_desc()
    193  (hw->cmq.csq.next_to_use)++;  in hclgevf_cmd_copy_desc()
    194  if (hw->cmq.csq.next_to_use == hw->cmq.csq.desc_num)  in hclgevf_cmd_copy_desc()
    195  hw->cmq.csq.next_to_use = 0;  in hclgevf_cmd_copy_desc()
    321  ntc = hw->cmq.csq.next_to_use;  in hclgevf_cmd_send()
    327  hw->cmq.csq.next_to_use);  in hclgevf_cmd_send()
    [all …]
|
hclgevf_mbx.c
    155  return tail == hw->cmq.crq.next_to_use;  in hclgevf_cmd_crq_empty()
    226  desc = &crq->desc[crq->next_to_use];  in hclgevf_mbx_handler()
    229  flag = le16_to_cpu(crq->desc[crq->next_to_use].flag);  in hclgevf_mbx_handler()
    236  crq->desc[crq->next_to_use].flag = 0;  in hclgevf_mbx_handler()
    266  crq->desc[crq->next_to_use].flag = 0;  in hclgevf_mbx_handler()
    272  crq->next_to_use);  in hclgevf_mbx_handler()
|
/Linux-v5.15/drivers/net/ethernet/intel/ixgbe/

ixgbe_xsk.c
    145  u16 i = rx_ring->next_to_use;  in ixgbe_alloc_rx_buffers_zc()
    188  if (rx_ring->next_to_use != i) {  in ixgbe_alloc_rx_buffers_zc()
    189  rx_ring->next_to_use = i;  in ixgbe_alloc_rx_buffers_zc()
    343  writel(ring->next_to_use, ring->tail);  in ixgbe_clean_rx_irq_zc()
    354  if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)  in ixgbe_clean_rx_irq_zc()
    403  tx_bi = &xdp_ring->tx_buffer_info[xdp_ring->next_to_use];  in ixgbe_xmit_zc()
    408  tx_desc = IXGBE_TX_DESC(xdp_ring, xdp_ring->next_to_use);  in ixgbe_xmit_zc()
    420  xdp_ring->next_to_use++;  in ixgbe_xmit_zc()
    421  if (xdp_ring->next_to_use == xdp_ring->count)  in ixgbe_xmit_zc()
    422  xdp_ring->next_to_use = 0;  in ixgbe_xmit_zc()
    [all …]
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns3/hns3pf/

hclge_cmd.c
    18  int ntu = ring->next_to_use;  in hclge_ring_space()
    27  int ntu = ring->next_to_use;  in is_valid_csq_clean_head()
    147  csq->next_to_use, csq->next_to_clean);  in hclge_cmd_csq_clean()
    164  return head == hw->cmq.csq.next_to_use;  in hclge_cmd_csq_done()
    207  desc_to_use = &hw->cmq.csq.desc[hw->cmq.csq.next_to_use];  in hclge_cmd_copy_desc()
    209  (hw->cmq.csq.next_to_use)++;  in hclge_cmd_copy_desc()
    210  if (hw->cmq.csq.next_to_use >= hw->cmq.csq.desc_num)  in hclge_cmd_copy_desc()
    211  hw->cmq.csq.next_to_use = 0;  in hclge_cmd_copy_desc()
    339  ntc = hw->cmq.csq.next_to_use;  in hclge_cmd_send()
    344  hclge_write_dev(hw, HCLGE_NIC_CSQ_TAIL_REG, hw->cmq.csq.next_to_use);  in hclge_cmd_send()
    [all …]
|
/Linux-v5.15/drivers/net/ethernet/intel/ice/

ice_controlq.h
    18  ((u16)((((R)->next_to_clean > (R)->next_to_use) ? 0 : (R)->count) + \
    19  (R)->next_to_clean - (R)->next_to_use - 1))
    55  u16 next_to_use;  member
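
The macro spread over lines 18-19 above is the control queue's
unused-descriptor count: count is added only when next_to_use has reached or
passed next_to_clean, and one slot is always kept empty. The same arithmetic
as a function, with assumed field names:

    #include <stdint.h>

    struct ctlq_ring_sketch {
        uint16_t count;          /* descriptors in the ring */
        uint16_t next_to_use;    /* producer index */
        uint16_t next_to_clean;  /* consumer index */
    };

    static uint16_t ctlq_desc_unused(const struct ctlq_ring_sketch *r)
    {
        uint16_t base = (r->next_to_clean > r->next_to_use) ? 0 : r->count;

        return (uint16_t)(base + r->next_to_clean - r->next_to_use - 1);
    }

For example, with count = 32, next_to_clean = 3 and next_to_use = 10 this
yields 32 + 3 - 10 - 1 = 24 unused descriptors.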
|
ice_txrx_lib.c
    13  u16 prev_ntu = rx_ring->next_to_use & ~0x7;  in ice_release_rx_desc()
    15  rx_ring->next_to_use = val;  in ice_release_rx_desc()
    222  u16 i = xdp_ring->next_to_use;  in ice_xmit_xdp_ring()
    260  xdp_ring->next_to_use = i;  in ice_xmit_xdp_ring()
|
ice_xsk.c
    366  u16 ntu = rx_ring->next_to_use;  in ice_alloc_rx_bufs_zc()
    399  if (rx_ring->next_to_use != ntu) {  in ice_alloc_rx_bufs_zc()
    603  if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)  in ice_clean_rx_irq_zc()
    637  tx_buf = &xdp_ring->tx_buf[xdp_ring->next_to_use];  in ice_xmit_zc()
    648  tx_desc = ICE_TX_DESC(xdp_ring, xdp_ring->next_to_use);  in ice_xmit_zc()
    653  xdp_ring->next_to_use++;  in ice_xmit_zc()
    654  if (xdp_ring->next_to_use == xdp_ring->count)  in ice_xmit_zc()
    655  xdp_ring->next_to_use = 0;  in ice_xmit_zc()
    831  u16 ntc = xdp_ring->next_to_clean, ntu = xdp_ring->next_to_use;  in ice_xsk_clean_xdp_ring()
|
ice_controlq.c
    380  cq->sq.next_to_use = 0;  in ice_init_sq()
    440  cq->rq.next_to_use = 0;  in ice_init_rq()
    950  return rd32(hw, cq->sq.head) == cq->sq.next_to_use;  in ice_sq_done()
    1018  details = ICE_CTL_Q_DETAILS(cq->sq, cq->sq.next_to_use);  in ice_sq_send_cmd()
    1036  desc_on_ring = ICE_CTL_Q_DESC(cq->sq, cq->sq.next_to_use);  in ice_sq_send_cmd()
    1043  dma_buf = &cq->sq.r.sq_bi[cq->sq.next_to_use];  in ice_sq_send_cmd()
    1062  (cq->sq.next_to_use)++;  in ice_sq_send_cmd()
    1063  if (cq->sq.next_to_use == cq->sq.count)  in ice_sq_send_cmd()
    1064  cq->sq.next_to_use = 0;  in ice_sq_send_cmd()
    1065  wr32(hw, cq->sq.tail, cq->sq.next_to_use);  in ice_sq_send_cmd()
    [all …]
|
ice_txrx.c
    63  i = tx_ring->next_to_use;  in ice_prgm_fdir_fltr()
    74  tx_ring->next_to_use = (i < tx_ring->count) ? i : 0;  in ice_prgm_fdir_fltr()
    98  writel(tx_ring->next_to_use, tx_ring->tail);  in ice_prgm_fdir_fltr()
    168  tx_ring->next_to_use = 0;  in ice_clean_tx_ring()
    358  tx_ring->next_to_use = 0;  in ice_setup_tx_ring()
    424  rx_ring->next_to_use = 0;  in ice_clean_rx_ring()
    482  rx_ring->next_to_use = 0;  in ice_setup_rx_ring()
    671  u16 ntu = rx_ring->next_to_use;  in ice_alloc_rx_bufs()
    714  if (rx_ring->next_to_use != ntu)  in ice_alloc_rx_bufs()
    1511  u16 i = tx_ring->next_to_use;  in ice_tx_map()
    [all …]
|
ice_txrx.h
    111  (u16)((((R)->next_to_clean > (R)->next_to_use) ? 0 : (R)->count) + \
    112  (R)->next_to_clean - (R)->next_to_use - 1)
    284  u16 next_to_use;  member
|
/Linux-v5.15/drivers/net/ethernet/intel/ixgb/

ixgb.h
    93  unsigned int next_to_use;  member
    101  ((((R)->next_to_clean > (R)->next_to_use) ? 0 : (R)->count) + \
    102  (R)->next_to_clean - (R)->next_to_use - 1)
|
/Linux-v5.15/drivers/net/ethernet/intel/igc/

igc_dump.c
    142  n, tx_ring->next_to_use, tx_ring->next_to_clean,  in igc_rings_dump()
    181  if (i == tx_ring->next_to_use &&  in igc_rings_dump()
    184  else if (i == tx_ring->next_to_use)  in igc_rings_dump()
    215  netdev_info(netdev, "%5d %5X %5X\n", n, rx_ring->next_to_use,  in igc_rings_dump()
    264  if (i == rx_ring->next_to_use)  in igc_rings_dump()
|
/Linux-v5.15/drivers/net/ethernet/atheros/atlx/

atl1.c
    1112  atomic_set(&tpd_ring->next_to_use, 0);  in atl1_init_ring_ptrs()
    1116  atomic_set(&rfd_ring->next_to_use, 0);  in atl1_init_ring_ptrs()
    1118  rrd_ring->next_to_use = 0;  in atl1_init_ring_ptrs()
    1156  atomic_set(&rfd_ring->next_to_use, 0);  in atl1_clean_rx_ring()
    1158  rrd_ring->next_to_use = 0;  in atl1_clean_rx_ring()
    1198  atomic_set(&tpd_ring->next_to_use, 0);  in atl1_clean_tx_ring()
    1497  value = ((atomic_read(&adapter->tpd_ring.next_to_use)  in atl1_configure()
    1501  ((atomic_read(&adapter->rfd_ring.next_to_use)  in atl1_configure()
    1743  tpd_next_to_use = atomic_read(&adapter->tpd_ring.next_to_use);  in atl1_update_mailbox()
    1744  rfd_next_to_use = atomic_read(&adapter->rfd_ring.next_to_use);  in atl1_update_mailbox()
    [all …]
|
/Linux-v5.15/drivers/net/ethernet/freescale/enetc/

enetc_cbdr.c
    24  cbdr->next_to_use = 0;  in enetc_setup_cbdr()
    41  enetc_wr_reg(cbdr->cir, cbdr->next_to_use);  in enetc_setup_cbdr()
    85  return (r->next_to_clean - r->next_to_use - 1 + r->bd_count) %  in enetc_cbd_unused()
    102  i = ring->next_to_use;  in enetc_send_cmd()
    109  ring->next_to_use = i;  in enetc_send_cmd()
|
enetc.h
    95  int next_to_use;  member
    125  if (bdr->next_to_clean > bdr->next_to_use)  in enetc_bd_unused()
    126  return bdr->next_to_clean - bdr->next_to_use - 1;  in enetc_bd_unused()
    128  return bdr->bd_count + bdr->next_to_clean - bdr->next_to_use - 1;  in enetc_bd_unused()
    148  int next_to_use;  member
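
enetc_bd_unused() above (lines 125-128) computes free descriptors with a
branch, while enetc_cbd_unused() in enetc_cbdr.c (line 85 in the previous
entry) uses a modulo; both keep one slot in reserve. A self-contained sketch
with assumed field names that checks the two forms agree:

    #include <assert.h>

    struct bdr_sketch {
        int bd_count;       /* descriptors in the ring */
        int next_to_use;    /* producer index */
        int next_to_clean;  /* consumer index */
    };

    static int bd_unused_branch(const struct bdr_sketch *b)
    {
        if (b->next_to_clean > b->next_to_use)
            return b->next_to_clean - b->next_to_use - 1;
        return b->bd_count + b->next_to_clean - b->next_to_use - 1;
    }

    static int bd_unused_mod(const struct bdr_sketch *b)
    {
        return (b->next_to_clean - b->next_to_use - 1 + b->bd_count) % b->bd_count;
    }

    int main(void)
    {
        struct bdr_sketch b = { .bd_count = 64 };

        for (b.next_to_use = 0; b.next_to_use < b.bd_count; b.next_to_use++)
            for (b.next_to_clean = 0; b.next_to_clean < b.bd_count; b.next_to_clean++)
                assert(bd_unused_branch(&b) == bd_unused_mod(&b));
        return 0;
    }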
|
/Linux-v5.15/drivers/net/ethernet/intel/iavf/

iavf_adminq.c
    341  hw->aq.asq.next_to_use = 0;  in iavf_init_asq()
    400  hw->aq.arq.next_to_use = 0;  in iavf_init_arq()
    618  return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use;  in iavf_asq_done()
    665  details = IAVF_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use);  in iavf_asq_send_command()
    720  desc_on_ring = IAVF_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use);  in iavf_asq_send_command()
    727  dma_buff = &hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use];  in iavf_asq_send_command()
    745  (hw->aq.asq.next_to_use)++;  in iavf_asq_send_command()
    746  if (hw->aq.asq.next_to_use == hw->aq.asq.count)  in iavf_asq_send_command()
    747  hw->aq.asq.next_to_use = 0;  in iavf_asq_send_command()
    749  wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use);  in iavf_asq_send_command()
    [all …]
|
iavf_txrx.c
    76  tx_ring->next_to_use = 0;  in iavf_clean_tx_ring()
    640  tx_ring->next_to_use = 0;  in iavf_setup_tx_descriptors()
    705  rx_ring->next_to_use = 0;  in iavf_clean_rx_ring()
    761  rx_ring->next_to_use = 0;  in iavf_setup_rx_descriptors()
    777  rx_ring->next_to_use = val;  in iavf_release_rx_desc()
    881  u16 ntu = rx_ring->next_to_use;  in iavf_alloc_rx_buffers()
    922  if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    928  if (rx_ring->next_to_use != ntu)  in iavf_alloc_rx_buffers()
    2119  int i = tx_ring->next_to_use;  in iavf_create_tx_ctx()
    2129  tx_ring->next_to_use = (i < tx_ring->count) ? i : 0;  in iavf_create_tx_ctx()
    [all …]
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns/

hns_enet.c
    41  struct hnae_desc *desc = &ring->desc[ring->next_to_use];  in fill_v2_desc_hw()
    42  struct hnae_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];  in fill_v2_desc_hw()
    127  ring_ptr_move_fw(ring, next_to_use);  in fill_v2_desc_hw()
    149  struct hnae_desc *desc = &ring->desc[ring->next_to_use];  in fill_desc()
    150  struct hnae_desc_cb *desc_cb = &ring->desc_cb[ring->next_to_use];  in fill_desc()
    203  ring_ptr_move_fw(ring, next_to_use);  in fill_desc()
    208  ring_ptr_move_bw(ring, next_to_use);  in unfill_desc()
    317  int size, next_to_use;  in hns_nic_net_xmit_hw() local
    334  next_to_use = ring->next_to_use;  in hns_nic_net_xmit_hw()
    378  while (ring->next_to_use != next_to_use) {  in hns_nic_net_xmit_hw()
    [all …]
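
The hns driver advances the ring with ring_ptr_move_fw() and unwinds a failed
transmit with ring_ptr_move_bw() (lines 203, 208 and the restore loop at 378).
A sketch of what such forward/backward moves typically look like; the struct
and helper names here are assumptions, not the driver's macros:

    struct hnae_ring_sketch {
        int desc_num;      /* descriptors in the ring */
        int next_to_use;   /* producer index */
    };

    /* Step the producer index forward by one, wrapping at the ring size. */
    static void ring_move_fw_sketch(struct hnae_ring_sketch *ring)
    {
        ring->next_to_use = (ring->next_to_use + 1) % ring->desc_num;
    }

    /* Step it back by one, e.g. when a partially built frame is undone. */
    static void ring_move_bw_sketch(struct hnae_ring_sketch *ring)
    {
        ring->next_to_use = (ring->next_to_use + ring->desc_num - 1) % ring->desc_num;
    }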
|
/Linux-v5.15/drivers/net/ethernet/atheros/atl1e/

atl1e_main.c
    751  tx_ring->next_to_use = 0;  in atl1e_init_ring_ptrs()
    1559  u16 next_to_use = 0;  in atl1e_tpd_avail() local
    1563  next_to_use = tx_ring->next_to_use;  in atl1e_tpd_avail()
    1565  return (u16)(next_to_clean > next_to_use) ?  in atl1e_tpd_avail()
    1566  (next_to_clean - next_to_use - 1) :  in atl1e_tpd_avail()
    1567  (tx_ring->count + next_to_clean - next_to_use - 1);  in atl1e_tpd_avail()
    1578  u16 next_to_use = 0;  in atl1e_get_tpd() local
    1580  next_to_use = tx_ring->next_to_use;  in atl1e_get_tpd()
    1581  if (++tx_ring->next_to_use == tx_ring->count)  in atl1e_get_tpd()
    1582  tx_ring->next_to_use = 0;  in atl1e_get_tpd()
    [all …]
|
/Linux-v5.15/drivers/net/ethernet/hisilicon/hns3/

hns3_enet.c
    971  ntu = tx_spare->next_to_use;  in hns3_tx_spare_space()
    1089  u32 ntu = tx_spare->next_to_use;  in hns3_tx_spare_alloc()
    1102  tx_spare->next_to_use = ntu + size;  in hns3_tx_spare_alloc()
    1103  if (tx_spare->next_to_use == tx_spare->len)  in hns3_tx_spare_alloc()
    1104  tx_spare->next_to_use = 0;  in hns3_tx_spare_alloc()
    1115  if (len > tx_spare->next_to_use) {  in hns3_tx_spare_rollback()
    1116  len -= tx_spare->next_to_use;  in hns3_tx_spare_rollback()
    1117  tx_spare->next_to_use = tx_spare->len - len;  in hns3_tx_spare_rollback()
    1119  tx_spare->next_to_use -= len;  in hns3_tx_spare_rollback()
    1638  struct hns3_desc *desc = &ring->desc[ring->next_to_use];  in hns3_fill_desc()
    [all …]
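
The hns3 tx_spare hits above (lines 1089-1119) manage a byte-granular spare
buffer rather than a descriptor ring: an allocation advances next_to_use by
the requested size and wraps at the buffer length, and a rollback walks it
back across the wrap. A simplified sketch with assumed fields; space checking
is omitted:

    #include <stdint.h>

    struct tx_spare_sketch {
        uint32_t len;          /* total bytes in the spare buffer */
        uint32_t next_to_use;  /* byte offset of the next allocation */
    };

    /* Reserve 'size' bytes; the caller must already know space is free. */
    static uint32_t tx_spare_alloc(struct tx_spare_sketch *s, uint32_t size)
    {
        uint32_t ntu = s->next_to_use;

        s->next_to_use = ntu + size;
        if (s->next_to_use == s->len)
            s->next_to_use = 0;
        return ntu;
    }

    /* Undo the most recent allocation(s) totalling 'len' bytes. */
    static void tx_spare_rollback(struct tx_spare_sketch *s, uint32_t len)
    {
        if (len > s->next_to_use) {
            len -= s->next_to_use;
            s->next_to_use = s->len - len;
        } else {
            s->next_to_use -= len;
        }
    }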
|
hclge_mbx.h
    181  (crq->next_to_use = (crq->next_to_use + 1) % crq->desc_num)
|
/Linux-v5.15/drivers/net/ethernet/atheros/atl1c/

atl1c_main.c
    909  tpd_ring->next_to_use = 0;  in atl1c_clean_tx_ring()
    932  rfd_ring->next_to_use = 0;  in atl1c_clean_rx_ring()
    933  rrd_ring->next_to_use = 0;  in atl1c_clean_rx_ring()
    949  tpd_ring[i].next_to_use = 0;  in atl1c_init_ring_ptrs()
    957  rfd_ring[i].next_to_use = 0;  in atl1c_init_ring_ptrs()
    959  rrd_ring[i].next_to_use = 0;  in atl1c_init_ring_ptrs()
    1818  next_next = rfd_next_to_use = rfd_ring->next_to_use;  in atl1c_alloc_rx_buffer()
    1868  rfd_ring->next_to_use = rfd_next_to_use;  in atl1c_alloc_rx_buffer()
    1870  rfd_ring->next_to_use & MB_RFDX_PROD_IDX_MASK);  in atl1c_alloc_rx_buffer()
    2026  u16 next_to_use = 0;  in atl1c_tpd_avail() local
    [all …]
|