Lines Matching refs: reg_idx (cross-reference hits in the ixgbe driver's ring register caching helpers)

24 u16 reg_idx, pool; in ixgbe_cache_ring_dcb_sriov() local
36 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
37 for (i = 0, pool = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
39 if ((reg_idx & ~vmdq->mask) >= tcs) { in ixgbe_cache_ring_dcb_sriov()
41 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
43 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
47 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
48 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
50 if ((reg_idx & ~vmdq->mask) >= tcs) in ixgbe_cache_ring_dcb_sriov()
51 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
52 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
69 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
71 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
72 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
74 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
77 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
79 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
80 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
81 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
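
The ixgbe_cache_ring_dcb_sriov() matches above walk the VMDq pools: reg_idx starts at the first queue of the pool at vmdq->offset, counts up one register per ring, and whenever the queue offset inside a pool reaches the number of traffic classes it is rounded up to the next pool boundary with __ALIGN_MASK(). Below is a minimal standalone sketch of that arithmetic; the __ALIGN_MASK() definition matches the kernel's, but the pool geometry (vmdq_mask, vmdq_offset, tcs, num_rx_queues) is made up for illustration and is not taken from the driver.

#include <stdio.h>

/* Same definition as the kernel's __ALIGN_MASK() helper. */
#define __ALIGN_MASK(x, mask)	(((x) + (mask)) & ~(mask))

int main(void)
{
	/*
	 * Hypothetical VMDq layout: the mask selects the pool bits of a
	 * queue index, so ~mask (the low bits) selects the queue within
	 * a pool.  0x7C corresponds to 4 queues per pool.
	 */
	unsigned int vmdq_mask = 0x7C;		/* assumed: 4 queues per pool */
	unsigned int vmdq_offset = 2;		/* assumed: pools start at pool 2 */
	unsigned int tcs = 2;			/* assumed: 2 traffic classes in use */
	unsigned int num_rx_queues = 8;		/* assumed: 4 pools x 2 TCs */

	unsigned int queues_per_pool = __ALIGN_MASK(1, ~vmdq_mask);	/* = 4 */
	unsigned int reg_idx = vmdq_offset * queues_per_pool;
	unsigned int i, pool;

	for (i = 0, pool = 0; i < num_rx_queues; i++, reg_idx++) {
		/* Past the TCs of this pool: jump to the next pool boundary. */
		if ((reg_idx & ~vmdq_mask) >= tcs) {
			pool++;
			reg_idx = __ALIGN_MASK(reg_idx, ~vmdq_mask);
		}
		printf("rx_ring[%u] -> reg_idx %u (pool %u)\n", i, reg_idx, pool);
	}
	return 0;
}

The Tx loop (lines 47-52) repeats the same walk, and the FCoE block (lines 69-81) restarts reg_idx just past the last VMDq pool, (vmdq->offset + vmdq->indices) * queues_per_pool, then aligns each ring to its pool boundary plus the FCoE traffic class.
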
162 adapter->tx_ring[offset + i]->reg_idx = tx_idx; in ixgbe_cache_ring_dcb()
163 adapter->rx_ring[offset + i]->reg_idx = rx_idx; in ixgbe_cache_ring_dcb()
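
For plain DCB (lines 162-163), each traffic class owns a contiguous block of rings at offset, and the block's register indices count up from a per-TC starting value. The match list does not show the surrounding loop, so the sketch below guesses at it: the per-TC ring count (RSS_I) and the simple starting-index rule are assumptions standing in for the driver's per-TC lookup.

#include <stdio.h>

#define NUM_TCS	4	/* assumed: 4 traffic classes */
#define RSS_I	2	/* assumed: 2 rings per traffic class */

int main(void)
{
	unsigned int tx_reg[NUM_TCS * RSS_I], rx_reg[NUM_TCS * RSS_I];
	unsigned int tc, i, offset;

	for (tc = 0, offset = 0; tc < NUM_TCS; tc++, offset += RSS_I) {
		/* Placeholder starting indices, standing in for the
		 * driver's per-TC first-index lookup.
		 */
		unsigned int tx_idx = tc * RSS_I;
		unsigned int rx_idx = tc * RSS_I;

		for (i = 0; i < RSS_I; i++, tx_idx++, rx_idx++) {
			tx_reg[offset + i] = tx_idx;	/* cf. line 162 */
			rx_reg[offset + i] = rx_idx;	/* cf. line 163 */
		}
	}

	for (i = 0; i < NUM_TCS * RSS_I; i++)
		printf("ring %u: tx reg_idx %u, rx reg_idx %u\n",
		       i, tx_reg[i], rx_reg[i]);
	return 0;
}
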
189 u16 reg_idx, pool; in ixgbe_cache_ring_sriov() local
198 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
199 for (i = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
206 if ((reg_idx & ~vmdq->mask) >= rss->indices) { in ixgbe_cache_ring_sriov()
208 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
210 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
216 for (; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
217 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
222 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
223 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
230 if ((reg_idx & rss->mask) >= rss->indices) in ixgbe_cache_ring_sriov()
231 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
232 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
237 for (; i < adapter->num_tx_queues; i++, reg_idx++) in ixgbe_cache_ring_sriov()
238 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
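
ixgbe_cache_ring_sriov() (lines 189-238) uses the same pool walk, but the per-pool limit is the RSS ring count rather than the TC count, and any rings left over once the main loop stops (the trailing loops at lines 216-217 and 237-238) simply keep counting register indices linearly. Below is a sketch of the Rx side under assumed numbers; the early break for FCoE rings is an assumption, since the condition that triggers it does not reference reg_idx and therefore does not appear in the match list.

#include <stdio.h>

#define __ALIGN_MASK(x, mask)	(((x) + (mask)) & ~(mask))

int main(void)
{
	/* Assumed layout: 4 queues per pool, 2 RSS rings used per pool,
	 * and the rings from fcoe_offset onward reserved for FCoE.
	 */
	unsigned int vmdq_mask = 0x7C;
	unsigned int vmdq_offset = 0;
	unsigned int rss_indices = 2;
	unsigned int num_rx_queues = 8;
	unsigned int fcoe_offset = 6;

	unsigned int reg_idx = vmdq_offset * __ALIGN_MASK(1, ~vmdq_mask);
	unsigned int i;

	for (i = 0; i < num_rx_queues; i++, reg_idx++) {
		/* Assumed: rings past the first FCoE ring are handled below. */
		if (fcoe_offset && i > fcoe_offset)
			break;
		/* Past the RSS rings of this pool: skip to the next pool. */
		if ((reg_idx & ~vmdq_mask) >= rss_indices)
			reg_idx = __ALIGN_MASK(reg_idx, ~vmdq_mask);
		printf("rx_ring[%u] -> reg_idx %u\n", i, reg_idx);
	}

	/* Remaining rings continue the register sequence 1:1 (cf. lines 216-217). */
	for (; i < num_rx_queues; i++, reg_idx++)
		printf("rx_ring[%u] -> reg_idx %u (linear tail)\n", i, reg_idx);

	return 0;
}

Note that the Tx boundary check at line 230 masks with rss->mask while the Rx check at line 206 masks with ~vmdq->mask; both reduce reg_idx to a small queue offset before comparing it against rss->indices.
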
254 int i, reg_idx; in ixgbe_cache_ring_rss() local
257 adapter->rx_ring[i]->reg_idx = i; in ixgbe_cache_ring_rss()
260 for (i = 0, reg_idx = 0; i < adapter->num_tx_queues; i++, reg_idx++) in ixgbe_cache_ring_rss()
261 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_rss()
262 for (i = 0; i < adapter->num_xdp_queues; i++, reg_idx++) in ixgbe_cache_ring_rss()
263 adapter->xdp_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_rss()
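
The pure-RSS case (lines 254-263) is the identity mapping: Rx ring i uses register index i, the Tx rings count up from 0, and the XDP Tx rings continue in the same register index space immediately after the ordinary Tx rings. A trivial standalone illustration with assumed queue counts:

#include <stdio.h>

int main(void)
{
	/* Assumed queue counts, not taken from the driver. */
	unsigned int num_tx_queues = 4, num_xdp_queues = 2;
	unsigned int i, reg_idx;

	/* Tx rings take register indices 0..num_tx_queues-1 ... */
	for (i = 0, reg_idx = 0; i < num_tx_queues; i++, reg_idx++)
		printf("tx_ring[%u] -> reg_idx %u\n", i, reg_idx);

	/* ... and the XDP rings keep counting in the same space. */
	for (i = 0; i < num_xdp_queues; i++, reg_idx++)
		printf("xdp_ring[%u] -> reg_idx %u\n", i, reg_idx);

	return 0;
}
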
282 adapter->rx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()
283 adapter->tx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()
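
The final two matches (lines 282-283) are the single-queue default in ixgbe_cache_ring_register(): ring 0 maps to register 0 before any of the more specific helpers runs. The dispatch logic around them does not reference reg_idx and so is not in the match list; the sketch below only illustrates the presumed pattern of trying layouts from most to least specific, with stub helpers in place of the real ones.

#include <stdbool.h>
#include <stdio.h>

/* Stubs standing in for the layout helpers; each would return true only
 * when its feature combination (DCB, SR-IOV, ...) is active.
 */
static bool cache_ring_dcb_sriov(void) { return false; }
static bool cache_ring_dcb(void)       { return false; }
static bool cache_ring_sriov(void)     { return false; }

int main(void)
{
	unsigned int rx0_reg_idx = 0, tx0_reg_idx = 0;	/* single-queue default */

	/* Try the most specific layout first; the first helper that claims
	 * the configuration wins, otherwise the plain RSS/single-queue
	 * mapping set above remains in effect.
	 */
	if (!cache_ring_dcb_sriov() && !cache_ring_dcb() && !cache_ring_sriov())
		printf("falling back to RSS/single-queue layout\n");

	printf("rx_ring[0] reg_idx %u, tx_ring[0] reg_idx %u\n",
	       rx0_reg_idx, tx0_reg_idx);
	return 0;
}
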