Lines Matching refs:rx_ring

Cross-reference hits for the rx_ring identifier. Each entry is the source line number, the matched source text, and the enclosing function; entries marked "argument" are the sites where rx_ring is a function parameter. Judging by the function names, the hits come from the ixgbe AF_XDP zero-copy receive path (ixgbe_xsk.c in the Linux kernel).
98 struct ixgbe_ring *rx_ring, in ixgbe_run_xdp_zc() argument
106 xdp_prog = READ_ONCE(rx_ring->xdp_prog); in ixgbe_run_xdp_zc()
110 err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog); in ixgbe_run_xdp_zc()
132 trace_xdp_exception(rx_ring->netdev, xdp_prog, act); in ixgbe_run_xdp_zc()
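The three hits above (lines 106-132) fall in ixgbe_run_xdp_zc(), the per-packet hook that runs the ring's attached XDP program on a zero-copy buffer. Below is a minimal sketch of the surrounding control flow, reconstructed from these hits plus the standard XDP verdict handling in mainline ixgbe of roughly the v5.9-v5.12 era; the IXGBE_XDP_* result flags, the elided XDP_TX branch, and the exact locals are assumptions, not part of the listing. All sketches in this document assume the driver's usual headers (ixgbe.h, ixgbe_txrx_common.h, <net/xdp_sock_drv.h>).

static int ixgbe_run_xdp_zc(struct ixgbe_adapter *adapter,
                            struct ixgbe_ring *rx_ring,
                            struct xdp_buff *xdp)
{
        int err, result = IXGBE_XDP_PASS;
        struct bpf_prog *xdp_prog;
        u32 act;

        /* Line 106: the program pointer is re-read once per packet. */
        xdp_prog = READ_ONCE(rx_ring->xdp_prog);
        act = bpf_prog_run_xdp(xdp_prog, xdp);

        switch (act) {
        case XDP_PASS:
                break;
        case XDP_REDIRECT:
                /* Line 110: hand the frame to the redirect target. */
                err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog);
                result = !err ? IXGBE_XDP_REDIR : IXGBE_XDP_CONSUMED;
                break;
        /* XDP_TX (transmit on the driver's XDP TX ring) elided. */
        default:
                bpf_warn_invalid_xdp_action(act);
                fallthrough;
        case XDP_ABORTED:
                /* Line 132: record the exception for the xdp tracepoints. */
                trace_xdp_exception(rx_ring->netdev, xdp_prog, act);
                fallthrough;
        case XDP_DROP:
                result = IXGBE_XDP_CONSUMED;
                break;
        }
        return result;
}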
141 bool ixgbe_alloc_rx_buffers_zc(struct ixgbe_ring *rx_ring, u16 count) in ixgbe_alloc_rx_buffers_zc() argument
145 u16 i = rx_ring->next_to_use; in ixgbe_alloc_rx_buffers_zc()
153 rx_desc = IXGBE_RX_DESC(rx_ring, i); in ixgbe_alloc_rx_buffers_zc()
154 bi = &rx_ring->rx_buffer_info[i]; in ixgbe_alloc_rx_buffers_zc()
155 i -= rx_ring->count; in ixgbe_alloc_rx_buffers_zc()
158 bi->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ixgbe_alloc_rx_buffers_zc()
175 rx_desc = IXGBE_RX_DESC(rx_ring, 0); in ixgbe_alloc_rx_buffers_zc()
176 bi = rx_ring->rx_buffer_info; in ixgbe_alloc_rx_buffers_zc()
177 i -= rx_ring->count; in ixgbe_alloc_rx_buffers_zc()
186 i += rx_ring->count; in ixgbe_alloc_rx_buffers_zc()
188 if (rx_ring->next_to_use != i) { in ixgbe_alloc_rx_buffers_zc()
189 rx_ring->next_to_use = i; in ixgbe_alloc_rx_buffers_zc()
197 writel(i, rx_ring->tail); in ixgbe_alloc_rx_buffers_zc()
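Lines 141-197 together give nearly the whole refill routine. Two details worth calling out: the i -= rx_ring->count trick biases the index negative so the wrap test in the hot loop is a single !i check (lines 155, 177, and 186 are the bias, the re-bias on wrap, and the un-bias), and the writel() to the tail register at line 197 must be preceded by a write barrier so hardware never sees the tail bump before the descriptor contents. A sketch assembled from the hits; the descriptor-field writes and the wb.upper.length clear follow the mainline driver and are assumptions here:

bool ixgbe_alloc_rx_buffers_zc(struct ixgbe_ring *rx_ring, u16 count)
{
        union ixgbe_adv_rx_desc *rx_desc;
        struct ixgbe_rx_buffer *bi;
        u16 i = rx_ring->next_to_use;
        dma_addr_t dma;
        bool ok = true;

        if (!count)
                return true;

        rx_desc = IXGBE_RX_DESC(rx_ring, i);
        bi = &rx_ring->rx_buffer_info[i];
        i -= rx_ring->count;    /* bias negative: i == 0 now means "wrapped" */

        do {
                bi->xdp = xsk_buff_alloc(rx_ring->xsk_pool);
                if (!bi->xdp) {
                        ok = false;
                        break;
                }

                dma = xsk_buff_xdp_get_dma(bi->xdp);

                /* Point the descriptor at the UMEM frame's DMA address. */
                rx_desc->read.pkt_addr = cpu_to_le64(dma);

                rx_desc++;
                bi++;
                i++;
                if (unlikely(!i)) {
                        rx_desc = IXGBE_RX_DESC(rx_ring, 0);
                        bi = rx_ring->rx_buffer_info;
                        i -= rx_ring->count;
                }

                /* Clear the write-back length of the next descriptor so a
                 * stale value is never mistaken for a completed frame.
                 */
                rx_desc->wb.upper.length = 0;

                count--;
        } while (count);

        i += rx_ring->count;    /* undo the bias */

        if (rx_ring->next_to_use != i) {
                rx_ring->next_to_use = i;

                /* Descriptor writes must complete before the tail bump. */
                wmb();
                writel(i, rx_ring->tail);
        }

        return ok;
}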
203 static struct sk_buff *ixgbe_construct_skb_zc(struct ixgbe_ring *rx_ring, in ixgbe_construct_skb_zc() argument
211 skb = __napi_alloc_skb(&rx_ring->q_vector->napi, in ixgbe_construct_skb_zc()
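Only the signature and the __napi_alloc_skb() call of ixgbe_construct_skb_zc() show up here, since the rest of its body never touches rx_ring. The function exists because XDP_PASS frames must leave the UMEM: the zero-copy buffer is copied into a freshly allocated skb and then returned to the pool. A hedged sketch along the lines of the mainline driver; the memcpy and metadata handling are assumptions:

static struct sk_buff *ixgbe_construct_skb_zc(struct ixgbe_ring *rx_ring,
                                              struct ixgbe_rx_buffer *bi)
{
        unsigned int metasize = bi->xdp->data - bi->xdp->data_meta;
        unsigned int datasize = bi->xdp->data_end - bi->xdp->data;
        struct sk_buff *skb;

        /* Line 211: allocate from this ring's NAPI context. */
        skb = __napi_alloc_skb(&rx_ring->q_vector->napi,
                               bi->xdp->data_end - bi->xdp->data_hard_start,
                               GFP_ATOMIC | __GFP_NOWARN);
        if (unlikely(!skb))
                return NULL;

        skb_reserve(skb, bi->xdp->data - bi->xdp->data_hard_start);
        memcpy(__skb_put(skb, datasize), bi->xdp->data, datasize);
        if (metasize)
                skb_metadata_set(skb, metasize);

        /* The zero-copy buffer can go back to the pool immediately. */
        xsk_buff_free(bi->xdp);
        bi->xdp = NULL;
        return skb;
}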
227 static void ixgbe_inc_ntc(struct ixgbe_ring *rx_ring) in ixgbe_inc_ntc() argument
229 u32 ntc = rx_ring->next_to_clean + 1; in ixgbe_inc_ntc()
231 ntc = (ntc < rx_ring->count) ? ntc : 0; in ixgbe_inc_ntc()
232 rx_ring->next_to_clean = ntc; in ixgbe_inc_ntc()
233 prefetch(IXGBE_RX_DESC(rx_ring, ntc)); in ixgbe_inc_ntc()
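Lines 229-233 are effectively the entire helper; reassembled, it advances next_to_clean with wraparound and prefetches the descriptor that will be inspected next:

static void ixgbe_inc_ntc(struct ixgbe_ring *rx_ring)
{
        u32 ntc = rx_ring->next_to_clean + 1;

        ntc = (ntc < rx_ring->count) ? ntc : 0;
        rx_ring->next_to_clean = ntc;
        /* Warm the cache line of the descriptor we'll look at next. */
        prefetch(IXGBE_RX_DESC(rx_ring, ntc));
}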
237 struct ixgbe_ring *rx_ring, in ixgbe_clean_rx_irq_zc() argument
242 u16 cleaned_count = ixgbe_desc_unused(rx_ring); in ixgbe_clean_rx_irq_zc()
255 !ixgbe_alloc_rx_buffers_zc(rx_ring, in ixgbe_clean_rx_irq_zc()
260 rx_desc = IXGBE_RX_DESC(rx_ring, rx_ring->next_to_clean); in ixgbe_clean_rx_irq_zc()
271 bi = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbe_clean_rx_irq_zc()
279 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
281 &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbe_clean_rx_irq_zc()
290 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
295 xsk_buff_dma_sync_for_cpu(bi->xdp, rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
296 xdp_res = ixgbe_run_xdp_zc(adapter, rx_ring, bi->xdp); in ixgbe_clean_rx_irq_zc()
309 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
314 skb = ixgbe_construct_skb_zc(rx_ring, bi); in ixgbe_clean_rx_irq_zc()
316 rx_ring->rx_stats.alloc_rx_buff_failed++; in ixgbe_clean_rx_irq_zc()
321 ixgbe_inc_ntc(rx_ring); in ixgbe_clean_rx_irq_zc()
329 ixgbe_process_skb_fields(rx_ring, rx_desc, skb); in ixgbe_clean_rx_irq_zc()
346 u64_stats_update_begin(&rx_ring->syncp); in ixgbe_clean_rx_irq_zc()
347 rx_ring->stats.packets += total_rx_packets; in ixgbe_clean_rx_irq_zc()
348 rx_ring->stats.bytes += total_rx_bytes; in ixgbe_clean_rx_irq_zc()
349 u64_stats_update_end(&rx_ring->syncp); in ixgbe_clean_rx_irq_zc()
353 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ixgbe_clean_rx_irq_zc()
354 if (failure || rx_ring->next_to_clean == rx_ring->next_to_use) in ixgbe_clean_rx_irq_zc()
355 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
357 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
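Lines 237-357 trace the NAPI receive loop that ties the pieces above together: refill the ring in batches, pop write-back descriptors at next_to_clean, sync each XSK buffer for CPU access, run the XDP program, account consumed frames or fall back to skb construction on XDP_PASS, publish the counters under the u64_stats seqcount, and finally drive the AF_XDP need_wakeup protocol. A condensed sketch; the IXGBE_RX_BUFFER_WRITE batching threshold, the EOP/discard handling for multi-descriptor frames (lines 271-290), and the IXGBE_XDP_* flags follow the mainline driver and are assumptions here, and the post-loop XDP_TX/redirect flush is elided:

int ixgbe_clean_rx_irq_zc(struct ixgbe_q_vector *q_vector,
                          struct ixgbe_ring *rx_ring,
                          const int budget)
{
        unsigned int total_rx_bytes = 0, total_rx_packets = 0;
        struct ixgbe_adapter *adapter = q_vector->adapter;
        u16 cleaned_count = ixgbe_desc_unused(rx_ring);  /* line 242 */
        unsigned int xdp_res, xdp_xmit = 0;
        bool failure = false;
        struct sk_buff *skb;

        while (likely(total_rx_packets < budget)) {
                union ixgbe_adv_rx_desc *rx_desc;
                struct ixgbe_rx_buffer *bi;
                unsigned int size;

                /* Refill in batches; one buffer at a time is too slow. */
                if (cleaned_count >= IXGBE_RX_BUFFER_WRITE) {
                        failure = failure ||
                                  !ixgbe_alloc_rx_buffers_zc(rx_ring,
                                                             cleaned_count);
                        cleaned_count = 0;
                }

                rx_desc = IXGBE_RX_DESC(rx_ring, rx_ring->next_to_clean);
                size = le16_to_cpu(rx_desc->wb.upper.length);
                if (!size)
                        break;

                /* Don't read other descriptor fields before the length. */
                dma_rmb();

                bi = &rx_ring->rx_buffer_info[rx_ring->next_to_clean];

                /* Lines 271-290: multi-descriptor frames can't be handled
                 * here; drop the buffer and mark its successor for discard.
                 */
                if (unlikely(!ixgbe_test_staterr(rx_desc,
                                                 IXGBE_RXD_STAT_EOP))) {
                        xsk_buff_free(bi->xdp);
                        bi->xdp = NULL;
                        ixgbe_inc_ntc(rx_ring);
                        rx_ring->rx_buffer_info[rx_ring->next_to_clean].discard = true;
                        continue;
                }
                if (unlikely(bi->discard)) {
                        xsk_buff_free(bi->xdp);
                        bi->xdp = NULL;
                        bi->discard = false;
                        ixgbe_inc_ntc(rx_ring);
                        continue;
                }

                bi->xdp->data_end = bi->xdp->data + size;
                xsk_buff_dma_sync_for_cpu(bi->xdp, rx_ring->xsk_pool);
                xdp_res = ixgbe_run_xdp_zc(adapter, rx_ring, bi->xdp);

                if (xdp_res) {
                        if (xdp_res & (IXGBE_XDP_TX | IXGBE_XDP_REDIR))
                                xdp_xmit |= xdp_res; /* flushed after the loop; elided */
                        else
                                xsk_buff_free(bi->xdp);
                        bi->xdp = NULL;
                        total_rx_packets++;
                        total_rx_bytes += size;
                        cleaned_count++;
                        ixgbe_inc_ntc(rx_ring);
                        continue;
                }

                /* XDP_PASS: copy out into an skb for the regular stack. */
                skb = ixgbe_construct_skb_zc(rx_ring, bi);
                if (!skb) {
                        rx_ring->rx_stats.alloc_rx_buff_failed++;
                        break;
                }

                cleaned_count++;
                ixgbe_inc_ntc(rx_ring);

                total_rx_bytes += skb->len;
                total_rx_packets++;
                ixgbe_process_skb_fields(rx_ring, rx_desc, skb);
                ixgbe_rx_skb(q_vector, skb);
        }

        u64_stats_update_begin(&rx_ring->syncp);
        rx_ring->stats.packets += total_rx_packets;
        rx_ring->stats.bytes += total_rx_bytes;
        u64_stats_update_end(&rx_ring->syncp);

        /* need_wakeup: tell user space whether it must kick the kernel. */
        if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) {
                if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)
                        xsk_set_rx_need_wakeup(rx_ring->xsk_pool);
                else
                        xsk_clear_rx_need_wakeup(rx_ring->xsk_pool);
                return (int)total_rx_packets;
        }
        return failure ? budget : (int)total_rx_packets;
}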
364 void ixgbe_xsk_clean_rx_ring(struct ixgbe_ring *rx_ring) in ixgbe_xsk_clean_rx_ring() argument
369 for (i = 0; i < rx_ring->count; i++) { in ixgbe_xsk_clean_rx_ring()
370 bi = &rx_ring->rx_buffer_info[i]; in ixgbe_xsk_clean_rx_ring()
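Lines 364-370 are the teardown loop that runs when the ring is being dismantled or the pool detached: any buffer still owned by the ring must go back to the xsk pool, or the UMEM would leak frames. A short sketch; the xsk_buff_free() call is an assumption consistent with the allocation side above:

void ixgbe_xsk_clean_rx_ring(struct ixgbe_ring *rx_ring)
{
        struct ixgbe_rx_buffer *bi;
        u16 i;

        /* Return every still-held buffer to the xsk pool. */
        for (i = 0; i < rx_ring->count; i++) {
                bi = &rx_ring->rx_buffer_info[i];

                if (!bi->xdp)
                        continue;

                xsk_buff_free(bi->xdp);
                bi->xdp = NULL;
        }
}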