Lines matching refs:rx_ring (each hit lists the source line number, the code on that line, the enclosing function, and whether rx_ring is an argument or a local there)
70 ice_qvec_dis_irq(struct ice_vsi *vsi, struct ice_ring *rx_ring, in ice_qvec_dis_irq() argument
82 reg = rx_ring->reg_idx; in ice_qvec_dis_irq()
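The two hits above show rx_ring entering ice_qvec_dis_irq() as an argument and contributing only its reg_idx. A minimal sketch of the surrounding disable logic, assuming the usual ice register accessors (rd32()/wr32()) and the QINT_RQCTL cause-enable bit; everything beyond line 82 is reconstructed, not verbatim:

static void
ice_qvec_dis_irq(struct ice_vsi *vsi, struct ice_ring *rx_ring,
                 struct ice_q_vector *q_vector)
{
        struct ice_pf *pf = vsi->back;
        struct ice_hw *hw = &pf->hw;
        u32 val;
        u16 reg;

        /* line 82: rx_ring supplies only the HW register index */
        reg = rx_ring->reg_idx;

        /* clear the cause-enable bit so this Rx queue stops raising
         * interrupts while the queue pair is torn down (assumed body)
         */
        val = rd32(hw, QINT_RQCTL(reg));
        val &= ~QINT_RQCTL_CAUSE_ENA_M;
        wr32(hw, QINT_RQCTL(reg), val);
}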
150 struct ice_ring *tx_ring, *rx_ring; in ice_qp_dis() local
159 rx_ring = vsi->rx_rings[q_idx]; in ice_qp_dis()
160 q_vector = rx_ring->q_vector; in ice_qp_dis()
170 ice_qvec_dis_irq(vsi, rx_ring, q_vector); in ice_qp_dis()
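ice_qp_dis() ties the hits at 159-170 together: both rings of queue pair q_idx are looked up, the shared interrupt vector is taken from the Rx side, and the vector is masked before the queues are stopped. A hedged fragment; surrounding checks, the Tx teardown, and error handling are elided:

        struct ice_ring *tx_ring, *rx_ring;
        struct ice_q_vector *q_vector;

        tx_ring = vsi->tx_rings[q_idx];
        rx_ring = vsi->rx_rings[q_idx];         /* line 159 */
        q_vector = rx_ring->q_vector;           /* line 160: Tx and Rx share it */

        ice_qvec_dis_irq(vsi, rx_ring, q_vector);       /* line 170 */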
207 struct ice_ring *tx_ring, *rx_ring; in ice_qp_ena() local
223 rx_ring = vsi->rx_rings[q_idx]; in ice_qp_ena()
224 q_vector = rx_ring->q_vector; in ice_qp_ena()
242 err = ice_setup_rx_ctx(rx_ring); in ice_qp_ena()
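The enable path mirrors the disable path: the same lookups at 223-224, then ice_setup_rx_ctx() at 242 rewrites the Rx queue context so the (possibly newly attached) xsk_pool takes effect. A hedged fragment; the error label is hypothetical:

        rx_ring = vsi->rx_rings[q_idx];         /* line 223 */
        q_vector = rx_ring->q_vector;           /* line 224 */

        /* reprogram the Rx queue context for the new buffer source */
        err = ice_setup_rx_ctx(rx_ring);        /* line 242 */
        if (err)
                goto err_out;                   /* hypothetical label */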
413 bool ice_alloc_rx_bufs_zc(struct ice_ring *rx_ring, u16 count) in ice_alloc_rx_bufs_zc() argument
416 u16 ntu = rx_ring->next_to_use; in ice_alloc_rx_bufs_zc()
424 rx_desc = ICE_RX_DESC(rx_ring, ntu); in ice_alloc_rx_bufs_zc()
425 rx_buf = &rx_ring->rx_buf[ntu]; in ice_alloc_rx_bufs_zc()
428 rx_buf->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ice_alloc_rx_bufs_zc()
442 if (unlikely(ntu == rx_ring->count)) { in ice_alloc_rx_bufs_zc()
443 rx_desc = ICE_RX_DESC(rx_ring, 0); in ice_alloc_rx_bufs_zc()
444 rx_buf = rx_ring->rx_buf; in ice_alloc_rx_bufs_zc()
449 if (rx_ring->next_to_use != ntu) in ice_alloc_rx_bufs_zc()
450 ice_release_rx_desc(rx_ring, ntu); in ice_alloc_rx_bufs_zc()
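The hits at 413-450 cover most of the allocator, so it reassembles almost line for line; the descriptor writes (pkt_addr, status_error0) and xsk_buff_xdp_get_dma() are filled in from the standard ice flex-descriptor layout and XSK pool API, and should be read as an informed reconstruction rather than a verbatim copy:

bool ice_alloc_rx_bufs_zc(struct ice_ring *rx_ring, u16 count)
{
        union ice_32b_rx_flex_desc *rx_desc;
        u16 ntu = rx_ring->next_to_use;
        struct ice_rx_buf *rx_buf;
        bool ok = true;
        dma_addr_t dma;

        if (!count)
                return true;

        rx_desc = ICE_RX_DESC(rx_ring, ntu);
        rx_buf = &rx_ring->rx_buf[ntu];

        do {
                /* grab a pre-mapped UMEM frame from the pool */
                rx_buf->xdp = xsk_buff_alloc(rx_ring->xsk_pool);
                if (!rx_buf->xdp) {
                        ok = false;
                        break;
                }

                dma = xsk_buff_xdp_get_dma(rx_buf->xdp);
                rx_desc->read.pkt_addr = cpu_to_le64(dma);
                rx_desc->wb.status_error0 = 0;  /* clear stale DD bit */

                rx_desc++;
                rx_buf++;
                ntu++;

                if (unlikely(ntu == rx_ring->count)) {
                        rx_desc = ICE_RX_DESC(rx_ring, 0);
                        rx_buf = rx_ring->rx_buf;
                        ntu = 0;
                }
        } while (--count);

        /* only touch the tail register if we actually advanced */
        if (rx_ring->next_to_use != ntu)
                ice_release_rx_desc(rx_ring, ntu);

        return ok;
}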
459 static void ice_bump_ntc(struct ice_ring *rx_ring) in ice_bump_ntc() argument
461 int ntc = rx_ring->next_to_clean + 1; in ice_bump_ntc()
463 ntc = (ntc < rx_ring->count) ? ntc : 0; in ice_bump_ntc()
464 rx_ring->next_to_clean = ntc; in ice_bump_ntc()
465 prefetch(ICE_RX_DESC(rx_ring, ntc)); in ice_bump_ntc()
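The five hits at 459-465 are the entire helper, so it reassembles directly:

static void ice_bump_ntc(struct ice_ring *rx_ring)
{
        int ntc = rx_ring->next_to_clean + 1;

        /* wrap next_to_clean at the end of the ring */
        ntc = (ntc < rx_ring->count) ? ntc : 0;
        rx_ring->next_to_clean = ntc;
        /* warm the cache line of the descriptor we will read next */
        prefetch(ICE_RX_DESC(rx_ring, ntc));
}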
478 ice_construct_skb_zc(struct ice_ring *rx_ring, struct ice_rx_buf *rx_buf) in ice_construct_skb_zc() argument
486 skb = __napi_alloc_skb(&rx_ring->q_vector->napi, datasize_hard, in ice_construct_skb_zc()
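ice_construct_skb_zc() is the XDP_PASS fallback: the frame must be copied out of the UMEM so the zero-copy buffer can be recycled. Only lines 478 and 486 are visible; the size math, the memcpy, and the xsk_buff_free() recycle step below are assumptions modeled on how zero-copy drivers typically do this:

static struct sk_buff *
ice_construct_skb_zc(struct ice_ring *rx_ring, struct ice_rx_buf *rx_buf)
{
        unsigned int metasize = rx_buf->xdp->data - rx_buf->xdp->data_meta;
        unsigned int datasize = rx_buf->xdp->data_end - rx_buf->xdp->data;
        unsigned int datasize_hard = rx_buf->xdp->data_end -
                                     rx_buf->xdp->data_hard_start;
        struct sk_buff *skb;

        skb = __napi_alloc_skb(&rx_ring->q_vector->napi, datasize_hard,
                               GFP_ATOMIC | __GFP_NOWARN);
        if (unlikely(!skb))
                return NULL;

        skb_reserve(skb, rx_buf->xdp->data - rx_buf->xdp->data_hard_start);
        memcpy(__skb_put(skb, datasize), rx_buf->xdp->data, datasize);
        if (metasize)
                skb_metadata_set(skb, metasize);

        /* the UMEM frame can go back to the pool immediately */
        xsk_buff_free(rx_buf->xdp);
        rx_buf->xdp = NULL;
        return skb;
}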
509 ice_run_xdp_zc(struct ice_ring *rx_ring, struct xdp_buff *xdp) in ice_run_xdp_zc() argument
517 xdp_prog = READ_ONCE(rx_ring->xdp_prog); in ice_run_xdp_zc()
528 xdp_ring = rx_ring->vsi->xdp_rings[rx_ring->q_index]; in ice_run_xdp_zc()
532 err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog); in ice_run_xdp_zc()
539 trace_xdp_exception(rx_ring->netdev, xdp_prog, act); in ice_run_xdp_zc()
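ice_run_xdp_zc() dispatches on the verdict of the attached program. The hits at 509-539 pin down the READ_ONCE() load, the XDP_TX ring lookup, the redirect call, and the exception tracepoint; the switch scaffolding and the ICE_XDP_* return codes around them are a hedged reconstruction (RCU locking elided):

static int
ice_run_xdp_zc(struct ice_ring *rx_ring, struct xdp_buff *xdp)
{
        int err, result = ICE_XDP_PASS;
        struct ice_ring *xdp_ring;
        struct bpf_prog *xdp_prog;
        u32 act;

        /* the program pointer is swapped by the control path */
        xdp_prog = READ_ONCE(rx_ring->xdp_prog);
        if (!xdp_prog)
                return ICE_XDP_PASS;

        act = bpf_prog_run_xdp(xdp_prog, xdp);
        switch (act) {
        case XDP_PASS:
                break;
        case XDP_TX:
                /* bounce the frame out of the paired XDP Tx ring */
                xdp_ring = rx_ring->vsi->xdp_rings[rx_ring->q_index];
                result = ice_xmit_xdp_buff(xdp, xdp_ring);
                break;
        case XDP_REDIRECT:
                err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog);
                result = err ? ICE_XDP_CONSUMED : ICE_XDP_REDIR;
                break;
        default:
                bpf_warn_invalid_xdp_action(act);
                fallthrough;
        case XDP_ABORTED:
                trace_xdp_exception(rx_ring->netdev, xdp_prog, act);
                fallthrough;
        case XDP_DROP:
                result = ICE_XDP_CONSUMED;
                break;
        }

        return result;
}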
557 int ice_clean_rx_irq_zc(struct ice_ring *rx_ring, int budget) in ice_clean_rx_irq_zc() argument
560 u16 cleaned_count = ICE_DESC_UNUSED(rx_ring); in ice_clean_rx_irq_zc()
574 failure |= ice_alloc_rx_bufs_zc(rx_ring, in ice_clean_rx_irq_zc()
579 rx_desc = ICE_RX_DESC(rx_ring, rx_ring->next_to_clean); in ice_clean_rx_irq_zc()
596 rx_buf = &rx_ring->rx_buf[rx_ring->next_to_clean]; in ice_clean_rx_irq_zc()
598 xsk_buff_dma_sync_for_cpu(rx_buf->xdp, rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
600 xdp_res = ice_run_xdp_zc(rx_ring, rx_buf->xdp); in ice_clean_rx_irq_zc()
612 ice_bump_ntc(rx_ring); in ice_clean_rx_irq_zc()
617 skb = ice_construct_skb_zc(rx_ring, rx_buf); in ice_clean_rx_irq_zc()
619 rx_ring->rx_stats.alloc_buf_failed++; in ice_clean_rx_irq_zc()
624 ice_bump_ntc(rx_ring); in ice_clean_rx_irq_zc()
641 ice_process_skb_fields(rx_ring, rx_desc, skb, rx_ptype); in ice_clean_rx_irq_zc()
642 ice_receive_skb(rx_ring, skb, vlan_tag); in ice_clean_rx_irq_zc()
645 ice_finalize_xdp_rx(rx_ring, xdp_xmit); in ice_clean_rx_irq_zc()
646 ice_update_rx_ring_stats(rx_ring, total_rx_packets, total_rx_bytes); in ice_clean_rx_irq_zc()
648 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ice_clean_rx_irq_zc()
649 if (failure || rx_ring->next_to_clean == rx_ring->next_to_use) in ice_clean_rx_irq_zc()
650 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
652 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
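ice_clean_rx_irq_zc() is the NAPI poll body that ties the helpers together, and the hits at 557-652 trace its shape: batch refill, descriptor fetch, DMA sync, XDP verdict, skb fallback, then stats and the need_wakeup handshake with user space. A condensed, hedged skeleton; the DD-bit check and the size/ptype/VLAN extraction are elided or stubbed, and ICE_RX_BUF_WRITE is the usual ice refill batch threshold:

int ice_clean_rx_irq_zc(struct ice_ring *rx_ring, int budget)
{
        unsigned int total_rx_bytes = 0, total_rx_packets = 0;
        u16 cleaned_count = ICE_DESC_UNUSED(rx_ring);
        unsigned int xdp_xmit = 0;
        bool failure = false;

        while (likely(total_rx_packets < (unsigned int)budget)) {
                union ice_32b_rx_flex_desc *rx_desc;
                u16 vlan_tag = 0, rx_ptype = 0; /* extraction elided */
                struct ice_rx_buf *rx_buf;
                struct sk_buff *skb;
                int xdp_res;

                /* refill in batches once enough descriptors are free */
                if (cleaned_count >= ICE_RX_BUF_WRITE) {
                        failure |= !ice_alloc_rx_bufs_zc(rx_ring,
                                                         cleaned_count);
                        cleaned_count = 0;
                }

                rx_desc = ICE_RX_DESC(rx_ring, rx_ring->next_to_clean);
                /* DD-bit test on rx_desc (elided): break when not done */

                rx_buf = &rx_ring->rx_buf[rx_ring->next_to_clean];
                xsk_buff_dma_sync_for_cpu(rx_buf->xdp, rx_ring->xsk_pool);

                xdp_res = ice_run_xdp_zc(rx_ring, rx_buf->xdp);
                if (xdp_res) {
                        if (xdp_res & (ICE_XDP_TX | ICE_XDP_REDIR))
                                xdp_xmit |= xdp_res;    /* flush later */
                        else
                                xsk_buff_free(rx_buf->xdp); /* dropped */
                        rx_buf->xdp = NULL;
                        total_rx_packets++;
                        cleaned_count++;
                        ice_bump_ntc(rx_ring);
                        continue;
                }

                /* XDP_PASS: copy into an skb for the regular stack */
                skb = ice_construct_skb_zc(rx_ring, rx_buf);
                if (!skb) {
                        rx_ring->rx_stats.alloc_buf_failed++;
                        break;
                }
                cleaned_count++;
                ice_bump_ntc(rx_ring);

                total_rx_bytes += skb->len;
                total_rx_packets++;
                ice_process_skb_fields(rx_ring, rx_desc, skb, rx_ptype);
                ice_receive_skb(rx_ring, skb, vlan_tag);
        }

        ice_finalize_xdp_rx(rx_ring, xdp_xmit);
        ice_update_rx_ring_stats(rx_ring, total_rx_packets, total_rx_bytes);

        /* if the ring ran dry, ask user space to kick us via poll() */
        if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) {
                if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)
                        xsk_set_rx_need_wakeup(rx_ring->xsk_pool);
                else
                        xsk_clear_rx_need_wakeup(rx_ring->xsk_pool);
                return (int)total_rx_packets;
        }

        return failure ? budget : (int)total_rx_packets;
}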
860 void ice_xsk_clean_rx_ring(struct ice_ring *rx_ring) in ice_xsk_clean_rx_ring() argument
864 for (i = 0; i < rx_ring->count; i++) { in ice_xsk_clean_rx_ring()
865 struct ice_rx_buf *rx_buf = &rx_ring->rx_buf[i]; in ice_xsk_clean_rx_ring()
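Finally, ice_xsk_clean_rx_ring() walks every slot on teardown and returns any buffer the ring still holds; the NULL check and the xsk_buff_free() recycle step below are assumed, consistent with the allocator above:

void ice_xsk_clean_rx_ring(struct ice_ring *rx_ring)
{
        u16 i;

        for (i = 0; i < rx_ring->count; i++) {
                struct ice_rx_buf *rx_buf = &rx_ring->rx_buf[i];

                if (!rx_buf->xdp)
                        continue;

                /* hand the UMEM frame back to the pool */
                xsk_buff_free(rx_buf->xdp);
                rx_buf->xdp = NULL;
        }
}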