Lines Matching refs: rx_ring — each entry gives the source line number, the matching line, and the enclosing function; a trailing "argument" or "local" marks lines where rx_ring is a function parameter or a local variable declaration.
70 ice_qvec_dis_irq(struct ice_vsi *vsi, struct ice_ring *rx_ring, in ice_qvec_dis_irq() argument
82 reg = rx_ring->reg_idx; in ice_qvec_dis_irq()
147 struct ice_ring *tx_ring, *rx_ring; in ice_qp_dis() local
156 rx_ring = vsi->rx_rings[q_idx]; in ice_qp_dis()
157 q_vector = rx_ring->q_vector; in ice_qp_dis()
167 ice_qvec_dis_irq(vsi, rx_ring, q_vector); in ice_qp_dis()
204 struct ice_ring *tx_ring, *rx_ring; in ice_qp_ena() local
220 rx_ring = vsi->rx_rings[q_idx]; in ice_qp_ena()
221 q_vector = rx_ring->q_vector; in ice_qp_ena()
239 err = ice_vsi_cfg_rxq(rx_ring); in ice_qp_ena()
363 bool ice_alloc_rx_bufs_zc(struct ice_ring *rx_ring, u16 count) in ice_alloc_rx_bufs_zc() argument
366 u16 ntu = rx_ring->next_to_use; in ice_alloc_rx_bufs_zc()
374 rx_desc = ICE_RX_DESC(rx_ring, ntu); in ice_alloc_rx_bufs_zc()
375 rx_buf = &rx_ring->rx_buf[ntu]; in ice_alloc_rx_bufs_zc()
378 rx_buf->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ice_alloc_rx_bufs_zc()
392 if (unlikely(ntu == rx_ring->count)) { in ice_alloc_rx_bufs_zc()
393 rx_desc = ICE_RX_DESC(rx_ring, 0); in ice_alloc_rx_bufs_zc()
394 rx_buf = rx_ring->rx_buf; in ice_alloc_rx_bufs_zc()
399 if (rx_ring->next_to_use != ntu) { in ice_alloc_rx_bufs_zc()
402 ice_release_rx_desc(rx_ring, ntu); in ice_alloc_rx_bufs_zc()
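The references above (lines 363-402) outline the zero-copy refill path in ice_alloc_rx_bufs_zc(): allocate one xsk buffer per descriptor with xsk_buff_alloc(), wrap next_to_use at rx_ring->count, and call ice_release_rx_desc() only when next_to_use actually advanced. Below is a minimal userspace model of that accounting; "struct ring", alloc_buf() and tail_bump() are simplified stand-ins (not driver code), and descriptor/DMA programming is omitted entirely.

/*
 * Minimal userspace model of the refill accounting shown above.
 * alloc_buf() and tail_bump() stand in for xsk_buff_alloc() and
 * ice_release_rx_desc().
 */
#include <stdbool.h>
#include <stdio.h>

#define RING_SIZE 8

struct ring {
	int count;              /* number of descriptors in the ring */
	int next_to_use;        /* next slot the driver will fill */
	void *buf[RING_SIZE];   /* stand-in for rx_ring->rx_buf[i].xdp */
};

static void *alloc_buf(void)
{
	static char backing[64];

	return backing;          /* the real xsk_buff_alloc() can return NULL */
}

static void tail_bump(struct ring *r, int ntu)
{
	(void)r;
	printf("tail register written: %d\n", ntu);
}

static bool alloc_rx_bufs(struct ring *r, int count)
{
	int ntu = r->next_to_use;

	if (!count)
		return true;

	do {
		void *b = alloc_buf();

		if (!b)
			break;               /* allocation failure: stop early */
		r->buf[ntu] = b;

		ntu++;
		if (ntu == r->count)         /* wrap to slot 0 at the end of the ring */
			ntu = 0;
	} while (--count);

	/* only advance next_to_use and notify hardware if anything was filled */
	if (r->next_to_use != ntu) {
		r->next_to_use = ntu;
		tail_bump(r, ntu);
	}

	return count == 0;                   /* true when every requested slot was filled */
}

int main(void)
{
	struct ring r = { .count = RING_SIZE, .next_to_use = 6 };

	printf("all filled: %d\n", alloc_rx_bufs(&r, 4));
	return 0;
}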
412 static void ice_bump_ntc(struct ice_ring *rx_ring) in ice_bump_ntc() argument
414 int ntc = rx_ring->next_to_clean + 1; in ice_bump_ntc()
416 ntc = (ntc < rx_ring->count) ? ntc : 0; in ice_bump_ntc()
417 rx_ring->next_to_clean = ntc; in ice_bump_ntc()
418 prefetch(ICE_RX_DESC(rx_ring, ntc)); in ice_bump_ntc()
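Lines 412-418 above show essentially the whole of ice_bump_ntc(): advance next_to_clean, wrap to 0 at rx_ring->count, and prefetch the next descriptor. A minimal userspace model of the wrap arithmetic follows, with "struct ring" as a stand-in for struct ice_ring.

/* Simplified model of the next_to_clean bump; not driver code. */
#include <stdio.h>

struct ring {
	int count;          /* number of descriptors in the ring */
	int next_to_clean;  /* index of the next descriptor to process */
};

static void bump_ntc(struct ring *r)
{
	int ntc = r->next_to_clean + 1;

	/* wrap back to descriptor 0 once the end of the ring is reached */
	ntc = (ntc < r->count) ? ntc : 0;
	r->next_to_clean = ntc;
	/* the driver additionally prefetches the descriptor at ntc here */
}

int main(void)
{
	struct ring r = { .count = 4, .next_to_clean = 2 };

	bump_ntc(&r); /* 2 -> 3 */
	bump_ntc(&r); /* 3 -> 0 (wrap) */
	printf("next_to_clean = %d\n", r.next_to_clean);
	return 0;
}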
431 ice_construct_skb_zc(struct ice_ring *rx_ring, struct ice_rx_buf *rx_buf) in ice_construct_skb_zc() argument
439 skb = __napi_alloc_skb(&rx_ring->q_vector->napi, datasize_hard, in ice_construct_skb_zc()
462 ice_run_xdp_zc(struct ice_ring *rx_ring, struct xdp_buff *xdp) in ice_run_xdp_zc() argument
472 xdp_prog = READ_ONCE(rx_ring->xdp_prog); in ice_run_xdp_zc()
477 err = xdp_do_redirect(rx_ring->netdev, xdp, xdp_prog); in ice_run_xdp_zc()
487 xdp_ring = rx_ring->vsi->xdp_rings[rx_ring->q_index]; in ice_run_xdp_zc()
497 trace_xdp_exception(rx_ring->netdev, xdp_prog, act); in ice_run_xdp_zc()
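The ice_run_xdp_zc() references above (lines 462-497) show the verdict handling: the XDP program is read with READ_ONCE(), XDP_REDIRECT goes through xdp_do_redirect(), XDP_TX uses the paired ring at vsi->xdp_rings[rx_ring->q_index], and failures raise trace_xdp_exception(). A simplified, self-contained model of that switch follows; the verdict names, result flags and helpers are stand-ins, not the driver's or the kernel's types.

/*
 * Simplified model of the verdict switch. handle_redirect() and
 * handle_tx() stand in for xdp_do_redirect() and the XDP Tx path.
 */
#include <stdio.h>

enum verdict { V_ABORTED, V_DROP, V_PASS, V_TX, V_REDIRECT };

#define RES_PASS     1   /* hand the frame to the network stack as an skb */
#define RES_CONSUMED 2   /* frame handled by XDP (tx, redirect or drop) */
#define RES_FAIL     4   /* redirect/tx failed; treated like a drop */

static int handle_redirect(void) { return 0; }            /* 0 == success */
static int handle_tx(void)       { return RES_CONSUMED; }

static int run_xdp(enum verdict act)
{
	switch (act) {
	case V_PASS:
		return RES_PASS;
	case V_REDIRECT:
		return handle_redirect() ? RES_FAIL : RES_CONSUMED;
	case V_TX:
		return handle_tx();
	case V_ABORTED:
		/* the driver calls trace_xdp_exception() before dropping */
	case V_DROP:
	default:
		return RES_CONSUMED;
	}
}

int main(void)
{
	printf("redirect -> %d\n", run_xdp(V_REDIRECT));
	printf("pass     -> %d\n", run_xdp(V_PASS));
	return 0;
}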
514 int ice_clean_rx_irq_zc(struct ice_ring *rx_ring, int budget) in ice_clean_rx_irq_zc() argument
517 u16 cleaned_count = ICE_DESC_UNUSED(rx_ring); in ice_clean_rx_irq_zc()
530 rx_desc = ICE_RX_DESC(rx_ring, rx_ring->next_to_clean); in ice_clean_rx_irq_zc()
547 rx_buf = &rx_ring->rx_buf[rx_ring->next_to_clean]; in ice_clean_rx_irq_zc()
549 xsk_buff_dma_sync_for_cpu(rx_buf->xdp, rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
551 xdp_res = ice_run_xdp_zc(rx_ring, rx_buf->xdp); in ice_clean_rx_irq_zc()
563 ice_bump_ntc(rx_ring); in ice_clean_rx_irq_zc()
568 skb = ice_construct_skb_zc(rx_ring, rx_buf); in ice_clean_rx_irq_zc()
570 rx_ring->rx_stats.alloc_buf_failed++; in ice_clean_rx_irq_zc()
575 ice_bump_ntc(rx_ring); in ice_clean_rx_irq_zc()
592 ice_process_skb_fields(rx_ring, rx_desc, skb, rx_ptype); in ice_clean_rx_irq_zc()
593 ice_receive_skb(rx_ring, skb, vlan_tag); in ice_clean_rx_irq_zc()
597 failure = !ice_alloc_rx_bufs_zc(rx_ring, cleaned_count); in ice_clean_rx_irq_zc()
599 ice_finalize_xdp_rx(rx_ring, xdp_xmit); in ice_clean_rx_irq_zc()
600 ice_update_rx_ring_stats(rx_ring, total_rx_packets, total_rx_bytes); in ice_clean_rx_irq_zc()
602 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ice_clean_rx_irq_zc()
603 if (failure || rx_ring->next_to_clean == rx_ring->next_to_use) in ice_clean_rx_irq_zc()
604 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
606 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
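Lines 514-606 above sketch the shape of ice_clean_rx_irq_zc(): loop up to the NAPI budget, run XDP on each ready descriptor, build an skb only for frames XDP passes up, then refill the consumed descriptors, finalize XDP transmits, update stats, and manage the xsk need_wakeup flag. A much-simplified userspace model of that control flow follows; rx_desc_ready(), run_xdp_consumed() and build_skb_and_receive() are stand-ins for the descriptor done check, ice_run_xdp_zc() and the ice_construct_skb_zc()/ice_receive_skb() pair, and the real cleaned_count starts from ICE_DESC_UNUSED() rather than a per-frame counter.

/*
 * Much-simplified model of the poll loop; byte counting, VLAN tags and
 * the need_wakeup handling are left out.
 */
#include <stdbool.h>
#include <stdio.h>

static bool rx_desc_ready(int idx)         { return idx < 3; }  /* pretend 3 frames arrived */
static bool run_xdp_consumed(int idx)      { return idx == 0; } /* frame 0 is redirected/dropped */
static bool build_skb_and_receive(int idx) { (void)idx; return true; }

static int clean_rx_irq(int *next_to_clean, int ring_count, int budget)
{
	int total_rx_packets = 0;
	int cleaned = 0;                /* slots consumed, to be refilled afterwards */

	while (total_rx_packets < budget) {
		int ntc = *next_to_clean;

		if (!rx_desc_ready(ntc))
			break;

		cleaned++;
		*next_to_clean = (ntc + 1 < ring_count) ? ntc + 1 : 0;

		if (run_xdp_consumed(ntc)) {
			/* XDP_TX / XDP_REDIRECT / XDP_DROP: no skb is built */
			total_rx_packets++;
			continue;
		}

		if (!build_skb_and_receive(ntc))
			break;          /* skb allocation failed; retry on the next poll */
		total_rx_packets++;
	}

	printf("refill %d descriptors, update stats\n", cleaned);
	return total_rx_packets;
}

int main(void)
{
	int ntc = 0;

	printf("processed %d frames\n", clean_rx_irq(&ntc, 512, 64));
	return 0;
}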
811 void ice_xsk_clean_rx_ring(struct ice_ring *rx_ring) in ice_xsk_clean_rx_ring() argument
815 for (i = 0; i < rx_ring->count; i++) { in ice_xsk_clean_rx_ring()
816 struct ice_rx_buf *rx_buf = &rx_ring->rx_buf[i]; in ice_xsk_clean_rx_ring()
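The listing ends inside ice_xsk_clean_rx_ring() (lines 811-816), which walks every rx_buf slot in the ring on teardown. The per-slot handling is not part of the listing above, so the small model below only assumes that still-populated slots are released and cleared; free_buf() is a stand-in, not a driver helper.

/* Minimal model of the teardown walk over the Rx buffer array. */
#include <stdio.h>

#define RING_SIZE 4

struct rx_buf {
	void *xdp;      /* stand-in for the xsk buffer attached to the slot */
};

static void free_buf(struct rx_buf *b)
{
	if (!b->xdp)
		return;     /* slot already empty */
	b->xdp = NULL;      /* release the buffer back to its pool */
}

int main(void)
{
	static char dummy;
	struct rx_buf bufs[RING_SIZE] = { { &dummy }, { NULL }, { &dummy }, { NULL } };
	int i;

	for (i = 0; i < RING_SIZE; i++)
		free_buf(&bufs[i]);
	printf("ring drained\n");
	return 0;
}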