Home
last modified time | relevance | path

Searched refs: xsk_pool (Results 1 – 21 of 21) sorted by relevance

/Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
Drx.h25 dma_info->xsk = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_page_alloc_pool()
41 if (!xsk_uses_need_wakeup(rq->xsk_pool)) in mlx5e_xsk_update_rx_wakeup()
45 xsk_set_rx_need_wakeup(rq->xsk_pool); in mlx5e_xsk_update_rx_wakeup()
47 xsk_clear_rx_need_wakeup(rq->xsk_pool); in mlx5e_xsk_update_rx_wakeup()
Dtx.h18 if (!xsk_uses_need_wakeup(sq->xsk_pool)) in mlx5e_xsk_update_tx_wakeup()
22 xsk_clear_tx_need_wakeup(sq->xsk_pool); in mlx5e_xsk_update_tx_wakeup()
24 xsk_set_tx_need_wakeup(sq->xsk_pool); in mlx5e_xsk_update_tx_wakeup()
Drx.c50 xsk_buff_dma_sync_for_cpu(xdp, rq->xsk_pool); in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
96 xsk_buff_dma_sync_for_cpu(xdp, rq->xsk_pool); in mlx5e_xsk_skb_from_cqe_linear()
Dtx.c69 struct xsk_buff_pool *pool = sq->xsk_pool; in mlx5e_xsk_tx()
/Linux-v5.10/drivers/net/ethernet/intel/i40e/
Di40e_xsk.c198 xdp = xsk_buff_alloc(rx_ring->xsk_pool); in i40e_alloc_rx_buffers_zc()
322 xsk_buff_dma_sync_for_cpu(*bi, rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
373 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in i40e_clean_rx_irq_zc()
375 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
377 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
400 if (!xsk_tx_peek_desc(xdp_ring->xsk_pool, &desc)) in i40e_xmit_zc()
403 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc.addr); in i40e_xmit_zc()
404 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, in i40e_xmit_zc()
431 xsk_tx_release(xdp_ring->xsk_pool); in i40e_xmit_zc()
463 struct xsk_buff_pool *bp = tx_ring->xsk_pool; in i40e_clean_xdp_tx_irq()
[all …]
Di40e_txrx.h391 struct xsk_buff_pool *xsk_pool; member
Di40e_txrx.c639 if (ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in i40e_clean_tx_ring()
1338 if (rx_ring->xsk_pool) { in i40e_clean_rx_ring()
1372 if (rx_ring->xsk_pool) in i40e_clean_rx_ring()
2603 bool wd = ring->xsk_pool ? in i40e_napi_poll()
2631 int cleaned = ring->xsk_pool ? in i40e_napi_poll()
Di40e_main.c3163 ring->xsk_pool = i40e_xsk_pool(ring); in i40e_configure_tx_ring()
3286 ring->xsk_pool = i40e_xsk_pool(ring); in i40e_configure_rx_ring()
3287 if (ring->xsk_pool) { in i40e_configure_rx_ring()
3292 xsk_pool_get_rx_frame_size(ring->xsk_pool); in i40e_configure_rx_ring()
3375 if (ring->xsk_pool) { in i40e_configure_rx_ring()
3376 xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq); in i40e_configure_rx_ring()
3387 ring->xsk_pool ? "AF_XDP ZC enabled " : "", in i40e_configure_rx_ring()
12484 if (vsi->xdp_rings[i]->xsk_pool) in i40e_xdp_setup()
/Linux-v5.10/drivers/net/ethernet/intel/ice/
Dice_xsk.c239 xdp_ring->xsk_pool = ice_xsk_pool(xdp_ring); in ice_qp_ena()
428 rx_buf->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ice_alloc_rx_bufs_zc()
598 xsk_buff_dma_sync_for_cpu(rx_buf->xdp, rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
648 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ice_clean_rx_irq_zc()
650 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
652 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
685 if (!xsk_tx_peek_desc(xdp_ring->xsk_pool, &desc)) in ice_xmit_zc()
688 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc.addr); in ice_xmit_zc()
689 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, in ice_xmit_zc()
706 xsk_tx_release(xdp_ring->xsk_pool); in ice_xmit_zc()
[all …]
Dice_base.c311 ring->xsk_pool = ice_xsk_pool(ring); in ice_setup_rx_ctx()
312 if (ring->xsk_pool) { in ice_setup_rx_ctx()
316 xsk_pool_get_rx_frame_size(ring->xsk_pool); in ice_setup_rx_ctx()
327 xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq); in ice_setup_rx_ctx()
420 if (ring->xsk_pool) { in ice_setup_rx_ctx()
421 if (!xsk_buff_can_alloc(ring->xsk_pool, num_bufs)) { in ice_setup_rx_ctx()
Dice_txrx.h298 struct xsk_buff_pool *xsk_pool; member
Dice_txrx.c148 if (ice_ring_is_xdp(tx_ring) && tx_ring->xsk_pool) { in ice_clean_tx_ring()
378 if (rx_ring->xsk_pool) { in ice_clean_rx_ring()
1626 bool wd = ring->xsk_pool ? in ice_napi_poll()
1656 cleaned = ring->xsk_pool ? in ice_napi_poll()
Dice_lib.c1759 vsi->xdp_rings[i]->xsk_pool = ice_xsk_pool(vsi->xdp_rings[i]); in ice_vsi_cfg_xdp_txqs()
Dice_main.c2284 xdp_ring->xsk_pool = ice_xsk_pool(xdp_ring); in ice_xdp_alloc_setup_rings()
2534 if (rx_ring->xsk_pool) in ice_xdp_setup_prog()
/Linux-v5.10/drivers/net/ethernet/intel/ixgbe/
Dixgbe_xsk.c153 bi->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ixgbe_alloc_rx_buffers_zc()
290 xsk_buff_dma_sync_for_cpu(bi->xdp, rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
348 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ixgbe_clean_rx_irq_zc()
350 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
352 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
377 struct xsk_buff_pool *pool = xdp_ring->xsk_pool; in ixgbe_xmit_zc()
443 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_clean_xdp_tx_irq()
515 if (!ring->xsk_pool) in ixgbe_xsk_wakeup()
530 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_xsk_clean_tx_ring()
Dixgbe_main.c3164 bool wd = ring->xsk_pool ? in ixgbe_poll()
3184 int cleaned = ring->xsk_pool ? in ixgbe_poll()
3479 ring->xsk_pool = NULL; in ixgbe_configure_tx_ring()
3481 ring->xsk_pool = ixgbe_xsk_pool(adapter, ring); in ixgbe_configure_tx_ring()
3721 if (rx_ring->xsk_pool) { in ixgbe_configure_srrctl()
3722 u32 xsk_buf_len = xsk_pool_get_rx_frame_size(rx_ring->xsk_pool); in ixgbe_configure_srrctl()
4067 ring->xsk_pool = ixgbe_xsk_pool(adapter, ring); in ixgbe_configure_rx_ring()
4068 if (ring->xsk_pool) { in ixgbe_configure_rx_ring()
4072 xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq); in ixgbe_configure_rx_ring()
4127 if (ring->xsk_pool && hw->mac.type != ixgbe_mac_82599EB) { in ixgbe_configure_rx_ring()
[all …]
Dixgbe.h353 struct xsk_buff_pool *xsk_pool; member
/Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/
Den.h459 struct xsk_buff_pool *xsk_pool; member
623 struct xsk_buff_pool *xsk_pool; member
912 struct xsk_buff_pool *xsk_pool, struct mlx5e_rq *rq);
922 struct mlx5e_sq_param *param, struct xsk_buff_pool *xsk_pool,
Den_main.c396 struct xsk_buff_pool *xsk_pool, in mlx5e_alloc_rq() argument
422 rq->xsk_pool = xsk_pool; in mlx5e_alloc_rq()
424 if (rq->xsk_pool) in mlx5e_alloc_rq()
514 xsk_pool_set_rxq_info(rq->xsk_pool, &rq->xdp_rxq); in mlx5e_alloc_rq()
864 struct xsk_buff_pool *xsk_pool, struct mlx5e_rq *rq) in mlx5e_open_rq() argument
868 err = mlx5e_alloc_rq(c, params, xsk, xsk_pool, param, rq); in mlx5e_open_rq()
980 struct xsk_buff_pool *xsk_pool, in mlx5e_alloc_xdpsq() argument
996 sq->xsk_pool = xsk_pool; in mlx5e_alloc_xdpsq()
998 sq->stats = sq->xsk_pool ? in mlx5e_alloc_xdpsq()
1472 struct mlx5e_sq_param *param, struct xsk_buff_pool *xsk_pool, in mlx5e_open_xdpsq() argument
[all …]
Den_rx.c293 if (rq->xsk_pool) in mlx5e_page_alloc()
325 if (rq->xsk_pool) in mlx5e_page_release()
412 if (rq->xsk_pool) { in mlx5e_alloc_rx_wqes()
419 if (unlikely(!xsk_buff_can_alloc(rq->xsk_pool, pages_desired))) in mlx5e_alloc_rx_wqes()
517 if (rq->xsk_pool && in mlx5e_alloc_rx_mpwqe()
518 unlikely(!xsk_buff_can_alloc(rq->xsk_pool, MLX5_MPWRQ_PAGES_PER_WQE))) { in mlx5e_alloc_rx_mpwqe()
766 if (unlikely(alloc_err == -ENOMEM && rq->xsk_pool)) in mlx5e_post_rx_mpwqes()
/Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/en/
Dxdp.c451 xsk_tx_completed(sq->xsk_pool, xsk_frames); in mlx5e_poll_xdpsq_cq()
481 xsk_tx_completed(sq->xsk_pool, xsk_frames); in mlx5e_free_xdpsq_descs()