
Searched refs:xdpf (Results 1 – 24 of 24) sorted by relevance

/Linux-v5.4/kernel/bpf/
cpumap.c
164 struct xdp_frame *xdpf, in cpu_map_build_skb() argument
172 hard_start_headroom = sizeof(struct xdp_frame) + xdpf->headroom; in cpu_map_build_skb()
191 frame_size = SKB_DATA_ALIGN(xdpf->len + hard_start_headroom) + in cpu_map_build_skb()
194 pkt_data_start = xdpf->data - hard_start_headroom; in cpu_map_build_skb()
200 __skb_put(skb, xdpf->len); in cpu_map_build_skb()
201 if (xdpf->metasize) in cpu_map_build_skb()
202 skb_metadata_set(skb, xdpf->metasize); in cpu_map_build_skb()
205 skb->protocol = eth_type_trans(skb, xdpf->dev_rx); in cpu_map_build_skb()
214 xdp_release_frame(xdpf); in cpu_map_build_skb()
217 xdp_scrub_frame(xdpf); in cpu_map_build_skb()
[all …]
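
Pieced together, the hits above show how cpu_map_build_skb() turns a redirected xdp_frame back into an sk_buff without copying: the struct xdp_frame header itself lives at the start of the packet headroom, so the original buffer can be handed to build_skb() wholesale. A condensed sketch of that construction (error handling and stats omitted; field names as in v5.4):

    /* The xdp_frame header sits at the very start of the buffer,
     * so the true hard start is that header plus the headroom. */
    hard_start_headroom = sizeof(struct xdp_frame) + xdpf->headroom;
    frame_size = SKB_DATA_ALIGN(xdpf->len + hard_start_headroom) +
                 SKB_DATA_ALIGN(sizeof(struct skb_shared_info));
    pkt_data_start = xdpf->data - hard_start_headroom;

    skb = build_skb(pkt_data_start, frame_size);   /* wraps, no copy */
    skb_reserve(skb, hard_start_headroom);
    __skb_put(skb, xdpf->len);
    if (xdpf->metasize)
        skb_metadata_set(skb, xdpf->metasize);
    skb->protocol = eth_type_trans(skb, xdpf->dev_rx);

    /* The buffer now belongs to the skb: drop any page_pool DMA
     * state, then poison the xdp_frame header so it cannot be
     * used again. */
    xdp_release_frame(xdpf);
    xdp_scrub_frame(xdpf);
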
devmap.c
342 struct xdp_frame *xdpf = bq->q[i]; in bq_xmit_all() local
344 prefetch(xdpf); in bq_xmit_all()
367 struct xdp_frame *xdpf = bq->q[i]; in bq_xmit_all() local
371 xdp_return_frame_rx_napi(xdpf); in bq_xmit_all()
373 xdp_return_frame(xdpf); in bq_xmit_all()
417 static int bq_enqueue(struct bpf_dtab_netdev *obj, struct xdp_frame *xdpf, in bq_enqueue() argument
434 bq->q[bq->count++] = xdpf; in bq_enqueue()
446 struct xdp_frame *xdpf; in dev_map_enqueue() local
456 xdpf = convert_to_xdp_frame(xdp); in dev_map_enqueue()
457 if (unlikely(!xdpf)) in dev_map_enqueue()
[all …]
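
The devmap hits outline the per-CPU bulk queue: dev_map_enqueue() converts the xdp_buff and parks the resulting xdp_frame in bq->q[], and bq_xmit_all() later drains the whole batch through a single ndo_xdp_xmit() call, handing back any frames the driver refused. Schematically (locking, flush-list handling, and tracepoints omitted; the in_napi_ctx flag is what selects between the two return variants seen at hits 371/373):

    if (unlikely(bq->count == DEV_MAP_BULK_SIZE))
        bq_xmit_all(bq, 0, true);         /* drain before overflow */
    bq->q[bq->count++] = xdpf;            /* enqueue only, no xmit yet */

    /* ... later, on flush ... */
    for (i = 0; i < bq->count; i++)
        prefetch(bq->q[i]);               /* warm the frame headers */

    sent = dev->netdev_ops->ndo_xdp_xmit(dev, bq->count, bq->q, flags);
    if (sent < 0)
        sent = 0;                         /* whole batch rejected */

    /* Frames the driver did not accept are still ours to free. */
    for (i = sent; i < bq->count; i++) {
        if (likely(in_napi_ctx))
            xdp_return_frame_rx_napi(bq->q[i]);
        else
            xdp_return_frame(bq->q[i]);
    }
    bq->count = 0;
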
/Linux-v5.4/net/core/
xdp.c
436 void xdp_return_frame(struct xdp_frame *xdpf) in xdp_return_frame() argument
438 __xdp_return(xdpf->data, &xdpf->mem, false, 0); in xdp_return_frame()
442 void xdp_return_frame_rx_napi(struct xdp_frame *xdpf) in xdp_return_frame_rx_napi() argument
444 __xdp_return(xdpf->data, &xdpf->mem, true, 0); in xdp_return_frame_rx_napi()
504 struct xdp_frame *xdpf; in xdp_convert_zc_to_xdp_frame() local
512 if (sizeof(*xdpf) + totsize > PAGE_SIZE) in xdp_convert_zc_to_xdp_frame()
520 xdpf = addr; in xdp_convert_zc_to_xdp_frame()
521 memset(xdpf, 0, sizeof(*xdpf)); in xdp_convert_zc_to_xdp_frame()
523 addr += sizeof(*xdpf); in xdp_convert_zc_to_xdp_frame()
527 xdpf->data = addr + metasize; in xdp_convert_zc_to_xdp_frame()
[all …]
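
The xdp_convert_zc_to_xdp_frame() hits describe the one case where conversion must copy: a zero-copy (AF_XDP umem) buffer cannot leave the umem, so the frame is cloned into a freshly allocated page with the xdp_frame header carved out of its start. A sketch of the layout it builds, reconstructed from the hits (allocation-failure paths omitted):

    /* Target layout, everything in one page:
     *   [ struct xdp_frame | metadata (metasize) | packet (len) ]
     */
    metasize = xdp_data_meta_unsupported(xdp) ? 0 :
               xdp->data - xdp->data_meta;
    totsize  = xdp->data_end - xdp->data + metasize;

    if (sizeof(*xdpf) + totsize > PAGE_SIZE)
        return NULL;                      /* frame would not fit */

    addr = page_to_virt(dev_alloc_page());
    xdpf = addr;
    memset(xdpf, 0, sizeof(*xdpf));       /* clean header at page start */

    addr += sizeof(*xdpf);
    memcpy(addr, xdp->data_meta, totsize);/* meta + data in one copy */

    xdpf->data     = addr + metasize;
    xdpf->len      = totsize - metasize;
    xdpf->headroom = 0;                   /* all headroom was consumed */
    xdpf->metasize = metasize;
    xdpf->mem.type = MEM_TYPE_PAGE_ORDER0;
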
filter.c
3465 struct xdp_frame *xdpf; in __bpf_tx_xdp() local
3476 xdpf = convert_to_xdp_frame(xdp); in __bpf_tx_xdp()
3477 if (unlikely(!xdpf)) in __bpf_tx_xdp()
3480 sent = dev->netdev_ops->ndo_xdp_xmit(dev, 1, &xdpf, XDP_XMIT_FLUSH); in __bpf_tx_xdp()
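
This __bpf_tx_xdp() fragment is the canonical XDP_REDIRECT handoff: convert in place, then transmit a one-frame batch with an immediate flush. In the non-zero-copy case convert_to_xdp_frame() allocates nothing; it writes the struct xdp_frame header into the buffer's own headroom, and returns NULL only when that headroom is too small. Filled out slightly:

    xdpf = convert_to_xdp_frame(xdp);
    if (unlikely(!xdpf))
        return -EOVERFLOW;                /* headroom too small */

    /* Batch of one, flushed immediately; ndo_xdp_xmit() returns
     * the number of frames the driver actually accepted. */
    sent = dev->netdev_ops->ndo_xdp_xmit(dev, 1, &xdpf, XDP_XMIT_FLUSH);
    if (sent <= 0)
        return sent;
    return 0;
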
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/en/
xdp.c
64 struct xdp_frame *xdpf; in mlx5e_xmit_xdp_buff() local
67 xdpf = convert_to_xdp_frame(xdp); in mlx5e_xmit_xdp_buff()
68 if (unlikely(!xdpf)) in mlx5e_xmit_xdp_buff()
71 xdptxd.data = xdpf->data; in mlx5e_xmit_xdp_buff()
72 xdptxd.len = xdpf->len; in mlx5e_xmit_xdp_buff()
92 xdp_return_frame(xdpf); in mlx5e_xmit_xdp_buff()
97 xdpi.frame.xdpf = xdpf; in mlx5e_xmit_xdp_buff()
108 dma_addr = di->addr + (xdpf->data - (void *)xdpf); in mlx5e_xmit_xdp_buff()
370 xdpi.frame.xdpf->len, DMA_TO_DEVICE); in mlx5e_free_xdpsq_desc()
371 xdp_return_frame(xdpi.frame.xdpf); in mlx5e_free_xdpsq_desc()
[all …]
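
The interesting mlx5 line is hit 108: for an XDP_TX frame that still sits in one of the driver's own RX pages, no new DMA mapping is made. Because the xdp_frame header occupies the start of the buffer, (xdpf->data - (void *)xdpf) is exactly the packet's offset within the mapped region, so the device address is the RX mapping's base plus that offset. A sketch of that branch (di is the RX buffer's DMA info; a sync replaces the map):

    /* XDP_TX: the packet still sits in one of our RX pages, which
     * is already DMA-mapped; derive the device address from the
     * mapping base plus the packet's offset inside the buffer. */
    dma_addr = di->addr + (xdpf->data - (void *)xdpf);
    dma_sync_single_for_device(sq->pdev, dma_addr, xdptxd.len,
                               DMA_TO_DEVICE);
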
/Linux-v5.4/include/net/
xdp.h
128 void xdp_return_frame(struct xdp_frame *xdpf);
129 void xdp_return_frame_rx_napi(struct xdp_frame *xdpf);
138 static inline void xdp_release_frame(struct xdp_frame *xdpf) in xdp_release_frame() argument
140 struct xdp_mem_info *mem = &xdpf->mem; in xdp_release_frame()
144 __xdp_release_frame(xdpf->data, mem); in xdp_release_frame()
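
With the elided check restored, the xdp_release_frame() inline reduces to a single memory-type test: only page_pool-backed frames carry per-frame DMA state that has to be released early (the cpumap skb-build path above is a caller). Per v5.4:

    static inline void xdp_release_frame(struct xdp_frame *xdpf)
    {
        struct xdp_mem_info *mem = &xdpf->mem;

        /* Currently only page_pool needs this */
        if (mem->type == MEM_TYPE_PAGE_POOL)
            __xdp_release_frame(xdpf->data, mem);
    }
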
/Linux-v5.4/drivers/net/ethernet/intel/ixgbe/
ixgbe_xsk.c
149 struct xdp_frame *xdpf; in ixgbe_run_xdp_zc() local
164 xdpf = convert_to_xdp_frame(xdp); in ixgbe_run_xdp_zc()
165 if (unlikely(!xdpf)) { in ixgbe_run_xdp_zc()
169 result = ixgbe_xmit_xdp_ring(adapter, xdpf); in ixgbe_run_xdp_zc()
602 tx_bi->xdpf = NULL; in ixgbe_xmit_zc()
633 xdp_return_frame(tx_bi->xdpf); in ixgbe_clean_xdp_tx_buffer()
660 if (tx_bi->xdpf) in ixgbe_clean_xdp_tx_irq()
665 tx_bi->xdpf = NULL; in ixgbe_clean_xdp_tx_irq()
735 if (tx_bi->xdpf) in ixgbe_xsk_clean_tx_ring()
740 tx_bi->xdpf = NULL; in ixgbe_xsk_clean_tx_ring()
ixgbe_txrx_common.h
16 struct xdp_frame *xdpf);
ixgbe_main.c
1154 xdp_return_frame(tx_buffer->xdpf); in ixgbe_clean_tx_irq()
2202 struct xdp_frame *xdpf; in ixgbe_run_xdp() local
2218 xdpf = convert_to_xdp_frame(xdp); in ixgbe_run_xdp()
2219 if (unlikely(!xdpf)) { in ixgbe_run_xdp()
2223 result = ixgbe_xmit_xdp_ring(adapter, xdpf); in ixgbe_run_xdp()
5974 xdp_return_frame(tx_buffer->xdpf); in ixgbe_clean_tx_ring()
8532 struct xdp_frame *xdpf) in ixgbe_xmit_xdp_ring() argument
8541 len = xdpf->len; in ixgbe_xmit_xdp_ring()
8546 dma = dma_map_single(ring->dev, xdpf->data, len, DMA_TO_DEVICE); in ixgbe_xmit_xdp_ring()
8561 tx_buffer->xdpf = xdpf; in ixgbe_xmit_xdp_ring()
[all …]
ixgbe.h
216 struct xdp_frame *xdpf; member
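
Taken together, the ixgbe hits trace the full lifetime of a transmitted frame, and the same shape recurs nearly verbatim in the bnxt and i40e blocks below: map the frame's data for the device, park the xdp_frame pointer in the ring's per-slot buffer info, and return the frame only when TX completion (or ring teardown) fires. Condensed from ixgbe_xmit_xdp_ring() and ixgbe_clean_tx_irq():

    len = xdpf->len;
    dma = dma_map_single(ring->dev, xdpf->data, len, DMA_TO_DEVICE);
    if (dma_mapping_error(ring->dev, dma))
        return IXGBE_XDP_CONSUMED;        /* drop: caller frees the frame */

    /* Stash the frame so the completion IRQ knows what to free. */
    tx_buffer = &ring->tx_buffer_info[ring->next_to_use];
    tx_buffer->xdpf = xdpf;
    /* ... descriptor setup and tail bump ... */

    /* Later, in ixgbe_clean_tx_irq() / ixgbe_clean_tx_ring(): */
    xdp_return_frame(tx_buffer->xdpf);
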
/Linux-v5.4/drivers/net/ethernet/broadcom/bnxt/
bnxt_xdp.c
60 struct xdp_frame *xdpf) in __bnxt_xmit_xdp_redirect() argument
66 tx_buf->xdpf = xdpf; in __bnxt_xmit_xdp_redirect()
91 xdp_return_frame(tx_buf->xdpf); in bnxt_tx_int_xdp()
93 tx_buf->xdpf = NULL; in bnxt_tx_int_xdp()
bnxt.h
685 struct xdp_frame *xdpf; member
bnxt.c
2489 xdp_return_frame(tx_buf->xdpf); in bnxt_free_tx_skbs()
2491 tx_buf->xdpf = NULL; in bnxt_free_tx_skbs()
/Linux-v5.4/drivers/net/ethernet/ti/
cpsw.c
358 static void *cpsw_xdpf_to_handle(struct xdp_frame *xdpf) in cpsw_xdpf_to_handle() argument
360 return (void *)((unsigned long)xdpf | BIT(0)); in cpsw_xdpf_to_handle()
376 struct xdp_frame *xdpf; in cpsw_tx_handler() local
383 xdpf = cpsw_handle_to_xdpf(token); in cpsw_tx_handler()
384 xmeta = (void *)xdpf + CPSW_XMETA_OFFSET; in cpsw_tx_handler()
387 xdp_return_frame(xdpf); in cpsw_tx_handler()
450 static int cpsw_xdp_tx_frame(struct cpsw_priv *priv, struct xdp_frame *xdpf, in cpsw_xdp_tx_frame() argument
459 xmeta = (void *)xdpf + CPSW_XMETA_OFFSET; in cpsw_xdp_tx_frame()
467 dma += xdpf->headroom + sizeof(struct xdp_frame); in cpsw_xdp_tx_frame()
468 ret = cpdma_chan_submit_mapped(txch, cpsw_xdpf_to_handle(xdpf), in cpsw_xdp_tx_frame()
[all …]
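
cpsw funnels both skbs and xdp_frames through the same CPDMA completion token, so it tags the pointer's low bit to tell them apart; both object types are at least word-aligned, which leaves bit 0 free. The encode side is hit 360; the decode used by cpsw_tx_handler() is its mirror image (reconstructed to match v5.4):

    static void *cpsw_xdpf_to_handle(struct xdp_frame *xdpf)
    {
        return (void *)((unsigned long)xdpf | BIT(0));   /* tag: XDP */
    }

    static struct xdp_frame *cpsw_handle_to_xdpf(void *handle)
    {
        return (struct xdp_frame *)((unsigned long)handle & ~BIT(0));
    }

Hit 467 is the same buffer-layout fact used by cpumap and mlx5 above: the packet data begins sizeof(struct xdp_frame) + xdpf->headroom bytes into the buffer, so the DMA address is advanced by exactly that much.
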
/Linux-v5.4/drivers/net/ethernet/socionext/
netsec.c
271 struct xdp_frame *xdpf; member
664 xdp_return_frame(desc->xdpf); in netsec_clean_tx_dring()
806 dring->desc[idx].xdpf = buf; in netsec_set_tx_de()
814 struct xdp_frame *xdpf, bool is_ndo) in netsec_xdp_queue_one() argument
818 struct page *page = virt_to_page(xdpf->data); in netsec_xdp_queue_one()
836 dma_handle = dma_map_single(priv->dev, xdpf->data, xdpf->len, in netsec_xdp_queue_one()
852 dma_sync_single_for_device(priv->dev, dma_handle, xdpf->len, in netsec_xdp_queue_one()
858 tx_desc.addr = xdpf->data; in netsec_xdp_queue_one()
859 tx_desc.len = xdpf->len; in netsec_xdp_queue_one()
861 netsec_set_tx_de(priv, tx_ring, &tx_ctrl, &tx_desc, xdpf); in netsec_xdp_queue_one()
[all …]
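
netsec's is_ndo flag captures a distinction most XDP drivers make: a frame arriving through ndo_xdp_xmit() lives in some other device's memory and needs a fresh dma_map_single(), while a local XDP_TX frame sits in one of the driver's own page_pool pages that is already mapped, needing only a sync. A sketch of that branch; the offset arithmetic in the sync path is schematic (the driver spells it via its own headroom constant):

    if (is_ndo) {
        /* Redirected frame: foreign memory, map it ourselves. */
        dma_handle = dma_map_single(priv->dev, xdpf->data, xdpf->len,
                                    DMA_TO_DEVICE);
        if (dma_mapping_error(priv->dev, dma_handle))
            return NETSEC_XDP_CONSUMED;
    } else {
        /* XDP_TX: our own page_pool RX page, already mapped. */
        dma_handle = page_pool_get_dma_addr(page) +
                     (xdpf->data - page_address(page));
        dma_sync_single_for_device(priv->dev, dma_handle, xdpf->len,
                                   DMA_TO_DEVICE);
    }
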
/Linux-v5.4/drivers/net/ethernet/intel/i40e/
i40e_xsk.c
705 xdp_return_frame(tx_bi->xdpf); in i40e_clean_xdp_tx_buffer()
747 if (tx_bi->xdpf) in i40e_clean_xdp_tx_irq()
752 tx_bi->xdpf = NULL; in i40e_clean_xdp_tx_irq()
847 if (tx_bi->xdpf) in i40e_xsk_clean_tx_ring()
852 tx_bi->xdpf = NULL; in i40e_xsk_clean_tx_ring()
i40e_txrx.c
608 xdp_return_frame(tx_buffer->xdpf); in i40e_unmap_and_free_tx_resource()
814 xdp_return_frame(tx_buf->xdpf); in i40e_clean_tx_irq()
2178 static int i40e_xmit_xdp_ring(struct xdp_frame *xdpf,
2183 struct xdp_frame *xdpf = convert_to_xdp_frame(xdp); in i40e_xmit_xdp_tx_ring() local
2185 if (unlikely(!xdpf)) in i40e_xmit_xdp_tx_ring()
2188 return i40e_xmit_xdp_ring(xdpf, xdp_ring); in i40e_xmit_xdp_tx_ring()
3503 static int i40e_xmit_xdp_ring(struct xdp_frame *xdpf, in i40e_xmit_xdp_ring() argument
3509 void *data = xdpf->data; in i40e_xmit_xdp_ring()
3510 u32 size = xdpf->len; in i40e_xmit_xdp_ring()
3524 tx_bi->xdpf = xdpf; in i40e_xmit_xdp_ring()
[all …]
i40e_txrx.h
285 struct xdp_frame *xdpf; member
/Linux-v5.4/drivers/net/
virtio_net.c
448 struct xdp_frame *xdpf) in __virtnet_xdp_xmit_one() argument
454 if (unlikely(xdpf->metasize > 0)) in __virtnet_xdp_xmit_one()
457 if (unlikely(xdpf->headroom < vi->hdr_len)) in __virtnet_xdp_xmit_one()
461 xdpf->data -= vi->hdr_len; in __virtnet_xdp_xmit_one()
463 hdr = xdpf->data; in __virtnet_xdp_xmit_one()
465 xdpf->len += vi->hdr_len; in __virtnet_xdp_xmit_one()
467 sg_init_one(sq->sg, xdpf->data, xdpf->len); in __virtnet_xdp_xmit_one()
469 err = virtqueue_add_outbuf(sq->vq, sq->sg, 1, xdp_to_ptr(xdpf), in __virtnet_xdp_xmit_one()
533 struct xdp_frame *xdpf = frames[i]; in virtnet_xdp_xmit() local
535 err = __virtnet_xdp_xmit_one(vi, sq, xdpf); in virtnet_xdp_xmit()
[all …]
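
virtio-net must prepend a virtio_net_hdr to every transmitted buffer, and the hits show it stealing that space from the frame's remaining headroom rather than copying. The body of __virtnet_xdp_xmit_one(), lightly condensed (the xdp_to_ptr() tag marks the outbuf as an XDP frame for the TX-completion path):

    /* The header must go in front of the packet, so there is no
     * room for metadata and enough headroom must remain. */
    if (unlikely(xdpf->metasize > 0))
        return -EOPNOTSUPP;
    if (unlikely(xdpf->headroom < vi->hdr_len))
        return -EOVERFLOW;

    /* Grow the frame backwards into its own headroom. */
    xdpf->data -= vi->hdr_len;
    hdr = xdpf->data;
    memset(hdr, 0, vi->hdr_len);          /* no csum/gso offload */
    xdpf->len += vi->hdr_len;

    sg_init_one(sq->sg, xdpf->data, xdpf->len);
    err = virtqueue_add_outbuf(sq->vq, sq->sg, 1, xdp_to_ptr(xdpf),
                               GFP_ATOMIC);
    if (unlikely(err))
        return -ENOSPC;                   /* caller frees the frame */
    return 0;
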
tun.c
657 struct xdp_frame *xdpf = tun_ptr_to_xdp(ptr); in tun_ptr_free() local
659 xdp_return_frame(xdpf); in tun_ptr_free()
2221 struct xdp_frame *xdpf = tun_ptr_to_xdp(ptr); in tun_do_read() local
2223 ret = tun_put_user_xdp(tun, tfile, xdpf, to); in tun_do_read()
2224 xdp_return_frame(xdpf); in tun_do_read()
2607 struct xdp_frame *xdpf = tun_ptr_to_xdp(ptr); in tun_ptr_peek_len() local
2609 return xdpf->len; in tun_ptr_peek_len()
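
tun uses the same low-bit tagging trick as cpsw: its ptr_ring carries either sk_buffs or xdp_frames, and every consumer in the hits (tun_ptr_free, tun_do_read, tun_ptr_peek_len) first decodes the tag. vhost-net's peek below leans on the same encoding. The tun_do_read() branch, condensed:

    if (tun_is_xdp_frame(ptr)) {          /* low bit set: XDP frame */
        struct xdp_frame *xdpf = tun_ptr_to_xdp(ptr);

        ret = tun_put_user_xdp(tun, tfile, xdpf, to);
        xdp_return_frame(xdpf);           /* consumed: free it now */
    } else {
        struct sk_buff *skb = ptr;

        ret = tun_put_user(tun, tfile, skb, to);
    }
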
/Linux-v5.4/drivers/net/ethernet/freescale/dpaa2/
dpaa2-eth.c
724 xdp_return_frame(swa->xdp.xdpf); in free_tx_fd()
1847 struct xdp_frame *xdpf) in dpaa2_eth_xdp_xmit_frame() argument
1865 if (xdpf->headroom < needed_headroom) in dpaa2_eth_xdp_xmit_frame()
1875 buffer_start = xdpf->data - needed_headroom; in dpaa2_eth_xdp_xmit_frame()
1878 if (aligned_start >= xdpf->data - xdpf->headroom) in dpaa2_eth_xdp_xmit_frame()
1884 swa->xdp.dma_size = xdpf->data + xdpf->len - buffer_start; in dpaa2_eth_xdp_xmit_frame()
1885 swa->xdp.xdpf = xdpf; in dpaa2_eth_xdp_xmit_frame()
1896 dpaa2_fd_set_offset(&fd, xdpf->data - buffer_start); in dpaa2_eth_xdp_xmit_frame()
1897 dpaa2_fd_set_len(&fd, xdpf->len); in dpaa2_eth_xdp_xmit_frame()
1933 struct xdp_frame *xdpf = frames[i]; in dpaa2_eth_xdp_xmit() local
[all …]
dpaa2-eth.h
116 struct xdp_frame *xdpf; member
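
dpaa2's transmit path shows the strictest headroom negotiation in this list: the hardware frame descriptor needs its software annotation (swa) in front of the packet, and dpaa2_eth_xdp_xmit_frame() refuses the frame outright rather than reallocate. The alignment step then tries to pull the buffer start down to a hardware-aligned address, but only if that still stays within the frame's real headroom. Condensed from the hits (the align constant and error value are schematic; the driver uses its own names):

    if (xdpf->headroom < needed_headroom)
        return -EINVAL;                   /* no room for annotation */

    buffer_start  = xdpf->data - needed_headroom;
    aligned_start = PTR_ALIGN(buffer_start - align, align);
    if (aligned_start >= xdpf->data - xdpf->headroom)
        buffer_start = aligned_start;     /* aligned start still fits */

    swa = (struct dpaa2_eth_swa *)buffer_start;
    swa->xdp.dma_size = xdpf->data + xdpf->len - buffer_start;
    swa->xdp.xdpf = xdpf;                 /* freed later in free_tx_fd() */

    dpaa2_fd_set_offset(&fd, xdpf->data - buffer_start);
    dpaa2_fd_set_len(&fd, xdpf->len);
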
/Linux-v5.4/drivers/vhost/
net.c
201 struct xdp_frame *xdpf = tun_ptr_to_xdp(ptr); in vhost_net_buf_peek_len() local
203 return xdpf->len; in vhost_net_buf_peek_len()
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/
en.h
462 struct xdp_frame *xdpf; member