
Searched refs:skb_frag_size (Results 1 – 25 of 152) sorted by relevance
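For context: skb_frag_size() returns the length in bytes of one paged fragment of an sk_buff (in v5.4 a skb_frag_t is a bio_vec, and the helper reads its length field). Nearly every hit below follows the same shape: walk skb_shinfo(skb)->frags and consume skb_frag_size() bytes per fragment. A minimal, hypothetical sketch of that loop (the helper name is illustrative, not from the kernel tree):

#include <linux/skbuff.h>

/* Illustrative only: total the lengths of all paged fragments of an skb,
 * mirroring the iteration pattern seen in the results below. */
static unsigned int skb_frags_total_len(const struct sk_buff *skb)
{
        const struct skb_shared_info *shinfo = skb_shinfo(skb);
        unsigned int total = 0;
        int i;

        for (i = 0; i < shinfo->nr_frags; i++)
                total += skb_frag_size(&shinfo->frags[i]);

        return total;
}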


/Linux-v5.4/net/core/
tso.c
58 tso->size = skb_frag_size(frag); in tso_build_data()
82 tso->size = skb_frag_size(frag); in tso_start()
skbuff.c
789 skb_frag_size(frag), p, p_off, p_len, in skb_dump()
1378 skb_frag_foreach_page(f, skb_frag_off(f), skb_frag_size(f), in skb_copy_ubufs()
1943 int end = offset + skb_frag_size(&skb_shinfo(skb)->frags[i]); in ___pskb_trim()
2076 int size = skb_frag_size(&skb_shinfo(skb)->frags[i]); in __pskb_pull_tail()
2141 int size = skb_frag_size(&skb_shinfo(skb)->frags[i]); in __pskb_pull_tail()
2214 end = start + skb_frag_size(f); in skb_copy_bits()
2400 skb_frag_off(f), skb_frag_size(f), in __skb_splice_bits()
2490 if (offset < skb_frag_size(frag)) in skb_send_sock_locked()
2493 offset -= skb_frag_size(frag); in skb_send_sock_locked()
2499 slen = min_t(size_t, len, skb_frag_size(frag) - offset); in skb_send_sock_locked()
[all …]
datagram.c
438 end = start + skb_frag_size(frag); in __skb_datagram_iter()
569 end = start + skb_frag_size(frag); in skb_copy_datagram_from_iter()
/Linux-v5.4/drivers/net/ethernet/sfc/
tx_tso.c
212 skb_frag_size(frag), DMA_TO_DEVICE); in tso_get_fragment()
214 st->unmap_len = skb_frag_size(frag); in tso_get_fragment()
215 st->in_len = skb_frag_size(frag); in tso_get_fragment()
tx.c
278 skb_frag_size(f), copy_buf); in efx_skb_copy_bits_to_pio()
423 len = skb_frag_size(fragment); in efx_tx_map_data()
/Linux-v5.4/drivers/net/ethernet/intel/ice/
ice_txrx.c
1672 size = skb_frag_size(frag); in ice_tx_map()
2007 size = skb_frag_size(frag++); in ice_xmit_desc_count()
2051 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2052 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2053 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2054 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2055 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2062 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2071 sum -= skb_frag_size(stale++); in __ice_chk_linearize()
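The ice hits above (and the iavf/i40e hits further down) come from the drivers' TX descriptor accounting: each fragment contributes one or more descriptors depending on its skb_frag_size(), and skbs that would exceed the hardware limit are linearized. A simplified, hypothetical sketch of the counting side (MAX_DATA_PER_TXD and the function name are illustrative, not the drivers' exact macros):

#include <linux/kernel.h>
#include <linux/skbuff.h>

#define MAX_DATA_PER_TXD 16384U	/* illustrative per-descriptor limit */

/* Sketch only: the linear head plus every fragment consumes one
 * descriptor per MAX_DATA_PER_TXD-sized chunk of its length. */
static unsigned int xmit_descriptor_count(const struct sk_buff *skb)
{
        const skb_frag_t *frag = &skb_shinfo(skb)->frags[0];
        unsigned int nr_frags = skb_shinfo(skb)->nr_frags;
        unsigned int count = DIV_ROUND_UP(skb_headlen(skb), MAX_DATA_PER_TXD);

        for (; nr_frags; nr_frags--, frag++)
                count += DIV_ROUND_UP(skb_frag_size(frag), MAX_DATA_PER_TXD);

        return count;
}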
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_tx.c
224 remaining -= skb_frag_size(frag); in tx_sync_info_get()
280 fsz = skb_frag_size(frag); in tx_post_resync_dump()
387 orig_fsz = skb_frag_size(f); in mlx5e_ktls_tx_handle_ooo()
tls_rxtx.c
128 remaining -= skb_frag_size(&info->frags[i]); in mlx5e_tls_get_sync_data()
/Linux-v5.4/drivers/net/ethernet/intel/iavf/
iavf_txrx.c
2187 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2188 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2189 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2190 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2191 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2197 int stale_size = skb_frag_size(stale); in __iavf_chk_linearize()
2199 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2340 size = skb_frag_size(frag); in iavf_tx_map()
iavf_txrx.h
475 size = skb_frag_size(frag++); in iavf_xmit_descriptor_count()
/Linux-v5.4/net/tls/
tls_device_fallback.c
276 skb_frag_size(frag), skb_frag_off(frag)); in fill_sg_in()
278 remaining -= skb_frag_size(frag); in fill_sg_in()
tls_device.c
240 skb_frag_off(frag) + skb_frag_size(frag) == pfrag->offset) { in tls_append_frag()
279 skb_frag_size(frag), skb_frag_off(frag)); in tls_push_record()
280 sk_mem_charge(sk, skb_frag_size(frag)); in tls_push_record()
/Linux-v5.4/drivers/net/ethernet/aeroflex/
greth.c
113 skb_frag_size(&skb_shinfo(skb)->frags[i]), true); in greth_print_tx_packet()
203 skb_frag_size(frag), in greth_clean_rings()
525 status |= skb_frag_size(frag) & GRETH_BD_LEN; in greth_start_xmit_gbit()
539 dma_addr = skb_frag_dma_map(greth->dev, frag, 0, skb_frag_size(frag), in greth_start_xmit_gbit()
719 skb_frag_size(frag), in greth_clean_tx_gbit()
/Linux-v5.4/drivers/net/wireless/intel/iwlwifi/pcie/
tx-gen2.c
434 if (!skb_frag_size(frag)) in iwl_pcie_gen2_tx_add_frags()
438 skb_frag_size(frag), DMA_TO_DEVICE); in iwl_pcie_gen2_tx_add_frags()
443 skb_frag_size(frag)); in iwl_pcie_gen2_tx_add_frags()
446 skb_frag_size(frag)); in iwl_pcie_gen2_tx_add_frags()
/Linux-v5.4/drivers/net/ethernet/chelsio/cxgb4vf/
sge.c
290 *++addr = skb_frag_dma_map(dev, fp, 0, skb_frag_size(fp), in map_skb()
299 dma_unmap_page(dev, *--addr, skb_frag_size(fp), DMA_TO_DEVICE); in map_skb()
917 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); in write_sgl()
933 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in write_sgl()
934 to->len[1] = cpu_to_be32(skb_frag_size(&si->frags[++i])); in write_sgl()
939 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in write_sgl()
/Linux-v5.4/drivers/net/ethernet/intel/i40e/
i40e_txrx.c
3288 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3289 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3290 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3291 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3292 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3298 int stale_size = skb_frag_size(stale); in __i40e_chk_linearize()
3300 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3423 size = skb_frag_size(frag); in i40e_tx_map()
i40e_txrx.h
534 size = skb_frag_size(frag++); in i40e_xmit_descriptor_count()
/Linux-v5.4/drivers/infiniband/hw/hfi1/
vnic_sdma.c
112 skb_frag_size(frag)); in build_vnic_ulp_payload()
/Linux-v5.4/drivers/infiniband/ulp/ipoib/
ipoib_ib.c
297 skb_frag_size(frag), in ipoib_dma_map_tx()
308 ib_dma_unmap_page(ca, mapping[i - !off], skb_frag_size(frag), DMA_TO_DEVICE); in ipoib_dma_map_tx()
336 skb_frag_size(frag), DMA_TO_DEVICE); in ipoib_dma_unmap_tx()
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx4/
en_tx.c
658 skb_frag_size(&shinfo->frags[0])); in build_inline_wqe()
679 skb_frag_size(&shinfo->frags[0])); in build_inline_wqe()
776 byte_count = skb_frag_size(frag); in mlx4_en_build_dma_wqe()
/Linux-v5.4/drivers/target/iscsi/cxgbit/
cxgbit_target.c
903 skb_frag_size(dfrag), skb_frag_off(dfrag)); in cxgbit_handle_immediate_data()
1407 skb_frag_size(&ssi->frags[i])); in cxgbit_lro_skb_dump()
1451 len = skb_frag_size(&hssi->frags[hfrag_idx]); in cxgbit_lro_skb_merge()
1471 len += skb_frag_size(&hssi->frags[dfrag_idx]); in cxgbit_lro_skb_merge()
/Linux-v5.4/drivers/net/ethernet/cavium/liquidio/
lio_vf_main.c
844 skb_frag_size(frag), DMA_TO_DEVICE); in free_netsgbuf()
888 skb_frag_size(frag), DMA_TO_DEVICE); in free_netsgbuf_with_resp()
1539 frag, 0, skb_frag_size(frag), in liquidio_xmit()
1551 skb_frag_size(frag), in liquidio_xmit()
1559 add_sg_size(&g->sg[(i >> 2)], skb_frag_size(frag), in liquidio_xmit()
/Linux-v5.4/drivers/net/ethernet/huawei/hinic/
hinic_tx.c
156 skb_frag_size(frag), in tx_map_skb()
163 hinic_set_sge(&sges[i + 1], dma_addr, skb_frag_size(frag)); in tx_map_skb()
/Linux-v5.4/net/ipv4/
tcp.c
1795 if (skb_frag_size(frags) > offset) in tcp_zerocopy_receive()
1797 offset -= skb_frag_size(frags); in tcp_zerocopy_receive()
1801 if (skb_frag_size(frags) != PAGE_SIZE || skb_frag_off(frags)) { in tcp_zerocopy_receive()
1804 while (remaining && (skb_frag_size(frags) != PAGE_SIZE || in tcp_zerocopy_receive()
1806 remaining -= skb_frag_size(frags); in tcp_zerocopy_receive()
3814 sg_set_page(&sg, page, skb_frag_size(f), in tcp_md5_hash_skb_data()
3816 ahash_request_set_crypt(req, &sg, NULL, skb_frag_size(f)); in tcp_md5_hash_skb_data()
/Linux-v5.4/drivers/net/ethernet/ni/
nixge.c
546 skb_frag_size(frag), in nixge_start_xmit()
552 cur_p->cntrl = skb_frag_size(frag); in nixge_start_xmit()
556 tx_skb->size = skb_frag_size(frag); in nixge_start_xmit()
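Most of the driver hits in this list pair skb_frag_size() with skb_frag_dma_map() when building TX descriptors. A minimal sketch of that pattern, assuming a generic device pointer and descriptor fill step (placeholders, not any one driver's API); error unwinding of already-mapped fragments is omitted for brevity:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/skbuff.h>

/* Sketch only: DMA-map every paged fragment of an skb for transmit,
 * using skb_frag_size() as the mapping length for each fragment. */
static int map_tx_frags(struct device *dev, struct sk_buff *skb)
{
        struct skb_shared_info *shinfo = skb_shinfo(skb);
        int i;

        for (i = 0; i < shinfo->nr_frags; i++) {
                const skb_frag_t *frag = &shinfo->frags[i];
                unsigned int len = skb_frag_size(frag);
                dma_addr_t addr;

                addr = skb_frag_dma_map(dev, frag, 0, len, DMA_TO_DEVICE);
                if (dma_mapping_error(dev, addr))
                        return -ENOMEM;

                /* write (addr, len) into one hardware TX descriptor here */
        }

        return 0;
}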
