
Searched refs: skb_frag_size (Results 1 – 25 of 156), sorted by relevance
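For reference, skb_frag_size() is the skbuff helper that returns the length in bytes of one paged fragment (skb_frag_t) of a socket buffer; most of the hits below either sum or bound these lengths. A minimal sketch of that pattern, not taken from any file listed here (total_frag_len() is a made-up name for illustration):

/*
 * Illustrative only: sum the paged part of an skb by walking its
 * fragments with skb_frag_size().  For an skb without a frag_list
 * this should match skb->data_len.
 */
#include <linux/skbuff.h>

static unsigned int total_frag_len(const struct sk_buff *skb)
{
        const struct skb_shared_info *shinfo = skb_shinfo(skb);
        unsigned int len = 0;
        int i;

        for (i = 0; i < shinfo->nr_frags; i++)
                len += skb_frag_size(&shinfo->frags[i]);

        return len;
}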


/linux/net/tls/
tls_strp.c
58 skb_frag_size(frag))); in tls_strp_skb_copy()
59 offset += skb_frag_size(frag); in tls_strp_skb_copy()
224 chunk = min_t(size_t, len, PAGE_SIZE - skb_frag_size(frag)); in tls_strp_copyin_frag()
227 skb_frag_size(frag), in tls_strp_copyin_frag()
262 chunk = min_t(size_t, chunk, PAGE_SIZE - skb_frag_size(frag)); in tls_strp_copyin_frag()
265 skb_frag_size(frag), in tls_strp_copyin_frag()
tls_device.c
263 skb_frag_off(frag) + skb_frag_size(frag) == pfrag->offset) { in tls_append_frag()
301 skb_frag_size(frag), skb_frag_off(frag)); in tls_push_record()
302 sk_mem_charge(sk, skb_frag_size(frag)); in tls_push_record()
/linux/net/core/
tso.c
57 tso->size = skb_frag_size(frag); in tso_build_data()
83 tso->size = skb_frag_size(frag); in tso_start()
datagram.c
421 end = start + skb_frag_size(frag); in __skb_datagram_iter()
575 end = start + skb_frag_size(frag); in skb_copy_datagram_from_iter()
668 start == skb_frag_off(last) + skb_frag_size(last)) { in zerocopy_fill_skb_from_iter()
gro.c
407 skb_frag_size(frag0), in skb_gro_reset_offset()
426 if (unlikely(!skb_frag_size(&pinfo->frags[0]))) { in gro_pull_from_frag0()
/linux/drivers/net/ethernet/sfc/
tx_tso.c
212 skb_frag_size(frag), DMA_TO_DEVICE); in tso_get_fragment()
214 st->unmap_len = skb_frag_size(frag); in tso_get_fragment()
215 st->in_len = skb_frag_size(frag); in tso_get_fragment()
/linux/drivers/net/ethernet/netronome/nfp/nfdk/
rings.c
44 size = skb_frag_size(frag); in nfp_nfdk_tx_ring_reset()
46 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfdk_tx_ring_reset()
/linux/drivers/net/ethernet/intel/iavf/
iavf_txrx.c
2071 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2072 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2073 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2074 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2075 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2081 int stale_size = skb_frag_size(stale); in __iavf_chk_linearize()
2083 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2223 size = skb_frag_size(frag); in iavf_tx_map()
iavf_txrx.h
343 size = skb_frag_size(frag++); in iavf_xmit_descriptor_count()
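The iavf_xmit_descriptor_count() and __iavf_chk_linearize() hits above (and their i40e/ice/idpf counterparts further down) walk the fragment list with skb_frag_size() to decide how many TX descriptors an skb needs and whether it must be linearized. A simplified sketch of the counting half only; MAX_DATA_PER_DESC and xmit_desc_count() are illustrative names, not these drivers' actual limits or helpers:

/*
 * Simplified sketch: each fragment may need more than one TX descriptor
 * when its length exceeds what a single descriptor can carry.
 * MAX_DATA_PER_DESC is an illustrative limit, not a real hardware value.
 */
#include <linux/kernel.h>
#include <linux/skbuff.h>

#define MAX_DATA_PER_DESC       16384U

static unsigned int xmit_desc_count(const struct sk_buff *skb)
{
        const struct skb_shared_info *shinfo = skb_shinfo(skb);
        unsigned int count = DIV_ROUND_UP(skb_headlen(skb), MAX_DATA_PER_DESC);
        int i;

        for (i = 0; i < shinfo->nr_frags; i++)
                count += DIV_ROUND_UP(skb_frag_size(&shinfo->frags[i]),
                                      MAX_DATA_PER_DESC);

        return count;
}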
/linux/drivers/net/ethernet/intel/ice/
ice_txrx.c
1798 size = skb_frag_size(frag); in ice_tx_map()
2278 size = skb_frag_size(frag++); in ice_xmit_desc_count()
2322 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2323 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2324 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2325 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2326 sum += skb_frag_size(frag++); in __ice_chk_linearize()
2332 int stale_size = skb_frag_size(stale); in __ice_chk_linearize()
2334 sum += skb_frag_size(frag++); in __ice_chk_linearize()
ice_xsk.c
592 memcpy(addr, skb_frag_page(frag), skb_frag_size(frag)); in ice_construct_skb_zc()
595 addr, 0, skb_frag_size(frag)); in ice_construct_skb_zc()
733 size = skb_frag_size(&sinfo->frags[frag]); in ice_xmit_xdp_tx_zc()
/linux/drivers/net/ethernet/chelsio/cxgb4vf/
sge.c
290 *++addr = skb_frag_dma_map(dev, fp, 0, skb_frag_size(fp), in map_skb()
299 dma_unmap_page(dev, *--addr, skb_frag_size(fp), DMA_TO_DEVICE); in map_skb()
917 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); in write_sgl()
933 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in write_sgl()
934 to->len[1] = cpu_to_be32(skb_frag_size(&si->frags[++i])); in write_sgl()
939 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in write_sgl()
/linux/drivers/net/ethernet/intel/i40e/
i40e_txrx.c
3515 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3516 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3517 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3518 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3519 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3525 int stale_size = skb_frag_size(stale); in __i40e_chk_linearize()
3527 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3649 size = skb_frag_size(frag); in i40e_tx_map()
3825 size = skb_frag_size(&sinfo->frags[i]); in i40e_xmit_xdp_ring()
i40e_xsk.c
333 memcpy(addr, skb_frag_page(frag), skb_frag_size(frag)); in i40e_construct_skb_zc()
336 addr, 0, skb_frag_size(frag)); in i40e_construct_skb_zc()
i40e_txrx.h
516 size = skb_frag_size(frag++); in i40e_xmit_descriptor_count()
/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_tx.c
664 remaining -= skb_frag_size(frag); in tx_sync_info_get()
738 fsz = skb_frag_size(frag); in tx_post_resync_dump()
795 orig_fsz = skb_frag_size(f); in mlx5e_ktls_tx_handle_ooo()
/linux/drivers/net/ethernet/netronome/nfp/nfd3/
rings.c
66 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfd3_tx_ring_reset()
/linux/drivers/infiniband/hw/hfi1/
vnic_sdma.c
70 skb_frag_size(frag), in build_vnic_ulp_payload()
/linux/drivers/net/ethernet/huawei/hinic3/
hinic3_tx.c
96 skb_frag_size(frag), in hinic3_tx_map_skb()
102 dma_info[idx].len = skb_frag_size(frag); in hinic3_tx_map_skb()
/linux/drivers/net/ethernet/intel/idpf/
idpf_txrx.c
2274 size = skb_frag_size(&shinfo->frags[i]); in idpf_tx_desc_count_required()
2527 size = skb_frag_size(frag); in idpf_tx_splitq_map()
2667 sum += skb_frag_size(frag++); in __idpf_chk_linearize()
2668 sum += skb_frag_size(frag++); in __idpf_chk_linearize()
2669 sum += skb_frag_size(frag++); in __idpf_chk_linearize()
2670 sum += skb_frag_size(frag++); in __idpf_chk_linearize()
2671 sum += skb_frag_size(frag++); in __idpf_chk_linearize()
2677 int stale_size = skb_frag_size(stale); in __idpf_chk_linearize()
2679 sum += skb_frag_size(frag++); in __idpf_chk_linearize()
/linux/drivers/infiniband/ulp/ipoib/
ipoib_ib.c
298 skb_frag_size(frag), in ipoib_dma_map_tx()
309 ib_dma_unmap_page(ca, mapping[i - !off], skb_frag_size(frag), DMA_TO_DEVICE); in ipoib_dma_map_tx()
337 skb_frag_size(frag), DMA_TO_DEVICE); in ipoib_dma_unmap_tx()
/linux/include/net/libeth/
xdp.h
736 .len = skb_frag_size(frag) & LIBETH_XDP_TX_LEN, in libeth_xdp_tx_fill_buf()
737 .flags = skb_frag_size(frag) & LIBETH_XDP_TX_FLAGS, in libeth_xdp_tx_fill_buf()
919 __libeth_xdp_tx_len(skb_frag_size(frag)), in libeth_xdp_xmit_queue_frag()
/linux/drivers/target/iscsi/cxgbit/
cxgbit_target.c
886 skb_frag_size(dfrag), skb_frag_off(dfrag)); in cxgbit_handle_immediate_data()
1405 skb_frag_size(&ssi->frags[i])); in cxgbit_lro_skb_dump()
1449 len = skb_frag_size(&hssi->frags[hfrag_idx]); in cxgbit_lro_skb_merge()
1469 len += skb_frag_size(&hssi->frags[dfrag_idx]); in cxgbit_lro_skb_merge()
/linux/drivers/net/ethernet/chelsio/cxgb4/
sge.c
257 *++addr = skb_frag_dma_map(dev, fp, 0, skb_frag_size(fp), in cxgb4_map_skb()
266 dma_unmap_page(dev, *--addr, skb_frag_size(fp), DMA_TO_DEVICE); in cxgb4_map_skb()
285 dma_unmap_page(dev, *addr++, skb_frag_size(fp), DMA_TO_DEVICE); in unmap_skb()
839 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); in cxgb4_write_sgl()
855 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in cxgb4_write_sgl()
856 to->len[1] = cpu_to_be32(skb_frag_size(&si->frags[++i])); in cxgb4_write_sgl()
861 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in cxgb4_write_sgl()
914 frag_size = skb_frag_size(frag); in cxgb4_write_partial_sgl()
920 frag_size = skb_frag_size(frag); in cxgb4_write_partial_sgl()
923 frag_size = min(len, skb_frag_size(fra in cxgb4_write_partial_sgl()
[all...]
/linux/drivers/net/ethernet/cavium/liquidio/
lio_vf_main.c
844 skb_frag_size(frag), DMA_TO_DEVICE); in free_netsgbuf()
888 skb_frag_size(frag), DMA_TO_DEVICE); in free_netsgbuf_with_resp()
1541 frag, 0, skb_frag_size(frag), in liquidio_xmit()
1553 skb_frag_size(frag), in liquidio_xmit()
1561 add_sg_size(&g->sg[(i >> 2)], skb_frag_size(frag), in liquidio_xmit()
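Most of the driver hits on this page (sge.c, ipoib_ib.c, hinic3_tx.c, lio_vf_main.c, ...) follow the same TX mapping shape: map each paged fragment for DMA with skb_frag_dma_map() using skb_frag_size() as the length, then unmap with the same length on completion or error. A minimal sketch under that assumption; map_skb_frags() is an illustrative helper, not a kernel API, and error handling is reduced to unwinding the mappings:

/*
 * Illustrative TX DMA-mapping loop: map every paged fragment of the skb,
 * recording each mapping in addr[]; on failure, unmap what was mapped.
 */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/skbuff.h>

static int map_skb_frags(struct device *dev, const struct sk_buff *skb,
                         dma_addr_t *addr)
{
        const struct skb_shared_info *shinfo = skb_shinfo(skb);
        int i;

        for (i = 0; i < shinfo->nr_frags; i++) {
                const skb_frag_t *frag = &shinfo->frags[i];

                addr[i] = skb_frag_dma_map(dev, frag, 0,
                                           skb_frag_size(frag),
                                           DMA_TO_DEVICE);
                if (dma_mapping_error(dev, addr[i]))
                        goto unwind;
        }
        return 0;

unwind:
        while (--i >= 0)
                dma_unmap_page(dev, addr[i],
                               skb_frag_size(&shinfo->frags[i]),
                               DMA_TO_DEVICE);
        return -ENOMEM;
}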
