
Searched refs:skb_frag_size (Results 1 – 25 of 143) sorted by relevance
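skb_frag_size() is the accessor from include/linux/skbuff.h that returns the length in bytes of one paged fragment of an skb (skb_shinfo(skb)->frags[i]). Many of the call sites below simply sum it over all fragments when sizing buffers or descriptors; a minimal sketch of that pattern (helper name is mine, not taken from any file listed here):

	#include <linux/skbuff.h>

	static unsigned int skb_frags_total_len(const struct sk_buff *skb)
	{
		const struct skb_shared_info *shinfo = skb_shinfo(skb);
		unsigned int total = 0;
		int i;

		/* Each frags[i] describes one page chunk; skb_frag_size() is its length. */
		for (i = 0; i < shinfo->nr_frags; i++)
			total += skb_frag_size(&shinfo->frags[i]);

		return total;
	}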


/linux/net/core/
tso.c
57 tso->size = skb_frag_size(frag); in tso_build_data()
83 tso->size = skb_frag_size(frag); in tso_start()
skbuff.c
1369 len -= skb_frag_size(frag); in skb_dump()
1376 skb_frag_size(frag), p, p_off, p_len, in skb_dump()
2016 skb_frag_foreach_page(f, skb_frag_off(f), skb_frag_size(f), in skb_copy_ubufs()
2724 int end = offset + skb_frag_size(&skb_shinfo(skb)->frags[i]); in ___pskb_trim()
2866 int size = skb_frag_size(&skb_shinfo(skb)->frags[i]); in __pskb_pull_tail()
2934 int size = skb_frag_size(&skb_shinfo(skb)->frags[i]); in __pskb_pull_tail()
3010 end = start + skb_frag_size(f); in skb_copy_bits()
3200 skb_frag_off(f), skb_frag_size(f), in __skb_splice_bits()
3318 if (offset < skb_frag_size(frag)) in __skb_send_sock()
3321 offset -= skb_frag_size(frag); in __skb_send_sock()
[all …]
datagram.c
421 end = start + skb_frag_size(frag); in __skb_datagram_iter()
575 end = start + skb_frag_size(frag); in skb_copy_datagram_from_iter()
682 start == skb_frag_off(last) + skb_frag_size(last)) { in zerocopy_fill_skb_from_iter()
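The skbuff.c and datagram.c hits above all share the same start/end accounting: fragment i covers bytes [start, start + skb_frag_size(frag)) of the skb's paged data. A hedged sketch of that walk (function name is mine, not a kernel helper):

	#include <linux/skbuff.h>

	/* Locate the fragment holding byte @offset of @skb's paged data,
	 * mirroring the bookkeeping in skb_copy_bits() and
	 * __skb_datagram_iter() listed above.
	 */
	static const skb_frag_t *find_frag_at_offset(const struct sk_buff *skb,
						     int offset)
	{
		int start = skb_headlen(skb);
		int i;

		for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
			const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];
			int end = start + skb_frag_size(frag);

			if (offset >= start && offset < end)
				return frag;
			start = end;
		}

		return NULL;	/* offset is in the linear head or past the end */
	}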
/linux/net/tls/
tls_strp.c
58 skb_frag_size(frag))); in tls_strp_skb_copy()
59 offset += skb_frag_size(frag); in tls_strp_skb_copy()
230 chunk = min_t(size_t, len, PAGE_SIZE - skb_frag_size(frag)); in tls_strp_copyin_frag()
233 skb_frag_size(frag), in tls_strp_copyin_frag()
268 chunk = min_t(size_t, chunk, PAGE_SIZE - skb_frag_size(frag)); in tls_strp_copyin_frag()
271 skb_frag_size(frag), in tls_strp_copyin_frag()
tls_device_fallback.c
264 skb_frag_size(frag), skb_frag_off(frag)); in fill_sg_in()
266 remaining -= skb_frag_size(frag); in fill_sg_in()
tls_device.c
265 skb_frag_off(frag) + skb_frag_size(frag) == pfrag->offset) { in tls_append_frag()
303 skb_frag_size(frag), skb_frag_off(frag)); in tls_push_record()
304 sk_mem_charge(sk, skb_frag_size(frag)); in tls_push_record()
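In tls_device.c above, skb_frag_off() + skb_frag_size() is compared against the next page_frag offset to decide whether new bytes can be coalesced into the last fragment instead of consuming another slot. A simplified, hypothetical sketch of that test (the real tls_append_frag() grows record->frags, and len/truesize accounting is omitted here):

	#include <linux/skbuff.h>
	#include <linux/mm.h>

	static void sketch_append_to_last_frag(struct sk_buff *skb,
					       struct page_frag *pfrag, int copy)
	{
		struct skb_shared_info *shinfo = skb_shinfo(skb);

		if (shinfo->nr_frags) {
			skb_frag_t *frag = &shinfo->frags[shinfo->nr_frags - 1];

			/* Contiguous with the previous chunk of the same page? */
			if (skb_frag_page(frag) == pfrag->page &&
			    skb_frag_off(frag) + skb_frag_size(frag) == pfrag->offset) {
				skb_frag_size_add(frag, copy);
				return;
			}
		}

		/* Otherwise start a new fragment. */
		get_page(pfrag->page);
		skb_fill_page_desc(skb, shinfo->nr_frags, pfrag->page,
				   pfrag->offset, copy);
	}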
/linux/drivers/net/ethernet/sfc/
tx_tso.c
212 skb_frag_size(frag), DMA_TO_DEVICE); in tso_get_fragment()
214 st->unmap_len = skb_frag_size(frag); in tso_get_fragment()
215 st->in_len = skb_frag_size(frag); in tso_get_fragment()
/linux/drivers/net/ethernet/netronome/nfp/nfdk/
rings.c
44 size = skb_frag_size(frag); in nfp_nfdk_tx_ring_reset()
46 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfdk_tx_ring_reset()
/linux/net/ipv4/
tcp_sigpool.c
351 sg_set_page(&sg, page, skb_frag_size(f), offset_in_page(offset)); in tcp_sigpool_hash_skb_data()
352 ahash_request_set_crypt(req, &sg, NULL, skb_frag_size(f)); in tcp_sigpool_hash_skb_data()
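tcp_sigpool_hash_skb_data() feeds each fragment to the hash through a one-entry scatterlist sized by skb_frag_size(). A minimal sketch of that per-fragment setup, assuming the whole fragment is hashed (the real code can start mid-fragment, hence its offset_in_page(offset)):

	#include <linux/scatterlist.h>
	#include <linux/skbuff.h>

	static void sketch_frag_to_sg(struct scatterlist *sg, const skb_frag_t *f)
	{
		sg_init_table(sg, 1);
		/* One sg entry covering the fragment's full skb_frag_size() bytes. */
		sg_set_page(sg, skb_frag_page(f), skb_frag_size(f), skb_frag_off(f));
	}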
/linux/drivers/net/ethernet/intel/iavf/
iavf_txrx.c
2071 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2072 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2073 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2074 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2075 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2081 int stale_size = skb_frag_size(stale); in __iavf_chk_linearize()
2083 sum += skb_frag_size(frag++); in __iavf_chk_linearize()
2223 size = skb_frag_size(frag); in iavf_tx_map()
iavf_txrx.h
343 size = skb_frag_size(frag++); in iavf_xmit_descriptor_count()
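The __iavf_chk_linearize()/iavf_xmit_descriptor_count() hits (and their idpf and i40e counterparts below) use skb_frag_size() to estimate how many TX descriptors a packet needs and whether it must be linearized. A simplified sketch of the counting side, assuming a hypothetical per-descriptor data limit max_data_per_txd:

	#include <linux/kernel.h>
	#include <linux/skbuff.h>

	static unsigned int sketch_desc_count(const struct sk_buff *skb,
					      unsigned int max_data_per_txd)
	{
		const struct skb_shared_info *shinfo = skb_shinfo(skb);
		unsigned int count;
		int i;

		/* Linear head first, then one or more descriptors per fragment. */
		count = DIV_ROUND_UP(skb_headlen(skb), max_data_per_txd);
		for (i = 0; i < shinfo->nr_frags; i++)
			count += DIV_ROUND_UP(skb_frag_size(&shinfo->frags[i]),
					      max_data_per_txd);

		return count;
	}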
/linux/drivers/net/ethernet/intel/idpf/
idpf_lib.c
2381 sum += skb_frag_size(frag++); in idpf_chk_tso_segment()
2382 sum += skb_frag_size(frag++); in idpf_chk_tso_segment()
2383 sum += skb_frag_size(frag++); in idpf_chk_tso_segment()
2384 sum += skb_frag_size(frag++); in idpf_chk_tso_segment()
2385 sum += skb_frag_size(frag++); in idpf_chk_tso_segment()
2391 int stale_size = skb_frag_size(stale); in idpf_chk_tso_segment()
2393 sum += skb_frag_size(frag++); in idpf_chk_tso_segment()
/linux/drivers/net/ethernet/pensando/ionic/
ionic_txrx.c
359 skb_frag_size(frag), in ionic_xdp_post_frame()
363 skb_frag_size(frag)); in ionic_xdp_post_frame()
370 bi->len = skb_frag_size(frag); in ionic_xdp_post_frame()
1132 dma_addr = ionic_tx_map_frag(q, frag, 0, skb_frag_size(frag)); in ionic_tx_map_skb()
1136 buf_info->len = skb_frag_size(frag); in ionic_tx_map_skb()
1743 frag_rem = skb_frag_size(frag); in ionic_tx_descs_needed()
/linux/drivers/net/ethernet/aeroflex/
greth.c
113 skb_frag_size(&skb_shinfo(skb)->frags[i]), true); in greth_print_tx_packet()
203 skb_frag_size(frag), in greth_clean_rings()
526 status |= skb_frag_size(frag) & GRETH_BD_LEN; in greth_start_xmit_gbit()
540 dma_addr = skb_frag_dma_map(greth->dev, frag, 0, skb_frag_size(frag), in greth_start_xmit_gbit()
721 skb_frag_size(frag), in greth_clean_tx_gbit()
/linux/drivers/net/ethernet/chelsio/cxgb4vf/
sge.c
290 *++addr = skb_frag_dma_map(dev, fp, 0, skb_frag_size(fp), in map_skb()
299 dma_unmap_page(dev, *--addr, skb_frag_size(fp), DMA_TO_DEVICE); in map_skb()
917 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); in write_sgl()
933 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in write_sgl()
934 to->len[1] = cpu_to_be32(skb_frag_size(&si->frags[++i])); in write_sgl()
939 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in write_sgl()
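map_skb() here and cxgb4_map_skb() further down share the classic TX mapping loop: every fragment is DMA-mapped for its full skb_frag_size(), and already-mapped fragments are unwound in reverse if a mapping fails. A hedged sketch of that loop (function name is mine):

	#include <linux/dma-mapping.h>
	#include <linux/skbuff.h>

	static int sketch_map_frags(struct device *dev, const struct sk_buff *skb,
				    dma_addr_t *addr)
	{
		const struct skb_shared_info *si = skb_shinfo(skb);
		int i;

		for (i = 0; i < si->nr_frags; i++) {
			const skb_frag_t *fp = &si->frags[i];

			addr[i] = skb_frag_dma_map(dev, fp, 0, skb_frag_size(fp),
						   DMA_TO_DEVICE);
			if (dma_mapping_error(dev, addr[i]))
				goto unwind;
		}
		return 0;

	unwind:
		/* Undo the mappings already made, newest first. */
		while (--i >= 0)
			dma_unmap_page(dev, addr[i], skb_frag_size(&si->frags[i]),
				       DMA_TO_DEVICE);
		return -ENOMEM;
	}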
/linux/drivers/net/ethernet/intel/i40e/
i40e_txrx.c
3511 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3512 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3513 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3514 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3515 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3521 int stale_size = skb_frag_size(stale); in __i40e_chk_linearize()
3523 sum += skb_frag_size(frag++); in __i40e_chk_linearize()
3645 size = skb_frag_size(frag); in i40e_tx_map()
3821 size = skb_frag_size(&sinfo->frags[i]); in i40e_xmit_xdp_ring()
i40e_xsk.c
333 memcpy(addr, skb_frag_page(frag), skb_frag_size(frag)); in i40e_construct_skb_zc()
336 addr, 0, skb_frag_size(frag)); in i40e_construct_skb_zc()
i40e_txrx.h
516 size = skb_frag_size(frag++); in i40e_xmit_descriptor_count()
/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_tx.c
664 remaining -= skb_frag_size(frag); in tx_sync_info_get()
738 fsz = skb_frag_size(frag); in tx_post_resync_dump()
795 orig_fsz = skb_frag_size(f); in mlx5e_ktls_tx_handle_ooo()
/linux/drivers/net/ethernet/netronome/nfp/nfd3/
rings.c
66 skb_frag_size(frag), DMA_TO_DEVICE); in nfp_nfd3_tx_ring_reset()
/linux/drivers/infiniband/hw/hfi1/
vnic_sdma.c
70 skb_frag_size(frag), in build_vnic_ulp_payload()
/linux/drivers/net/ethernet/mellanox/mlx4/
en_tx.c
707 skb_frag_size(&shinfo->frags[0])); in build_inline_wqe()
728 skb_frag_size(&shinfo->frags[0])); in build_inline_wqe()
824 byte_count = skb_frag_size(frag); in mlx4_en_build_dma_wqe()
/linux/include/net/libeth/
xdp.h
736 .len = skb_frag_size(frag) & LIBETH_XDP_TX_LEN, in libeth_xdp_tx_fill_buf()
737 .flags = skb_frag_size(frag) & LIBETH_XDP_TX_FLAGS, in libeth_xdp_tx_fill_buf()
919 __libeth_xdp_tx_len(skb_frag_size(frag)), in libeth_xdp_xmit_queue_frag()
/linux/drivers/target/iscsi/cxgbit/
cxgbit_target.c
886 skb_frag_size(dfrag), skb_frag_off(dfrag)); in cxgbit_handle_immediate_data()
1405 skb_frag_size(&ssi->frags[i])); in cxgbit_lro_skb_dump()
1449 len = skb_frag_size(&hssi->frags[hfrag_idx]); in cxgbit_lro_skb_merge()
1469 len += skb_frag_size(&hssi->frags[dfrag_idx]); in cxgbit_lro_skb_merge()
/linux/drivers/net/ethernet/chelsio/cxgb4/
sge.c
257 *++addr = skb_frag_dma_map(dev, fp, 0, skb_frag_size(fp), in cxgb4_map_skb()
266 dma_unmap_page(dev, *--addr, skb_frag_size(fp), DMA_TO_DEVICE); in cxgb4_map_skb()
285 dma_unmap_page(dev, *addr++, skb_frag_size(fp), DMA_TO_DEVICE); in unmap_skb()
839 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); in cxgb4_write_sgl()
855 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in cxgb4_write_sgl()
856 to->len[1] = cpu_to_be32(skb_frag_size(&si->frags[++i])); in cxgb4_write_sgl()
861 to->len[0] = cpu_to_be32(skb_frag_size(&si->frags[i])); in cxgb4_write_sgl()
914 frag_size = skb_frag_size(frag); in cxgb4_write_partial_sgl()
920 frag_size = skb_frag_size(frag); in cxgb4_write_partial_sgl()
923 frag_size = min(len, skb_frag_size(frag) - start); in cxgb4_write_partial_sgl()
[all …]
