Home
last modified time | relevance | path

Searched refs:rx_buf_len (Results 1 – 25 of 38) sorted by relevance

12

/linux/drivers/net/ethernet/intel/i40e/
H A D i40e_txrx.h 135 static inline int i40e_compute_pad(int rx_buf_len) in i40e_compute_pad() argument
139 page_size = ALIGN(rx_buf_len, PAGE_SIZE / 2); in i40e_compute_pad()
140 pad_size = SKB_WITH_OVERHEAD(page_size) - rx_buf_len; in i40e_compute_pad()
147 int rx_buf_len; in i40e_skb_pad() local
157 rx_buf_len = I40E_RXBUFFER_3072 + SKB_DATA_ALIGN(NET_IP_ALIGN); in i40e_skb_pad()
159 rx_buf_len = I40E_RXBUFFER_1536; in i40e_skb_pad()
162 rx_buf_len -= NET_IP_ALIGN; in i40e_skb_pad()
164 return i40e_compute_pad(rx_buf_len); in i40e_skb_pad()
363 u16 rx_buf_len; member
453 if (ring->rx_buf_len > (PAGE_SIZE / 2)) in i40e_rx_pg_order()
H A D i40e_adminq.h 27 u16 rx_buf_len; /* Admin Receive Queue buffer length */ member
/linux/drivers/net/ethernet/intel/ixgbe/
H A D ixgbe.h 95 static inline int ixgbe_compute_pad(int rx_buf_len) in ixgbe_compute_pad() argument
99 page_size = ALIGN(rx_buf_len, PAGE_SIZE / 2); in ixgbe_compute_pad()
100 pad_size = SKB_WITH_OVERHEAD(page_size) - rx_buf_len; in ixgbe_compute_pad()
107 int rx_buf_len; in ixgbe_skb_pad() local
117 rx_buf_len = IXGBE_RXBUFFER_3K + SKB_DATA_ALIGN(NET_IP_ALIGN); in ixgbe_skb_pad()
119 rx_buf_len = IXGBE_RXBUFFER_1536; in ixgbe_skb_pad()
122 rx_buf_len -= NET_IP_ALIGN; in ixgbe_skb_pad()
124 return ixgbe_compute_pad(rx_buf_len); in ixgbe_skb_pad()
406 u16 rx_buf_len; member
/linux/drivers/net/ethernet/hisilicon/hns3/
H A D hns3_ethtool.c 686 kernel_param->rx_buf_len = priv->ring[rx_queue_index].buf_size; in hns3_get_ringparam()
1134 if (kernel_param->rx_buf_len != RX_BUF_LEN_2K && in hns3_check_ringparam()
1135 kernel_param->rx_buf_len != RX_BUF_LEN_4K) { in hns3_check_ringparam()
1169 old_ringparam->rx_buf_len = priv->ring[queue_num].buf_size; in hns3_is_ringparam_changed()
1170 new_ringparam->rx_buf_len = kernel_param->rx_buf_len; in hns3_is_ringparam_changed()
1174 old_ringparam->rx_buf_len == new_ringparam->rx_buf_len) { in hns3_is_ringparam_changed()
1182 static int hns3_change_rx_buf_len(struct net_device *ndev, u32 rx_buf_len) in hns3_change_rx_buf_len() argument
1188 h->kinfo.rx_buf_len = rx_buf_len; in hns3_change_rx_buf_len()
1191 h->kinfo.tqp[i]->buf_size = rx_buf_len; in hns3_change_rx_buf_len()
1192 priv->ring[i + h->kinfo.num_tqps].buf_size = rx_buf_len; in hns3_change_rx_buf_len()
[all …]
H A D hns3_ethtool.h 34 u32 rx_buf_len; member
H A D hclge_mbx.h 205 __le16 rx_buf_len; member
/linux/drivers/net/ethernet/hisilicon/hns3/hns3vf/
H A D hclgevf_main.h 163 u16 rx_buf_len; member
240 u16 rx_buf_len; member
/linux/drivers/net/ethernet/marvell/octeontx2/nic/
H A D otx2_ethtool.c 381 kernel_ring->rx_buf_len = pfvf->hw.rbuf_len; in otx2_get_ringparam()
391 u32 rx_buf_len = kernel_ring->rx_buf_len; in otx2_set_ringparam() local
404 if (rx_buf_len && (rx_buf_len < 1536 || rx_buf_len > 32768)) { in otx2_set_ringparam()
442 rx_buf_len == old_rx_buf_len && xqe_size == pfvf->hw.xqe_size) in otx2_set_ringparam()
452 pfvf->hw.rbuf_len = rx_buf_len; in otx2_set_ringparam()
/linux/drivers/net/ethernet/intel/iavf/
H A D iavf_adminq.h 27 u16 rx_buf_len; /* Admin Receive Queue buffer length */ member
H A D iavf_txrx.h 286 u32 rx_buf_len; member
/linux/io_uring/
H A D zcrx.c 449 if (reg->rx_buf_len) { in io_zcrx_create_area()
450 if (!is_power_of_2(reg->rx_buf_len) || in io_zcrx_create_area()
451 reg->rx_buf_len < PAGE_SIZE) in io_zcrx_create_area()
453 buf_size_shift = ilog2(reg->rx_buf_len); in io_zcrx_create_area()
813 if (reg->rx_buf_len) in zcrx_register_netdev()
920 reg.rx_buf_len = 1U << ifq->niov_shift; in io_register_zcrx()
/linux/drivers/net/ethernet/sfc/siena/
H A D efx_common.c 369 size_t rx_buf_len; in efx_start_datapath() local
378 rx_buf_len = (sizeof(struct efx_rx_page_state) + EFX_XDP_HEADROOM + in efx_start_datapath()
381 if (rx_buf_len <= PAGE_SIZE) { in efx_start_datapath()
395 efx->rx_buffer_order = get_order(rx_buf_len); in efx_start_datapath()
/linux/drivers/net/ethernet/sfc/
H A D efx_common.c 365 size_t rx_buf_len; in efx_start_datapath() local
374 rx_buf_len = (sizeof(struct efx_rx_page_state) + EFX_XDP_HEADROOM + in efx_start_datapath()
377 if (rx_buf_len <= PAGE_SIZE) { in efx_start_datapath()
391 efx->rx_buffer_order = get_order(rx_buf_len); in efx_start_datapath()
/linux/drivers/net/can/usb/etas_es58x/
H A D es58x_core.c 1710 u16 rx_buf_len = usb_maxpacket(es58x_dev->udev, es58x_dev->rx_pipe); in es58x_alloc_rx_urbs() local
1717 ret = es58x_alloc_urb(es58x_dev, &urb, &buf, rx_buf_len, in es58x_alloc_rx_urbs()
1723 buf, rx_buf_len, es58x_read_bulk_callback, in es58x_alloc_rx_urbs()
1730 usb_free_coherent(es58x_dev->udev, rx_buf_len, in es58x_alloc_rx_urbs()
1743 __func__, i, rx_buf_len); in es58x_alloc_rx_urbs()
/linux/drivers/net/ethernet/huawei/hinic3/
H A D hinic3_nic_io.c 233 nic_io->rx_buf_len = nic_dev->rx_buf_len; in hinic3_init_nic_io()
856 nic_io->rx_buf_len); in hinic3_init_qp_ctxts()
/linux/drivers/net/wireless/ath/wil6210/
H A D txrx_edma.c 155 unsigned int sz = wil->rx_buf_len; in wil_ring_alloc_skb_edma()
283 dma_unmap_single(dev, pa, wil->rx_buf_len, in wil_move_all_rx_buff_to_free_list()
595 wil->rx_buf_len = rx_large_buf ? in wil_rx_buf_len_init_edma()
640 rc = wil_wmi_cfg_def_rx_offload(wil, wil->rx_buf_len); in wil_rx_init_edma()
858 unsigned int sz = wil->rx_buf_len; in wil_sring_reap_rx_edma()
H A D txrx.c 260 unsigned int sz = wil->rx_buf_len + ETH_HLEN + wil_rx_snaplen(); in wil_vring_alloc_skb()
450 unsigned int sz = wil->rx_buf_len + ETH_HLEN + snaplen; in wil_vring_reap_rx()
1037 wil->rx_buf_len = rx_large_buf ? in wil_rx_buf_len_init()
1039 if (mtu_max > wil->rx_buf_len) { in wil_rx_buf_len_init()
1045 wil->rx_buf_len = mtu_max; in wil_rx_buf_len_init()
/linux/drivers/i2c/busses/
H A D i2c-designware-core.h 289 u32 rx_buf_len; member
H A D i2c-designware-master.c 534 len = dev->rx_buf_len; in i2c_dw_read()
565 dev->rx_buf_len = len; in i2c_dw_read()
/linux/drivers/net/ethernet/intel/ice/
H A D ice_base.c 501 rlan_ctx.dbuf = DIV_ROUND_UP(ring->rx_buf_len, in ice_setup_rx_ctx()
564 ICE_MAX_CHAINED_RX_BUFS * ring->rx_buf_len); in ice_setup_rx_ctx()
625 rq->rx_buf_len = fq.buf_len; in ice_rxq_pp_create()
/linux/drivers/net/ethernet/hisilicon/hns3/hns3pf/
H A D hclge_main.h 370 u16 rx_buf_len; member
900 u16 rx_buf_len; member
H A D hclge_mbx.c 523 queue_info->rx_buf_len = cpu_to_le16(hdev->rx_buf_len); in hclge_get_vf_queue_info()
/linux/drivers/net/ethernet/google/gve/
H A D gve_ethtool.c 555 kernel_cmd->rx_buf_len = priv->rx_cfg.packet_buffer_size; in gve_get_ringparam()
618 err = gve_set_rx_buf_len_config(priv, kernel_cmd->rx_buf_len, extack, in gve_set_ringparam()
/linux/drivers/net/ethernet/broadcom/genet/
H A D bcmgenet.h 612 unsigned int rx_buf_len; member
/linux/drivers/net/ethernet/sfc/falcon/
H A D efx.c 594 size_t rx_buf_len; in ef4_start_datapath() local
603 rx_buf_len = (sizeof(struct ef4_rx_page_state) + in ef4_start_datapath()
605 if (rx_buf_len <= PAGE_SIZE) { in ef4_start_datapath()
619 efx->rx_buffer_order = get_order(rx_buf_len); in ef4_start_datapath()

12