
Searched refs:rx_cfg (Results 1 – 25 of 31) sorted by relevance


/linux/drivers/hsi/
hsi_core.c
63 kfree(cl->rx_cfg.channels); in hsi_client_release()
86 cl->rx_cfg = info->rx_cfg; in hsi_new_client()
87 if (cl->rx_cfg.channels) { in hsi_new_client()
88 size = cl->rx_cfg.num_channels * sizeof(*cl->rx_cfg.channels); in hsi_new_client()
89 cl->rx_cfg.channels = kmemdup(info->rx_cfg.channels, size, in hsi_new_client()
91 if (!cl->rx_cfg.channels) in hsi_new_client()
217 &cl->rx_cfg.mode); in hsi_add_client_from_dt()
226 cl->rx_cfg.mode = mode; in hsi_add_client_from_dt()
234 cl->rx_cfg.speed = cl->tx_cfg.speed; in hsi_add_client_from_dt()
237 &cl->rx_cfg.flow); in hsi_add_client_from_dt()
[all …]
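
The hsi_new_client() hits above show the deep-copy idiom used for rx_cfg: the whole config is copied by struct assignment, and the per-channel array is then duplicated with kmemdup() so that hsi_client_release() can kfree() it independently of the hsi_board_info template. A minimal sketch of that idiom, assuming the field names shown in the matches; the helper name and error handling are illustrative, not the driver's:

	static int hsi_copy_rx_cfg(struct hsi_client *cl,
				   const struct hsi_board_info *info)
	{
		size_t size;

		cl->rx_cfg = info->rx_cfg;	/* shallow copy of the whole config */
		if (!cl->rx_cfg.channels)
			return 0;

		/* duplicate the channel array so the client owns its own copy */
		size = cl->rx_cfg.num_channels * sizeof(*cl->rx_cfg.channels);
		cl->rx_cfg.channels = kmemdup(info->rx_cfg.channels, size,
					      GFP_KERNEL);
		if (!cl->rx_cfg.channels)
			return -ENOMEM;

		return 0;
	}
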
/linux/drivers/staging/pi433/
pi433_if.c
/linux/drivers/net/ethernet/apple/
bmac.c
896 unsigned short rx_cfg; in bmac_rx_off() local
898 rx_cfg = bmread(dev, RXCFG); in bmac_rx_off()
899 rx_cfg &= ~RxMACEnable; in bmac_rx_off()
900 bmwrite(dev, RXCFG, rx_cfg); in bmac_rx_off()
902 rx_cfg = bmread(dev, RXCFG); in bmac_rx_off()
903 } while (rx_cfg & RxMACEnable); in bmac_rx_off()
909 unsigned short rx_cfg; in bmac_rx_on() local
911 rx_cfg = bmread(dev, RXCFG); in bmac_rx_on()
912 rx_cfg |= RxMACEnable; in bmac_rx_on()
913 if (hash_enable) rx_cfg |= RxHashFilterEnable; in bmac_rx_on()
[all …]
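
bmac_rx_off()/bmac_rx_on() above are a plain read-modify-write of the RXCFG register through the driver's own bmread()/bmwrite() accessors; the off path additionally polls until the hardware drops RxMACEnable. Reassembled from the fragments above as a sketch (not line-accurate; the real function may also bound the poll):

	static void bmac_rx_off_sketch(struct net_device *dev)
	{
		unsigned short rx_cfg;

		rx_cfg = bmread(dev, RXCFG);	/* current RX configuration */
		rx_cfg &= ~RxMACEnable;		/* stop the receive MAC */
		bmwrite(dev, RXCFG, rx_cfg);
		do {				/* wait for the MAC to quiesce */
			rx_cfg = bmread(dev, RXCFG);
		} while (rx_cfg & RxMACEnable);
	}
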
/linux/drivers/net/ethernet/google/gve/
gve_main.c
117 for (ring = 0; ring < priv->rx_cfg.num_queues; ring++) { in gve_get_stats()
249 priv->rx_cfg.num_queues; in gve_alloc_stats_report()
445 priv->rx_cfg.max_queues = min_t(int, priv->rx_cfg.max_queues, in gve_alloc_notify_blocks()
450 priv->rx_cfg.max_queues); in gve_alloc_notify_blocks()
453 if (priv->rx_cfg.num_queues > priv->rx_cfg.max_queues) in gve_alloc_notify_blocks()
454 priv->rx_cfg.num_queues = priv->rx_cfg.max_queues; in gve_alloc_notify_blocks()
765 num_rx_qpls = gve_num_rx_qpls(&priv->rx_cfg, gve_is_qpl(priv)); in gve_register_qpls()
806 num_rx_qpls = gve_num_rx_qpls(&priv->rx_cfg, gve_is_qpl(priv)); in gve_unregister_qpls()
863 err = gve_adminq_create_rx_queues(priv, priv->rx_cfg.num_queues); in gve_create_rings()
866 priv->rx_cfg.num_queues); in gve_create_rings()
[all …]
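
The gve_alloc_notify_blocks() lines above appear to cap rx_cfg.max_queues by the interrupt-vector budget that was actually obtained, and then clamp num_queues so it never exceeds that cap. A sketch of the clamp; vecs_for_rx is a placeholder for the budget, which the listing truncates:

	/* never advertise more RX queues than we have vectors to service */
	priv->rx_cfg.max_queues = min_t(int, priv->rx_cfg.max_queues,
					vecs_for_rx);	/* placeholder name */
	if (priv->rx_cfg.num_queues > priv->rx_cfg.max_queues)
		priv->rx_cfg.num_queues = priv->rx_cfg.max_queues;
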
gve_ethtool.c
105 for (i = 0; i < priv->rx_cfg.num_queues; i++) in gve_get_strings()
139 (priv->rx_cfg.num_queues * NUM_GVE_RX_CNTS) + in gve_get_sset_count()
177 rx_qid_to_stats_idx = kmalloc_array(priv->rx_cfg.num_queues, in gve_get_ethtool_stats()
181 for (ring = 0; ring < priv->rx_cfg.num_queues; ring++) { in gve_get_ethtool_stats()
202 ring < priv->rx_cfg.num_queues; ring++) { in gve_get_ethtool_stats()
270 GVE_RX_STATS_REPORT_NUM * priv->rx_cfg.num_queues; in gve_get_ethtool_stats()
277 (priv->rx_cfg.num_queues - num_stopped_rxqs) + in gve_get_ethtool_stats()
291 if (queue_id < 0 || queue_id >= priv->rx_cfg.num_queues) { in gve_get_ethtool_stats()
299 for (ring = 0; ring < priv->rx_cfg.num_queues; ring++) { in gve_get_ethtool_stats()
354 i += priv->rx_cfg.num_queues * NUM_GVE_RX_CNTS; in gve_get_ethtool_stats()
[all …]
gve.h
750 struct gve_queue_config rx_cfg; member
1041 static inline u32 gve_num_rx_qpls(const struct gve_queue_config *rx_cfg, in gve_num_rx_qpls() argument
1046 return rx_cfg->num_queues; in gve_num_rx_qpls()
gve_flow_rule.c
126 if (fsp->ring_cookie >= priv->rx_cfg.num_queues) in gve_generate_flow_rule()
/linux/drivers/net/wireless/intel/iwlwifi/pcie/
ctxt-info.c
168 struct iwl_context_info_rbd_cfg *rx_cfg; in iwl_pcie_ctxt_info_init() local
214 rx_cfg = &ctxt_info->rbd_cfg; in iwl_pcie_ctxt_info_init()
215 rx_cfg->free_rbd_addr = cpu_to_le64(trans_pcie->rxq->bd_dma); in iwl_pcie_ctxt_info_init()
216 rx_cfg->used_rbd_addr = cpu_to_le64(trans_pcie->rxq->used_bd_dma); in iwl_pcie_ctxt_info_init()
217 rx_cfg->status_wr_ptr = cpu_to_le64(trans_pcie->rxq->rb_stts_dma); in iwl_pcie_ctxt_info_init()
/linux/drivers/hsi/clients/
hsi_char.c
355 tmp = cl->rx_cfg; in hsc_rx_set()
356 cl->rx_cfg.mode = rxc->mode; in hsc_rx_set()
357 cl->rx_cfg.num_hw_channels = rxc->channels; in hsc_rx_set()
358 cl->rx_cfg.flow = rxc->flow; in hsc_rx_set()
361 cl->rx_cfg = tmp; in hsc_rx_set()
372 rxc->mode = cl->rx_cfg.mode; in hsc_rx_get()
373 rxc->channels = cl->rx_cfg.num_hw_channels; in hsc_rx_get()
374 rxc->flow = cl->rx_cfg.flow; in hsc_rx_get()
425 if (channel->ch >= channel->cl->rx_cfg.num_hw_channels) in hsc_read()
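
hsc_rx_set() above snapshots the current rx_cfg, applies the values requested through the character-device interface, and rolls back to the snapshot if the new setup is rejected. A sketch of that rollback, assuming hsi_setup() is the validation step (the listing elides it) and using the field names from the matches; the helper name is illustrative:

	static int hsc_rx_set_sketch(struct hsi_client *cl,
				     struct hsc_rx_config *rxc)
	{
		struct hsi_config tmp = cl->rx_cfg;	/* snapshot for rollback */
		int ret;

		cl->rx_cfg.mode = rxc->mode;
		cl->rx_cfg.num_hw_channels = rxc->channels;
		cl->rx_cfg.flow = rxc->flow;

		ret = hsi_setup(cl);		/* push the config to the port */
		if (ret < 0)
			cl->rx_cfg = tmp;	/* restore last working config */

		return ret;
	}
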
nokia-modem.c
177 ssip.rx_cfg = cl->rx_cfg; in nokia_modem_probe()
200 cmtspeech.rx_cfg = cl->rx_cfg; in nokia_modem_probe()
/linux/drivers/net/ethernet/brocade/bna/
bna_tx_rx.c
1625 cfg_req->rx_cfg.frame_size = bna_enet_mtu_get(&rx->bna->enet); in bna_bfi_rx_enet_start()
1651 cfg_req->rx_cfg.multi_buffer = in bna_bfi_rx_enet_start()
1690 cfg_req->rx_cfg.rxq_type = BFI_ENET_RXQ_LARGE_SMALL; in bna_bfi_rx_enet_start()
1694 cfg_req->rx_cfg.rxq_type = BFI_ENET_RXQ_HDS; in bna_bfi_rx_enet_start()
1695 cfg_req->rx_cfg.hds.type = rx->hds_cfg.hdr_type; in bna_bfi_rx_enet_start()
1696 cfg_req->rx_cfg.hds.force_offset = rx->hds_cfg.forced_offset; in bna_bfi_rx_enet_start()
1697 cfg_req->rx_cfg.hds.max_header_size = rx->hds_cfg.forced_offset; in bna_bfi_rx_enet_start()
1701 cfg_req->rx_cfg.rxq_type = BFI_ENET_RXQ_SINGLE; in bna_bfi_rx_enet_start()
1707 cfg_req->rx_cfg.strip_vlan = rx->rxf.vlan_strip_status; in bna_bfi_rx_enet_start()
1741 bna_rx_res_check(struct bna_rx_mod *rx_mod, struct bna_rx_config *rx_cfg) in bna_rx_res_check() argument
[all …]
bna.h
346 struct bna_rx_config *rx_cfg,
/linux/include/linux/hsi/
hsi.h
104 struct hsi_config rx_cfg; member
129 struct hsi_config rx_cfg; member
224 struct hsi_config rx_cfg; member
/linux/drivers/net/ethernet/ti/icssg/
icssg_common.c
359 struct k3_udma_glue_rx_channel_cfg rx_cfg; in prueth_init_rx_chns() local
375 memset(&rx_cfg, 0, sizeof(rx_cfg)); in prueth_init_rx_chns()
376 rx_cfg.swdata_size = PRUETH_NAV_SW_DATA_SIZE; in prueth_init_rx_chns()
377 rx_cfg.flow_id_num = max_rflows; in prueth_init_rx_chns()
378 rx_cfg.flow_id_base = -1; /* udmax will auto select flow id base */ in prueth_init_rx_chns()
385 &rx_cfg); in prueth_init_rx_chns()
415 for (i = 0; i < rx_cfg.flow_id_num; i++) { in prueth_init_rx_chns()
426 .rx_cfg = rxring_cfg, in prueth_init_rx_chns()
434 rx_flow_cfg.rx_cfg.size = max_desc_num; in prueth_init_rx_chns()
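
prueth_init_rx_chns() above fills a k3_udma_glue_rx_channel_cfg (declared in linux/dma/k3-udma-glue.h, also listed below) on the stack and hands it to the UDMA glue layer, with flow_id_base set to -1 so the core picks the base itself. A condensed sketch of that setup using the values from the matches; the channel name string and error handling are illustrative:

	struct k3_udma_glue_rx_channel_cfg rx_cfg;
	struct k3_udma_glue_rx_channel *chn;

	memset(&rx_cfg, 0, sizeof(rx_cfg));
	rx_cfg.swdata_size = PRUETH_NAV_SW_DATA_SIZE;	/* per-descriptor sw data */
	rx_cfg.flow_id_num = max_rflows;		/* number of RX flows used */
	rx_cfg.flow_id_base = -1;			/* udmax auto-selects the base */

	chn = k3_udma_glue_request_rx_chn(dev, "rx", &rx_cfg);
	if (IS_ERR(chn))
		return PTR_ERR(chn);
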
/linux/drivers/net/ethernet/micrel/
ksz884x.c
1207 u32 rx_cfg; member
2837 u32 rx_cfg; in set_flow_ctrl() local
2840 rx_cfg = hw->rx_cfg; in set_flow_ctrl()
2843 hw->rx_cfg |= DMA_RX_FLOW_ENABLE; in set_flow_ctrl()
2845 hw->rx_cfg &= ~DMA_RX_FLOW_ENABLE; in set_flow_ctrl()
2851 if (rx_cfg != hw->rx_cfg) in set_flow_ctrl()
2852 writel(hw->rx_cfg, hw->io + KS_DMA_RX_CTRL); in set_flow_ctrl()
3405 hw->rx_cfg = (DMA_RX_BROADCAST | DMA_RX_UNICAST | in hw_setup()
3407 hw->rx_cfg |= KS884X_DMA_RX_MULTICAST; in hw_setup()
3410 hw->rx_cfg |= (DMA_RX_CSUM_TCP | DMA_RX_CSUM_IP); in hw_setup()
[all …]
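
set_flow_ctrl() in ksz884x.c above keeps a cached copy of the RX DMA control word in hw->rx_cfg and writes the KS_DMA_RX_CTRL register only when a bit actually changes, avoiding a redundant MMIO write. A sketch of that pattern; the function name and the rx argument are illustrative stand-ins for the driver's own signature:

	static void set_rx_flow_ctrl_sketch(struct ksz_hw *hw, int rx)
	{
		u32 rx_cfg = hw->rx_cfg;	/* remember the old value */

		if (rx)
			hw->rx_cfg |= DMA_RX_FLOW_ENABLE;
		else
			hw->rx_cfg &= ~DMA_RX_FLOW_ENABLE;

		/* touch the register only if the configuration really changed */
		if (rx_cfg != hw->rx_cfg)
			writel(hw->rx_cfg, hw->io + KS_DMA_RX_CTRL);
	}
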
/linux/drivers/net/ethernet/neterion/
s2io.c
699 struct rx_ring_config *rx_cfg = &config->rx_cfg[i]; in init_shared_mem() local
702 if (rx_cfg->num_rxd % (rxd_count[nic->rxd_mode] + 1)) { in init_shared_mem()
708 size += rx_cfg->num_rxd; in init_shared_mem()
709 ring->block_count = rx_cfg->num_rxd / in init_shared_mem()
711 ring->pkt_cnt = rx_cfg->num_rxd - ring->block_count; in init_shared_mem()
719 struct rx_ring_config *rx_cfg = &config->rx_cfg[i]; in init_shared_mem() local
724 ring->rx_curr_get_info.ring_len = rx_cfg->num_rxd - 1; in init_shared_mem()
727 ring->rx_curr_put_info.ring_len = rx_cfg->num_rxd - 1; in init_shared_mem()
731 blk_cnt = rx_cfg->num_rxd / (rxd_count[nic->rxd_mode] + 1); in init_shared_mem()
791 struct rx_ring_config *rx_cfg = &config->rx_cfg[i]; in init_shared_mem() local
[all …]
/linux/include/linux/dma/
k3-udma-glue.h
74 struct k3_ring_cfg rx_cfg; member
/linux/drivers/hsi/controllers/
omap_ssi_port.c
484 writel_relaxed(cl->rx_cfg.num_hw_channels, ssr + SSI_SSR_CHANNELS_REG); in ssi_setup()
488 (cl->rx_cfg.mode != SSI_MODE_FRAME)) in ssi_setup()
490 writel_relaxed(cl->rx_cfg.mode, ssr + SSI_SSR_MODE_REG); in ssi_setup()
491 omap_port->channels = max(cl->rx_cfg.num_hw_channels, in ssi_setup()
503 omap_port->ssr.channels = cl->rx_cfg.num_hw_channels; in ssi_setup()
504 omap_port->ssr.mode = cl->rx_cfg.mode; in ssi_setup()
/linux/drivers/net/ethernet/chelsio/cxgb3/
t3_hw.c
1133 static void t3_gate_rx_traffic(struct cmac *mac, u32 *rx_cfg, in t3_gate_rx_traffic() argument
1140 *rx_cfg = t3_read_reg(mac->adapter, A_XGM_RX_CFG); in t3_gate_rx_traffic()
1155 static void t3_open_rx_traffic(struct cmac *mac, u32 rx_cfg, in t3_open_rx_traffic() argument
1161 rx_cfg); in t3_open_rx_traffic()
1186 u32 rx_cfg, rx_hash_high, rx_hash_low; in t3_link_changed() local
1190 t3_gate_rx_traffic(mac, &rx_cfg, &rx_hash_high, &rx_hash_low); in t3_link_changed()
1199 t3_open_rx_traffic(mac, rx_cfg, rx_hash_high, rx_hash_low); in t3_link_changed()
1239 u32 rx_cfg, rx_hash_high, rx_hash_low; in t3_link_fault() local
1241 t3_gate_rx_traffic(mac, &rx_cfg, &rx_hash_high, &rx_hash_low); in t3_link_fault()
1249 t3_open_rx_traffic(mac, rx_cfg, rx_hash_high, rx_hash_low); in t3_link_fault()
/linux/drivers/net/ethernet/hisilicon/hns3/hns3pf/
hclge_debugfs.c
2724 u8 rx_cfg; in hclge_get_vlan_rx_offload_cfg() local
2742 rx_cfg = req->vport_vlan_cfg; in hclge_get_vlan_rx_offload_cfg()
2743 vlan_cfg->strip_tag1 = hnae3_get_bit(rx_cfg, HCLGE_REM_TAG1_EN_B); in hclge_get_vlan_rx_offload_cfg()
2744 vlan_cfg->strip_tag2 = hnae3_get_bit(rx_cfg, HCLGE_REM_TAG2_EN_B); in hclge_get_vlan_rx_offload_cfg()
2745 vlan_cfg->drop_tag1 = hnae3_get_bit(rx_cfg, HCLGE_DISCARD_TAG1_EN_B); in hclge_get_vlan_rx_offload_cfg()
2746 vlan_cfg->drop_tag2 = hnae3_get_bit(rx_cfg, HCLGE_DISCARD_TAG2_EN_B); in hclge_get_vlan_rx_offload_cfg()
2747 vlan_cfg->pri_only1 = hnae3_get_bit(rx_cfg, HCLGE_SHOW_TAG1_EN_B); in hclge_get_vlan_rx_offload_cfg()
2748 vlan_cfg->pri_only2 = hnae3_get_bit(rx_cfg, HCLGE_SHOW_TAG2_EN_B); in hclge_get_vlan_rx_offload_cfg()
/linux/drivers/usb/gadget/udc/
bcm63xx_udc.c
467 const struct iudma_ch_cfg *rx_cfg = &iudma_defaults[i]; in bcm63xx_fifo_setup() local
473 ((rx_fifo_slot + rx_cfg->n_fifo_slots - 1) << in bcm63xx_fifo_setup()
475 rx_fifo_slot += rx_cfg->n_fifo_slots; in bcm63xx_fifo_setup()
478 is_hs ? rx_cfg->max_pkt_hs : rx_cfg->max_pkt_fs, in bcm63xx_fifo_setup()
/linux/drivers/net/ethernet/ti/
am65-cpsw-nuss.c
2383 struct k3_udma_glue_rx_channel_cfg rx_cfg = { 0 }; in am65_cpsw_nuss_init_rx_chns() local
2394 rx_cfg.swdata_size = AM65_CPSW_NAV_SW_DATA_SIZE; in am65_cpsw_nuss_init_rx_chns()
2395 rx_cfg.flow_id_num = common->rx_ch_num_flows; in am65_cpsw_nuss_init_rx_chns()
2396 rx_cfg.flow_id_base = common->rx_flow_id_base; in am65_cpsw_nuss_init_rx_chns()
2400 rx_chn->descs_num = max_desc_num * rx_cfg.flow_id_num; in am65_cpsw_nuss_init_rx_chns()
2411 rx_chn->rx_chn = k3_udma_glue_request_rx_chn(dev, "rx", &rx_cfg); in am65_cpsw_nuss_init_rx_chns()
2437 for (i = 0; i < rx_cfg.flow_id_num; i++) { in am65_cpsw_nuss_init_rx_chns()
2448 .rx_cfg = rxring_cfg, in am65_cpsw_nuss_init_rx_chns()
2460 rx_flow_cfg.rx_cfg.size = max_desc_num; in am65_cpsw_nuss_init_rx_chns()
/linux/drivers/dma/ti/
k3-udma-glue.c
776 flow_cfg->rx_cfg.dma_dev = k3_udma_glue_rx_get_dma_device(rx_chn); in k3_udma_glue_cfg_rx_flow()
777 flow_cfg->rxfdq_cfg.dma_dev = flow_cfg->rx_cfg.dma_dev; in k3_udma_glue_cfg_rx_flow()
781 flow_cfg->rx_cfg.asel = rx_chn->common.atype_asel; in k3_udma_glue_cfg_rx_flow()
785 ret = k3_ringacc_ring_cfg(flow->ringrx, &flow_cfg->rx_cfg); in k3_udma_glue_cfg_rx_flow()
/linux/drivers/soc/qcom/
qcom-geni-se.c
431 bool msb_to_lsb, bool tx_cfg, bool rx_cfg) in geni_se_config_packing() argument
468 if (rx_cfg) { in geni_se_config_packing()
/linux/include/linux/soc/qcom/
geni-se.h
491 bool msb_to_lsb, bool tx_cfg, bool rx_cfg);
