/linux/drivers/tty/serial/

  samsung_tty.c
      109   struct dma_chan *tx_chan;   (member)
      305   if (dma && dma->tx_chan && ourport->tx_in_progress == S3C24XX_TX_DMA) {   in s3c24xx_serial_stop_tx()
      306   dmaengine_pause(dma->tx_chan);   in s3c24xx_serial_stop_tx()
      307   dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state);   in s3c24xx_serial_stop_tx()
      308   dmaengine_terminate_all(dma->tx_chan);   in s3c24xx_serial_stop_tx()
      309   dma_sync_single_for_cpu(dma->tx_chan->device->dev,   in s3c24xx_serial_stop_tx()
      338   dmaengine_tx_status(dma->tx_chan, dma->tx_cookie, &state);   in s3c24xx_serial_tx_dma_complete()
      342   dma_sync_single_for_cpu(dma->tx_chan->device->dev,   in s3c24xx_serial_tx_dma_complete()
      444   dma_sync_single_for_device(dma->tx_chan->device->dev,   in s3c24xx_serial_start_tx_dma()
      448   dma->tx_desc = dmaengine_prep_slave_single(dma->tx_chan,   in s3c24xx_serial_start_tx_dma()
      [all …]

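The samsung_tty.c hits above trace the generic dmaengine calls for starting and stopping a TX transfer: sync the buffer for the device, prepare and submit a slave descriptor, and on stop pause the channel, read the residue, terminate, and hand the buffer back to the CPU. The sketch below strings those calls together; the my_port context and its field names are placeholders, not the driver's own types.

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    /* Placeholder TX context; the real driver keeps this state per UART port. */
    struct my_port {
        struct dma_chan *tx_chan;
        dma_cookie_t tx_cookie;
        dma_addr_t tx_addr;     /* streaming DMA mapping of the TX buffer */
        size_t tx_size;
    };

    static int my_port_start_tx_dma(struct my_port *p, size_t count)
    {
        struct dma_async_tx_descriptor *desc;

        /* Hand the buffer to the device before the transfer starts. */
        dma_sync_single_for_device(p->tx_chan->device->dev,
                                   p->tx_addr, count, DMA_TO_DEVICE);

        desc = dmaengine_prep_slave_single(p->tx_chan, p->tx_addr, count,
                                           DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
        if (!desc)
            return -EIO;

        p->tx_cookie = dmaengine_submit(desc);
        dma_async_issue_pending(p->tx_chan);
        return 0;
    }

    static void my_port_stop_tx_dma(struct my_port *p)
    {
        struct dma_tx_state state;

        dmaengine_pause(p->tx_chan);
        /* state.residue reports how many bytes were still queued. */
        dmaengine_tx_status(p->tx_chan, p->tx_cookie, &state);
        dmaengine_terminate_all(p->tx_chan);
        /* Return ownership of the buffer to the CPU. */
        dma_sync_single_for_cpu(p->tx_chan->device->dev,
                                p->tx_addr, p->tx_size, DMA_TO_DEVICE);
    }
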
/linux/drivers/net/ethernet/broadcom/

  bcm63xx_enet.c
      499   ENETDMAC_IR, priv->tx_chan);   in bcm_enet_poll()
      521   ENETDMAC_IRMASK, priv->tx_chan);   in bcm_enet_poll()
      565   enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->tx_chan);   in bcm_enet_isr_dma()
      648   ENETDMAC_CHANCFG, priv->tx_chan);   in bcm_enet_start_xmit()
      936   enet_dmac_writel(priv, 0, ENETDMAC_IRMASK, priv->tx_chan);   in bcm_enet_open()
      1027  ENETDMAS_RSTART_REG, priv->tx_chan);   in bcm_enet_open()
      1032  ENETDMAC_RSTART, priv->tx_chan);   in bcm_enet_open()
      1038  enet_dmas_writel(priv, 0, ENETDMAS_SRAM2_REG, priv->tx_chan);   in bcm_enet_open()
      1040  enet_dmas_writel(priv, 0, ENETDMAS_SRAM3_REG, priv->tx_chan);   in bcm_enet_open()
      1042  enet_dmas_writel(priv, 0, ENETDMAS_SRAM4_REG, priv->tx_chan);   in bcm_enet_open()
      [all …]

  bcm63xx_enet.h
      254   int tx_chan;   (member)

/linux/drivers/remoteproc/

  xlnx_r5_remoteproc.c
      78    * @tx_chan: mailbox tx channel
      87    struct mbox_chan *tx_chan;   (member)
      280   ipi->tx_chan = mbox_request_channel_byname(mbox_cl, "tx");   in zynqmp_r5_setup_mbox()
      281   if (IS_ERR(ipi->tx_chan)) {   in zynqmp_r5_setup_mbox()
      282   ipi->tx_chan = NULL;   in zynqmp_r5_setup_mbox()
      290   mbox_free_channel(ipi->tx_chan);   in zynqmp_r5_setup_mbox()
      292   ipi->tx_chan = NULL;   in zynqmp_r5_setup_mbox()
      308   if (ipi->tx_chan) {   in zynqmp_r5_free_mbox()
      309   mbox_free_channel(ipi->tx_chan);   in zynqmp_r5_free_mbox()
      310   ipi->tx_chan = NULL;   in zynqmp_r5_free_mbox()
      [all …]

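Here tx_chan is a mailbox channel rather than a DMA channel: zynqmp_r5_setup_mbox() requests the "tx" channel by name and zynqmp_r5_free_mbox() releases it, clearing the pointer so the mailbox stays optional. A minimal sketch of that request/free pairing follows; the my_ipi wrapper and the caller-provided struct device are illustrative assumptions.

    #include <linux/device.h>
    #include <linux/err.h>
    #include <linux/mailbox_client.h>

    /* Hypothetical per-core IPI context wrapping the mailbox client. */
    struct my_ipi {
        struct mbox_client client;
        struct mbox_chan *tx_chan;
    };

    static int my_ipi_setup(struct my_ipi *ipi, struct device *dev)
    {
        ipi->client.dev = dev;

        /* "tx" must match an mbox-names entry in the device tree node. */
        ipi->tx_chan = mbox_request_channel_byname(&ipi->client, "tx");
        if (IS_ERR(ipi->tx_chan)) {
            int ret = PTR_ERR(ipi->tx_chan);

            ipi->tx_chan = NULL;    /* run without a mailbox */
            return ret;
        }
        return 0;
    }

    static void my_ipi_free(struct my_ipi *ipi)
    {
        if (ipi->tx_chan) {
            mbox_free_channel(ipi->tx_chan);
            ipi->tx_chan = NULL;
        }
    }
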
/linux/drivers/usb/renesas_usbhs/

  fifo.c
      771   return fifo->tx_chan;   in usbhsf_dma_chan_get()
      1250  if (fifo->tx_chan)   in usbhsf_dma_quit()
      1251  dma_release_channel(fifo->tx_chan);   in usbhsf_dma_quit()
      1255  fifo->tx_chan = NULL;   in usbhsf_dma_quit()
      1265  fifo->tx_chan = dma_request_channel(mask, usbhsf_dma_filter,   in usbhsf_dma_init_pdev()
      1286  fifo->tx_chan = dma_request_chan(dev, name);   in usbhsf_dma_init_dt()
      1287  if (IS_ERR(fifo->tx_chan))   in usbhsf_dma_init_dt()
      1288  fifo->tx_chan = NULL;   in usbhsf_dma_init_dt()
      1306  if (fifo->tx_chan || fifo->rx_chan)   in usbhsf_dma_init()
      1309  fifo->tx_chan ? "[TX]" : " ",   in usbhsf_dma_init()

  fifo.h
      25    struct dma_chan *tx_chan;   (member)

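usbhsf_dma_init_dt() treats DMA as optional: dma_request_chan() either returns a usable channel or an ERR_PTR, and on error the driver simply clears tx_chan and carries on in PIO mode. A sketch of that pattern, under the assumption of a simplified my_fifo context and fixed "tx"/"rx" channel names (the driver builds its names at runtime):

    #include <linux/dmaengine.h>
    #include <linux/err.h>

    /* Illustrative FIFO context holding the optional slave channels. */
    struct my_fifo {
        struct dma_chan *tx_chan;
        struct dma_chan *rx_chan;
    };

    static void my_fifo_dma_init(struct my_fifo *fifo, struct device *dev)
    {
        /* No error return: a missing channel just means PIO from now on. */
        fifo->tx_chan = dma_request_chan(dev, "tx");
        if (IS_ERR(fifo->tx_chan))
            fifo->tx_chan = NULL;

        fifo->rx_chan = dma_request_chan(dev, "rx");
        if (IS_ERR(fifo->rx_chan))
            fifo->rx_chan = NULL;
    }

    static void my_fifo_dma_quit(struct my_fifo *fifo)
    {
        if (fifo->tx_chan)
            dma_release_channel(fifo->tx_chan);
        if (fifo->rx_chan)
            dma_release_channel(fifo->rx_chan);
        fifo->tx_chan = NULL;
        fifo->rx_chan = NULL;
    }
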
/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/

  chcr_ktls.c
      207   SMAC_SEL_V(tx_info->smt_idx) | TX_CHAN_V(tx_info->tx_chan);   in chcr_ktls_act_open_req()
      212   TX_QUEUE_V(tx_info->adap->params.tp.tx_modq[tx_info->tx_chan]);   in chcr_ktls_act_open_req()
      259   SMAC_SEL_V(tx_info->smt_idx) | TX_CHAN_V(tx_info->tx_chan);   in chcr_ktls_act_open_req6()
      263   TX_QUEUE_V(tx_info->adap->params.tp.tx_modq[tx_info->tx_chan]);   in chcr_ktls_act_open_req6()
      389   cxgb4_remove_tid(&tx_info->adap->tids, tx_info->tx_chan,   in chcr_ktls_dev_del()
      458   tx_info->tx_chan = pi->tx_chan;   in chcr_ktls_dev_add()
      577   cxgb4_remove_tid(&tx_info->adap->tids, tx_info->tx_chan,   in chcr_ktls_dev_add()
      673   cxgb4_remove_tid(&tx_info->adap->tids, tx_info->tx_chan,   in chcr_ktls_cpl_act_open_rpl()
      984   * @tx_chan - channel number.
      989   struct sge_eth_txq *q, uint32_t tx_chan)   in chcr_ktls_write_tcp_options()   (argument)
      [all …]

  chcr_ktls.h
      61    u8 tx_chan;   (member)

/linux/drivers/mmc/host/

  au1xmmc.c
      112   u32 tx_chan;   (member)
      152   (((h)->flags & HOST_F_XMIT) ? (h)->tx_chan : (h)->rx_chan)
      879   host->tx_chan = au1xxx_dbdma_chan_alloc(memid, txid,   in au1xmmc_dbdma_init()
      881   if (!host->tx_chan) {   in au1xmmc_dbdma_init()
      890   au1xxx_dbdma_chan_free(host->tx_chan);   in au1xmmc_dbdma_init()
      894   au1xxx_dbdma_set_devwidth(host->tx_chan, 8);   in au1xmmc_dbdma_init()
      897   au1xxx_dbdma_ring_alloc(host->tx_chan, AU1XMMC_DESCRIPTOR_COUNT);   in au1xmmc_dbdma_init()
      910   au1xxx_dbdma_chan_free(host->tx_chan);   in au1xmmc_dbdma_shutdown()

  omap_hsmmc.c
      195   struct dma_chan *tx_chan;   (member)
      813   return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan;   in omap_hsmmc_get_dma_chan()
      1901  host->tx_chan = dma_request_chan(&pdev->dev, "tx");   in omap_hsmmc_probe()
      1902  if (IS_ERR(host->tx_chan)) {   in omap_hsmmc_probe()
      1904  ret = PTR_ERR(host->tx_chan);   in omap_hsmmc_probe()
      1917  dma_get_max_seg_size(host->tx_chan->device->dev));   in omap_hsmmc_probe()
      1968  if (!IS_ERR_OR_NULL(host->tx_chan))   in omap_hsmmc_probe()
      1969  dma_release_channel(host->tx_chan);   in omap_hsmmc_probe()
      1989  dma_release_channel(host->tx_chan);   in omap_hsmmc_remove()

  sdhci.c
      1215  host->tx_chan = dma_request_chan(mmc_dev(mmc), "tx");   in sdhci_external_dma_init()
      1216  if (IS_ERR(host->tx_chan)) {   in sdhci_external_dma_init()
      1217  ret = PTR_ERR(host->tx_chan);   in sdhci_external_dma_init()
      1220  host->tx_chan = NULL;   in sdhci_external_dma_init()
      1226  if (host->tx_chan) {   in sdhci_external_dma_init()
      1227  dma_release_channel(host->tx_chan);   in sdhci_external_dma_init()
      1228  host->tx_chan = NULL;   in sdhci_external_dma_init()
      1243  return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan;   in sdhci_external_dma_channel()
      1303  if (host->tx_chan) {   in sdhci_external_dma_release()
      1304  dma_release_channel(host->tx_chan);   in sdhci_external_dma_release()
      [all …]

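The omap_hsmmc and sdhci hits share one shape: request separate "tx" and "rx" slave channels at probe time, pick one per request based on the data direction, and release both on teardown. A sketch of that shape, with a stripped-down my_host context standing in for the real host structures:

    #include <linux/dmaengine.h>
    #include <linux/err.h>
    #include <linux/mmc/core.h>

    /* Stripped-down host context; real hosts carry far more state. */
    struct my_host {
        struct dma_chan *tx_chan;
        struct dma_chan *rx_chan;
    };

    static int my_host_dma_init(struct my_host *host, struct device *dev)
    {
        int ret;

        host->tx_chan = dma_request_chan(dev, "tx");
        if (IS_ERR(host->tx_chan)) {
            ret = PTR_ERR(host->tx_chan);
            host->tx_chan = NULL;
            return ret;
        }

        host->rx_chan = dma_request_chan(dev, "rx");
        if (IS_ERR(host->rx_chan)) {
            ret = PTR_ERR(host->rx_chan);
            dma_release_channel(host->tx_chan);
            host->tx_chan = NULL;
            host->rx_chan = NULL;
            return ret;
        }
        return 0;
    }

    /* Writes go out on the TX channel, reads come back on the RX channel. */
    static struct dma_chan *my_host_dma_chan(struct my_host *host,
                                             struct mmc_data *data)
    {
        return data->flags & MMC_DATA_WRITE ? host->tx_chan : host->rx_chan;
    }

    static void my_host_dma_release(struct my_host *host)
    {
        if (host->tx_chan)
            dma_release_channel(host->tx_chan);
        if (host->rx_chan)
            dma_release_channel(host->rx_chan);
        host->tx_chan = NULL;
        host->rx_chan = NULL;
    }
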
/linux/drivers/net/ethernet/xilinx/

  xilinx_axienet_main.c
      878   dma_dev = lp->tx_chan->device;   in axienet_start_xmit_dmaengine()
      917   dma_tx_desc = dma_dev->device_prep_slave_sg(lp->tx_chan, skbuf_dma->sgl,   in axienet_start_xmit_dmaengine()
      928   dma_async_issue_pending(lp->tx_chan);   in axienet_start_xmit_dmaengine()
      1447  lp->tx_chan = dma_request_chan(lp->dev, "tx_chan0");   in axienet_init_dmaengine()
      1448  if (IS_ERR(lp->tx_chan)) {   in axienet_init_dmaengine()
      1450  return PTR_ERR(lp->tx_chan);   in axienet_init_dmaengine()
      1511  dma_release_channel(lp->tx_chan);   in axienet_init_dmaengine()
      1675  dmaengine_terminate_sync(lp->tx_chan);   in axienet_stop()
      1676  dmaengine_synchronize(lp->tx_chan);   in axienet_stop()
      1688  dma_release_channel(lp->tx_chan);   in axienet_stop()
      [all …]

  xilinx_axienet.h
      537   * @tx_chan: TX DMA channel.
      615   struct dma_chan *tx_chan;   (member)

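The axienet transmit path prepares a slave scatter-gather descriptor on tx_chan and kicks the channel with dma_async_issue_pending(); axienet_stop() then terminates and synchronizes the channel before releasing it. The sketch below uses the generic dmaengine_prep_slave_sg() wrapper rather than the raw device_prep_slave_sg op seen at line 917, and the function names and flag choices are illustrative, not the driver's own.

    #include <linux/dmaengine.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    static int my_xmit_sg(struct dma_chan *tx_chan, struct scatterlist *sgl,
                          unsigned int nents)
    {
        struct dma_async_tx_descriptor *desc;

        desc = dmaengine_prep_slave_sg(tx_chan, sgl, nents, DMA_MEM_TO_DEV,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
            return -ENOMEM;

        /* A completion callback would normally be attached to desc here. */
        dmaengine_submit(desc);
        dma_async_issue_pending(tx_chan);
        return 0;
    }

    static void my_stop_tx(struct dma_chan *tx_chan)
    {
        /* Abort anything in flight and wait for callbacks to finish. */
        dmaengine_terminate_sync(tx_chan);
        dmaengine_synchronize(tx_chan);
        dma_release_channel(tx_chan);
    }
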
/linux/drivers/net/ethernet/chelsio/cxgb4/

  cxgb4_ethtool.c
      361   t4_get_port_stats_offset(adapter, pi->tx_chan,   in get_stats()
      399   t4_restart_aneg(p->adapter, p->adapter->pf, p->tx_chan);   in restart_autoneg()
      755   ret = t4_link_l1cfg(pi->adapter, pi->adapter->mbox, pi->tx_chan, lc);   in set_link_ksettings()
      854   pi->tx_chan, lc);   in set_fecparam()
      888   return t4_link_l1cfg(p->adapter, p->adapter->mbox, p->tx_chan,   in set_pauseparam()
      2017  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_info()
      2022  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_info()
      2045  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_info()
      2078  return t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_eeprom()
      2085  ret = t4_i2c_rd(adapter, adapter->mbox, pi->tx_chan,   in cxgb4_get_module_eeprom()
      [all …]

/linux/arch/mips/bcm63xx/

  dev-enet.c
      278   dpd->tx_chan = 1;   in bcm63xx_enet_register()
      281   dpd->tx_chan = 3;   in bcm63xx_enet_register()

/linux/arch/mips/include/asm/mach-bcm63xx/

  bcm63xx_dev_enet.h
      61    int tx_chan;   (member)

/linux/drivers/target/iscsi/cxgbit/

  cxgbit_cm.c
      938   csk->tx_chan = cxgb4_port_chan(ndev);   in cxgbit_offload_init()
      974   csk->tx_chan = cxgb4_port_chan(ndev);   in cxgbit_offload_init()
      1171  TX_CHAN_V(csk->tx_chan) |   in cxgbit_pass_accept_rpl()
      1441  flowc->mnemval[1].val = cpu_to_be32(csk->tx_chan);   in cxgbit_send_tx_flowc_wr()
      1443  flowc->mnemval[2].val = cpu_to_be32(csk->tx_chan);   in cxgbit_send_tx_flowc_wr()
      1479  pr_debug("%s: csk %p; tx_chan = %u; rss_qid = %u; snd_seq = %u;"   in cxgbit_send_tx_flowc_wr()
      1481  __func__, csk, csk->tx_chan, csk->rss_qid, csk->snd_nxt,   in cxgbit_send_tx_flowc_wr()

  cxgbit.h
      221   u32 tx_chan;   (member)

/linux/drivers/mtd/nand/raw/

  qcom_nandc.c
      411   struct dma_chan *tx_chan;   (member)
      882   } else if (chan == nandc->tx_chan) {   in prepare_bam_async_desc()
      1015  ret = prepare_bam_async_desc(nandc, nandc->tx_chan,   in prep_bam_dma_desc_data()
      1322  ret = prepare_bam_async_desc(nandc, nandc->tx_chan,   in submit_descs()
      1348  dma_async_issue_pending(nandc->tx_chan);   in submit_descs()
      3027  if (nandc->tx_chan)   in qcom_nandc_unalloc()
      3028  dma_release_channel(nandc->tx_chan);   in qcom_nandc_unalloc()
      3084  nandc->tx_chan = dma_request_chan(nandc->dev, "tx");   in qcom_nandc_alloc()
      3085  if (IS_ERR(nandc->tx_chan)) {   in qcom_nandc_alloc()
      3086  ret = PTR_ERR(nandc->tx_chan);   in qcom_nandc_alloc()
      [all …]

/linux/drivers/net/ethernet/chelsio/inline_crypto/chtls/

  chtls.h
      282   struct net_device *egress_dev;  /* TX_CHAN for act open retry */
      303   u32 tx_chan;   (member)

/linux/drivers/infiniband/hw/cxgb4/

  cm.c
      620   flowc->mnemval[1].val = cpu_to_be32(ep->tx_chan);   in send_flowc()
      622   flowc->mnemval[2].val = cpu_to_be32(ep->tx_chan);   in send_flowc()
      794   TX_CHAN_V(ep->tx_chan) |   in send_connect()
      2012  TX_CHAN_V(ep->tx_chan) |   in send_fw_act_open_req()
      2018  TX_QUEUE_V(ep->com.dev->rdev.lldi.tx_modq[ep->tx_chan]) |   in send_fw_act_open_req()
      2111  ep->tx_chan = cxgb4_port_chan(pdev);   in import_ep()
      2129  ep->tx_chan = cxgb4_port_chan(pdev);   in import_ep()
      2235  pr_debug("txq_idx %u tx_chan %u smac_idx %u rss_qid %u l2t_idx %u\n",   in c4iw_reconnect()
      2236  ep->txq_idx, ep->tx_chan, ep->smac_idx, ep->rss_qid,   in c4iw_reconnect()
      2450  TX_CHAN_V(ep->tx_chan) |   in accept_cr()
      [all …]

/linux/Documentation/devicetree/bindings/dma/ti/

  k3-udma.yaml
      187   <0x2>; /* TX_CHAN */

/linux/drivers/scsi/cxgbi/cxgb4i/

  cxgb4i.c
      222   TX_CHAN_V(csk->tx_chan) |   in send_act_open_req()
      341   TX_CHAN_V(csk->tx_chan) |   in send_act_open_req6()
      633   flowc->mnemval[1].val = htonl(csk->tx_chan);   in send_tx_flowc_wr()
      635   flowc->mnemval[2].val = htonl(csk->tx_chan);   in send_tx_flowc_wr()
      669   csk, csk->tid, 0, csk->tx_chan, csk->rss_qid,   in send_tx_flowc_wr()
      1861  csk->tx_chan = cxgb4_port_chan(ndev);   in init_act_open()
      1893  csk->flags, csk->tx_chan, csk->txq_idx, csk->rss_qid,   in init_act_open()

/linux/drivers/dma/qcom/

  gpi.c
      2063  struct gchan *tx_chan, *rx_chan;   in gpi_find_avail_gpii()   (local)
      2071  tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];   in gpi_find_avail_gpii()
      2076  if (tx_chan->vc.chan.client_count && tx_chan->seid == seid)   in gpi_find_avail_gpii()
      2085  tx_chan = &gpi_dev->gpiis[gpii].gchan[GPI_TX_CHAN];   in gpi_find_avail_gpii()
      2089  if (tx_chan->vc.chan.client_count ||   in gpi_find_avail_gpii()

/linux/drivers/ntb/

  ntb_transport.c
      2423  struct dma_chan *rx_chan, *tx_chan;   in ntb_transport_max_size()   (local)
      2429  tx_chan = qp->tx_dma_chan;   in ntb_transport_max_size()
      2432  tx_chan ? tx_chan->device->copy_align : 0);   in ntb_transport_max_size()