Searched full:dma_tx (Results 1 – 25 of 40) sorted by relevance

/linux/drivers/spi/
spi-pxa2xx-dma.c
98 chan = drv_data->controller->dma_tx; in pxa2xx_spi_dma_prepare_one()
127 dmaengine_terminate_async(drv_data->controller->dma_tx); in pxa2xx_spi_dma_transfer()
165 dmaengine_terminate_async(drv_data->controller->dma_tx); in pxa2xx_spi_dma_prepare()
173 dma_async_issue_pending(drv_data->controller->dma_tx); in pxa2xx_spi_dma_start()
182 dmaengine_terminate_sync(drv_data->controller->dma_tx); in pxa2xx_spi_dma_stop()
195 controller->dma_tx = dma_request_slave_channel_compat(mask, in pxa2xx_spi_dma_setup()
197 if (!controller->dma_tx) in pxa2xx_spi_dma_setup()
203 dma_release_channel(controller->dma_tx); in pxa2xx_spi_dma_setup()
204 controller->dma_tx = NULL; in pxa2xx_spi_dma_setup()
220 if (controller->dma_tx) { in pxa2xx_spi_dma_release()
[all …]
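
The spi-pxa2xx-dma.c hits above use dmaengine_terminate_async() on the transfer error path but dmaengine_terminate_sync() when the channel is being shut down. A minimal sketch of that split, assuming a hypothetical driver structure; only the dmaengine calls are real kernel API:

#include <linux/dmaengine.h>

/* Hypothetical driver state; stands in for the pxa2xx drv_data/controller. */
struct my_drv {
	struct dma_chan *dma_tx;
};

/* Error path: may run in atomic context, so only schedule the abort. */
static void my_drv_dma_abort(struct my_drv *drv)
{
	dmaengine_terminate_async(drv->dma_tx);
}

/* Teardown path: process context, wait until the channel is really idle. */
static void my_drv_dma_stop(struct my_drv *drv)
{
	dmaengine_terminate_sync(drv->dma_tx);
}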
spi-fsl-lpspi.c
398 ret = dmaengine_slave_config(controller->dma_tx, &tx); in fsl_lpspi_dma_configure()
612 desc_tx = dmaengine_prep_slave_sg(controller->dma_tx, in fsl_lpspi_dma_transfer()
616 dmaengine_terminate_all(controller->dma_tx); in fsl_lpspi_dma_transfer()
624 dma_async_issue_pending(controller->dma_tx); in fsl_lpspi_dma_transfer()
637 dmaengine_terminate_all(controller->dma_tx); in fsl_lpspi_dma_transfer()
647 dmaengine_terminate_all(controller->dma_tx); in fsl_lpspi_dma_transfer()
657 dmaengine_terminate_all(controller->dma_tx); in fsl_lpspi_dma_transfer()
667 dmaengine_terminate_all(controller->dma_tx); in fsl_lpspi_dma_transfer()
686 if (controller->dma_tx) { in fsl_lpspi_dma_exit()
687 dma_release_channel(controller->dma_tx); in fsl_lpspi_dma_exit()
[all …]
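
The spi-fsl-lpspi.c hits show the usual TX submit path: prepare a slave scatter-gather descriptor, kick the channel, and tear everything down on any failure or timeout. A hedged sketch of that flow (everything except the dmaengine calls is made up):

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/*
 * Map, submit and kick a TX scatterlist on a slave channel.  On failure the
 * channel is cleaned up with dmaengine_terminate_all() so no half-programmed
 * descriptor is left behind.
 */
static int my_dma_tx_submit(struct dma_chan *dma_tx,
			    struct scatterlist *sgl, unsigned int nents)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_slave_sg(dma_tx, sgl, nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie)) {
		dmaengine_terminate_all(dma_tx);
		return -EINVAL;
	}

	/* Nothing moves until the pending queue is kicked. */
	dma_async_issue_pending(dma_tx);
	return 0;
}

dmaengine_terminate_all() is the older helper these hits use; newer client code generally prefers dmaengine_terminate_sync() or dmaengine_terminate_async().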
spi-rockchip.c
295 dmaengine_terminate_async(ctlr->dma_tx); in rockchip_spi_handle_err()
493 dmaengine_slave_config(ctlr->dma_tx, &txconf); in rockchip_spi_prepare_dma()
496 ctlr->dma_tx, in rockchip_spi_prepare_dma()
524 dma_async_issue_pending(ctlr->dma_tx); in rockchip_spi_prepare_dma()
664 dmaengine_terminate_sync(ctlr->dma_tx); in rockchip_spi_target_abort()
872 ctlr->dma_tx = dma_request_chan(rs->dev, "tx"); in rockchip_spi_probe()
873 if (IS_ERR(ctlr->dma_tx)) { in rockchip_spi_probe()
875 ret = dev_warn_probe(rs->dev, PTR_ERR(ctlr->dma_tx), in rockchip_spi_probe()
879 ctlr->dma_tx = NULL; in rockchip_spi_probe()
892 if (ctlr->dma_tx && ctlr->dma_rx) { in rockchip_spi_probe()
[all …]
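
spi-rockchip.c requests its channels by name at probe time, degrades to PIO when a channel is missing, and only enables DMA when both directions are present (the hit at line 892). A sketch of that pattern under a hypothetical controller structure; spi-rockchip itself uses dev_warn_probe(), a newer logging helper:

#include <linux/dmaengine.h>
#include <linux/device.h>

struct my_ctlr {
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	bool can_dma;
};

static int my_probe_dma(struct device *dev, struct my_ctlr *c)
{
	c->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(c->dma_tx)) {
		if (PTR_ERR(c->dma_tx) == -EPROBE_DEFER)
			return -EPROBE_DEFER;	/* provider not bound yet */
		dev_warn(dev, "no TX DMA channel, falling back to PIO\n");
		c->dma_tx = NULL;
	}

	c->dma_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(c->dma_rx)) {
		if (PTR_ERR(c->dma_rx) == -EPROBE_DEFER) {
			if (c->dma_tx)
				dma_release_channel(c->dma_tx);
			return -EPROBE_DEFER;
		}
		dev_warn(dev, "no RX DMA channel, falling back to PIO\n");
		c->dma_rx = NULL;
	}

	/* DMA is only worthwhile when both directions are available. */
	c->can_dma = c->dma_tx && c->dma_rx;
	return 0;
}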
spi-ep93xx.c
78 * @dma_tx: TX DMA channel
92 struct dma_chan *dma_tx; member
299 chan = espi->dma_tx; in ep93xx_spi_dma_prepare()
383 chan = espi->dma_tx; in ep93xx_spi_dma_finish()
427 dma_async_issue_pending(espi->dma_tx); in ep93xx_spi_dma_transfer()
588 espi->dma_tx = dma_request_chan(dev, "tx"); in ep93xx_spi_setup_dma()
589 if (IS_ERR(espi->dma_tx)) { in ep93xx_spi_setup_dma()
590 ret = dev_err_probe(dev, PTR_ERR(espi->dma_tx), "tx DMA setup failed"); in ep93xx_spi_setup_dma()
611 if (espi->dma_tx) { in ep93xx_spi_release_dma()
612 dma_release_channel(espi->dma_tx); in ep93xx_spi_release_dma()
spi-omap2-mcspi.c
94 struct dma_chan *dma_tx; member
428 dmaengine_slave_config(mcspi_dma->dma_tx, &cfg); in omap2_mcspi_tx_dma()
430 tx = dmaengine_prep_slave_sg(mcspi_dma->dma_tx, xfer->tx_sg.sgl, in omap2_mcspi_tx_dma()
441 dma_async_issue_pending(mcspi_dma->dma_tx); in omap2_mcspi_tx_dma()
656 dmaengine_terminate_sync(mcspi_dma->dma_tx); in omap2_mcspi_txrx_dma()
1021 mcspi_dma->dma_tx = dma_request_chan(mcspi->dev, in omap2_mcspi_request_dma()
1023 if (IS_ERR(mcspi_dma->dma_tx)) { in omap2_mcspi_request_dma()
1024 ret = PTR_ERR(mcspi_dma->dma_tx); in omap2_mcspi_request_dma()
1025 mcspi_dma->dma_tx = NULL; in omap2_mcspi_request_dma()
1050 if (mcspi_dma->dma_tx) { in omap2_mcspi_release_dma()
[all …]
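
Before preparing a descriptor, drivers such as spi-omap2-mcspi.c first program the slave side of the TX channel: where the peripheral's FIFO sits and how wide each access is. A minimal, hypothetical configuration for a byte-wide TX register (the address and widths are example values, not the McSPI ones):

#include <linux/dmaengine.h>

/*
 * fifo_phys is the physical address of the peripheral's TX data register;
 * the widths and burst size below are illustrative.
 */
static int my_config_tx_chan(struct dma_chan *dma_tx, dma_addr_t fifo_phys)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_phys,	/* where the DMA writes */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
		.dst_maxburst	= 1,		/* one word per request */
	};

	return dmaengine_slave_config(dma_tx, &cfg);
}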
spi-davinci.c
120 struct dma_chan *dma_tx; member
395 if (dspi->dma_rx && dspi->dma_tx) in davinci_spi_of_setup()
622 dmaengine_slave_config(dspi->dma_tx, &dma_tx_conf); in davinci_spi_bufs()
640 txdesc = dmaengine_prep_slave_sg(dspi->dma_tx, in davinci_spi_bufs()
658 dma_async_issue_pending(dspi->dma_tx); in davinci_spi_bufs()
757 dspi->dma_tx = dma_request_chan(sdev, "tx"); in davinci_spi_request_dma()
758 if (IS_ERR(dspi->dma_tx)) { in davinci_spi_request_dma()
760 return PTR_ERR(dspi->dma_tx); in davinci_spi_request_dma()
957 dspi->dma_tx = NULL; in davinci_spi_probe()
998 dma_release_channel(dspi->dma_tx); in davinci_spi_probe()
[all …]
spi-ingenic.c
185 tx_desc = spi_ingenic_prepare_dma(ctlr, ctlr->dma_tx, in spi_ingenic_dma_tx()
194 dma_async_issue_pending(ctlr->dma_tx); in spi_ingenic_dma_tx()
336 ret = dma_get_slave_caps(ctlr->dma_tx, &caps); in spi_ingenic_can_dma()
354 ctlr->dma_tx = chan; in spi_ingenic_request_dma()
370 if (ctlr->dma_tx) in spi_ingenic_release_dma()
371 dma_release_channel(ctlr->dma_tx); in spi_ingenic_release_dma()
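
spi-ingenic.c queries dma_get_slave_caps() in its can_dma path, so DMA is only attempted when the engine can actually express the transfer. A sketch of such a capability check; the width policy here is illustrative, not the driver's exact condition:

#include <linux/dmaengine.h>
#include <linux/bits.h>

/* Ask the DMA engine what it supports before committing a transfer to it. */
static bool my_can_dma(struct dma_chan *dma_tx, unsigned int bytes_per_word)
{
	struct dma_slave_caps caps;

	if (dma_get_slave_caps(dma_tx, &caps))
		return false;

	/* Example policy: 4-byte words need 32-bit destination accesses. */
	if (bytes_per_word == 4 &&
	    !(caps.dst_addr_widths & BIT(DMA_SLAVE_BUSWIDTH_4_BYTES)))
		return false;

	return true;
}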
spi-sh-msiof.c
674 dma_sync_single_for_device(p->ctlr->dma_tx->device->dev, in sh_msiof_dma_once()
676 desc_tx = dmaengine_prep_slave_single(p->ctlr->dma_tx, in sh_msiof_dma_once()
714 dma_async_issue_pending(p->ctlr->dma_tx); in sh_msiof_dma_once()
764 dmaengine_terminate_sync(p->ctlr->dma_tx); in sh_msiof_dma_once()
848 while (ctlr->dma_tx && len > 15) { in sh_msiof_transfer_one()
1121 ctlr->dma_tx = sh_msiof_request_dma_chan(dev, DMA_MEM_TO_DEV, in sh_msiof_request_dma()
1123 if (!ctlr->dma_tx) in sh_msiof_request_dma()
1139 tx_dev = ctlr->dma_tx->device->dev; in sh_msiof_request_dma()
1163 dma_release_channel(ctlr->dma_tx); in sh_msiof_request_dma()
1164 ctlr->dma_tx = NULL; in sh_msiof_request_dma()
[all …]
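
spi-sh-msiof.c transmits from a single long-lived bounce buffer instead of a scatterlist, so each transfer hands that buffer back to the device and uses dmaengine_prep_slave_single(). A hedged sketch of that variant (the mapping handle and length are placeholders):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

/*
 * tx_dma was obtained earlier from dma_map_single(..., DMA_TO_DEVICE) and
 * stays mapped for the life of the driver; each transfer only syncs it.
 */
static int my_tx_once(struct dma_chan *dma_tx, dma_addr_t tx_dma, size_t len)
{
	struct dma_async_tx_descriptor *desc;

	/* Flush CPU writes so the DMA engine sees the freshly filled buffer. */
	dma_sync_single_for_device(dma_tx->device->dev, tx_dma, len,
				   DMA_TO_DEVICE);

	desc = dmaengine_prep_slave_single(dma_tx, tx_dma, len,
					   DMA_MEM_TO_DEV,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EAGAIN;

	dmaengine_submit(desc);
	dma_async_issue_pending(dma_tx);
	return 0;
}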
spi-stm32.c
329 * @dma_tx: dma channel for TX transfer
367 struct dma_chan *dma_tx; member
856 if (spi->cur_usedma && spi->dma_tx) in stm32fx_spi_disable()
857 dmaengine_terminate_async(spi->dma_tx); in stm32fx_spi_disable()
899 if (spi->cur_usedma && spi->dma_tx) in stm32h7_spi_disable()
900 dmaengine_terminate_async(spi->dma_tx); in stm32h7_spi_disable()
1617 if (spi->tx_buf && spi->dma_tx) { in stm32_spi_transfer_one_dma()
1618 stm32_spi_dma_config(spi, spi->dma_tx, &tx_dma_conf, DMA_MEM_TO_DEV); in stm32_spi_transfer_one_dma()
1619 dmaengine_slave_config(spi->dma_tx, &tx_dma_conf); in stm32_spi_transfer_one_dma()
1620 tx_dma_desc = dmaengine_prep_slave_sg(spi->dma_tx, xfer->tx_sg.sgl, in stm32_spi_transfer_one_dma()
[all …]
spi-rspi.c
569 desc_tx = dmaengine_prep_slave_sg(rspi->ctlr->dma_tx, tx->sgl, in rspi_dma_transfer()
609 dma_async_issue_pending(rspi->ctlr->dma_tx); in rspi_dma_transfer()
616 dmaengine_synchronize(rspi->ctlr->dma_tx); in rspi_dma_transfer()
625 dmaengine_terminate_sync(rspi->ctlr->dma_tx); in rspi_dma_transfer()
1144 ctlr->dma_tx = rspi_request_dma_chan(dev, DMA_MEM_TO_DEV, dma_tx_id, in rspi_request_dma()
1146 if (!ctlr->dma_tx) in rspi_request_dma()
1152 dma_release_channel(ctlr->dma_tx); in rspi_request_dma()
1153 ctlr->dma_tx = NULL; in rspi_request_dma()
1164 if (ctlr->dma_tx) in rspi_release_dma()
1165 dma_release_channel(ctlr->dma_tx); in rspi_release_dma()
spi-imx.c
1284 ret = dmaengine_slave_config(controller->dma_tx, &tx); in spi_imx_dma_configure()
1383 if (controller->dma_tx) { in spi_imx_sdma_exit()
1384 dma_release_channel(controller->dma_tx); in spi_imx_sdma_exit()
1385 controller->dma_tx = NULL; in spi_imx_sdma_exit()
1397 controller->dma_tx = dma_request_chan(dev, "tx"); in spi_imx_sdma_init()
1398 if (IS_ERR(controller->dma_tx)) { in spi_imx_sdma_init()
1399 ret = PTR_ERR(controller->dma_tx); in spi_imx_sdma_init()
1401 controller->dma_tx = NULL; in spi_imx_sdma_init()
1508 desc_tx = dmaengine_prep_slave_sg(controller->dma_tx, in spi_imx_dma_transfer()
1512 dmaengine_terminate_all(controller->dma_tx); in spi_imx_dma_transfer()
[all …]
spi-dw-dma.c
113 struct dw_dma_slave dma_tx = { .dst_id = 1 }, *tx = &dma_tx; in dw_spi_dma_init_mfld() local
143 dws->host->dma_tx = dws->txchan; in dw_spi_dma_init_mfld()
187 dws->host->dma_tx = dws->txchan; in dw_spi_dma_init_generic()
/linux/drivers/mmc/host/
jz4740_mmc.c
179 struct dma_chan *dma_tx; member
223 dma_release_channel(host->dma_tx); in jz4740_mmc_release_dma_channels()
232 host->dma_tx = dma_request_chan(dev, "tx-rx"); in jz4740_mmc_acquire_dma_channels()
233 if (!IS_ERR(host->dma_tx)) in jz4740_mmc_acquire_dma_channels()
236 if (PTR_ERR(host->dma_tx) != -ENODEV) { in jz4740_mmc_acquire_dma_channels()
238 return PTR_ERR(host->dma_tx); in jz4740_mmc_acquire_dma_channels()
241 host->dma_tx = dma_request_chan(mmc_dev(host->mmc), "tx"); in jz4740_mmc_acquire_dma_channels()
242 if (IS_ERR(host->dma_tx)) { in jz4740_mmc_acquire_dma_channels()
243 dev_err(mmc_dev(host->mmc), "Failed to get dma_tx channel\n"); in jz4740_mmc_acquire_dma_channels()
244 return PTR_ERR(host->dma_tx); in jz4740_mmc_acquire_dma_channels()
[all …]
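
jz4740_mmc.c first asks for a combined "tx-rx" channel and only falls back to separate "tx"/"rx" requests when that name simply does not exist (-ENODEV); any other error aborts the probe. A sketch of that fallback with a hypothetical host structure:

#include <linux/dmaengine.h>

struct my_mmc_host {
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	bool use_single_channel;	/* one channel serves both directions */
};

static int my_mmc_acquire_dma(struct device *dev, struct my_mmc_host *host)
{
	host->dma_tx = dma_request_chan(dev, "tx-rx");
	if (!IS_ERR(host->dma_tx)) {
		host->use_single_channel = true;
		return 0;
	}

	/* Only "no such channel" allows falling back to the older binding. */
	if (PTR_ERR(host->dma_tx) != -ENODEV)
		return PTR_ERR(host->dma_tx);

	host->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(host->dma_tx))
		return PTR_ERR(host->dma_tx);

	host->dma_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(host->dma_rx)) {
		dma_release_channel(host->dma_tx);
		return PTR_ERR(host->dma_rx);
	}
	return 0;
}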
omap.c
134 struct dma_chan *dma_tx; member
423 c = host->dma_tx; in mmc_omap_release_dma()
1007 c = host->dma_tx; in mmc_omap_prepare_data()
1087 host->dma_tx : host->dma_rx; in mmc_omap_start_request()
1444 host->dma_tx = dma_request_chan(&pdev->dev, "tx"); in mmc_omap_probe()
1445 if (IS_ERR(host->dma_tx)) { in mmc_omap_probe()
1446 ret = PTR_ERR(host->dma_tx); in mmc_omap_probe()
1450 host->dma_tx = NULL; in mmc_omap_probe()
1458 if (host->dma_tx) in mmc_omap_probe()
1459 dma_release_channel(host->dma_tx); in mmc_omap_probe()
[all …]
/linux/arch/powerpc/platforms/512x/
mpc512x_lpbfifo.c
169 struct dma_async_tx_descriptor *dma_tx = NULL; in mpc512x_lpbfifo_kick() local
257 dma_tx = dmaengine_prep_slave_sg(lpbfifo.chan, &sg, in mpc512x_lpbfifo_kick()
259 if (!dma_tx) { in mpc512x_lpbfifo_kick()
263 dma_tx->callback = mpc512x_lpbfifo_callback; in mpc512x_lpbfifo_kick()
264 dma_tx->callback_param = NULL; in mpc512x_lpbfifo_kick()
313 cookie = dma_tx->tx_submit(dma_tx); in mpc512x_lpbfifo_kick()
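
mpc512x_lpbfifo.c works with the raw struct dma_async_tx_descriptor: it fills in the callback fields by hand and submits through the descriptor's own tx_submit() hook, which is what the dmaengine_submit() wrapper calls anyway. A sketch of that older-style submission (the callback and error codes are placeholders):

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static void my_fifo_done(void *param)
{
	/* runs in the DMA driver's completion context */
}

static int my_fifo_kick(struct dma_chan *chan, struct scatterlist *sg)
{
	struct dma_async_tx_descriptor *dma_tx;
	dma_cookie_t cookie;

	dma_tx = dmaengine_prep_slave_sg(chan, sg, 1, DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT);
	if (!dma_tx)
		return -ENOSPC;

	/* Completion notification, filled in directly on the descriptor. */
	dma_tx->callback = my_fifo_done;
	dma_tx->callback_param = NULL;

	/* tx_submit() is what dmaengine_submit() invokes under the hood. */
	cookie = dma_tx->tx_submit(dma_tx);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	return 0;
}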
/linux/drivers/mtd/nand/raw/
intel-nand-controller.c
118 struct dma_chan *dma_tx; member
282 dmaengine_terminate_async(ebu_host->dma_tx); in ebu_dma_tx_callback()
305 chan = ebu_host->dma_tx; in ebu_dma_start()
572 if (ebu_host->dma_tx) in ebu_dma_cleanup()
573 dma_release_channel(ebu_host->dma_tx); in ebu_dma_cleanup()
641 ebu_host->dma_tx = dma_request_chan(dev, "tx"); in ebu_nand_probe()
642 if (IS_ERR(ebu_host->dma_tx)) { in ebu_nand_probe()
643 ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx), in ebu_nand_probe()
/linux/Documentation/devicetree/bindings/net/
idt,3243x-emac.yaml
28 - const: dma_tx
65 reg-names = "emac", "dma_rx", "dma_tx";
/linux/Documentation/devicetree/bindings/serial/
brcm,bcm7271-uart.yaml
40 - const: dma_tx
90 reg-names = "uart", "dma_arb", "dma_rx", "dma_tx", "dma_intr2";
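
In the two bindings above, "dma_tx" is not a DMA channel but one of several named register regions listed in reg-names. On the driver side such a region is typically looked up and mapped by that name; a generic sketch assuming a platform driver (the helper is real kernel API, the function name is made up):

#include <linux/platform_device.h>
#include <linux/io.h>

/* Map the register window declared as reg-names = ..., "dma_tx", ... */
static void __iomem *my_map_dma_tx_regs(struct platform_device *pdev)
{
	/* Returns an ERR_PTR on failure, which the caller should propagate. */
	return devm_platform_ioremap_resource_byname(pdev, "dma_tx");
}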
/linux/drivers/net/ethernet/samsung/sxgbe/
sxgbe_dma.c
44 int fix_burst, int pbl, dma_addr_t dma_tx, in sxgbe_dma_channel_init() argument
66 writel(upper_32_bits(dma_tx), in sxgbe_dma_channel_init()
68 writel(lower_32_bits(dma_tx), in sxgbe_dma_channel_init()
80 dma_addr = dma_tx + ((t_rsize - 1) * SXGBE_DESC_SIZE_BYTES); in sxgbe_dma_channel_init()
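
In sxgbe_dma.c, dma_tx is not a channel either: it is the DMA (bus) address of the TX descriptor ring, split into high and low halves when written to the hardware. A generic sketch of that idiom (the register offsets are invented for illustration):

#include <linux/io.h>
#include <linux/kernel.h>	/* upper_32_bits() / lower_32_bits() */

/* Hypothetical register offsets for a TX ring base-address pair. */
#define MY_TX_RING_BASE_HI	0x10
#define MY_TX_RING_BASE_LO	0x14

static void my_program_tx_ring(void __iomem *ioaddr, dma_addr_t dma_tx)
{
	/* 64-bit ring address programmed as two 32-bit registers. */
	writel(upper_32_bits(dma_tx), ioaddr + MY_TX_RING_BASE_HI);
	writel(lower_32_bits(dma_tx), ioaddr + MY_TX_RING_BASE_LO);
}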
sxgbe_dma.h
24 int pbl, dma_addr_t dma_tx, dma_addr_t dma_rx,
/linux/drivers/net/ethernet/micrel/
ks8842.c
147 #define KS8842_USE_DMA(adapter) (((adapter)->dma_tx.channel != -1) && \
159 struct ks8842_tx_dma_ctl dma_tx; member
424 struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx; in ks8842_tx_frame_dma()
847 struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx; in ks8842_dma_tx_cb()
863 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_stop_dma()
885 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_dealloc_dma_bufs()
917 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_alloc_dma_bufs()
990 adapter->dma_tx.channel = -1; in ks8842_open()
1044 if (adapter->dma_tx.adesc) in ks8842_xmit_frame()
1175 adapter->dma_tx.channel = pdata->tx_dma_channel; in ks8842_probe()
[all …]
/linux/drivers/i2c/busses/
i2c-sh_mobile.c
136 struct dma_chan *dma_tx; member
449 ? pd->dma_rx : pd->dma_tx; in sh_mobile_i2c_cleanup_dma()
512 struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx; in sh_mobile_i2c_xfer_dma()
522 chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV, in sh_mobile_i2c_xfer_dma()
815 if (!IS_ERR(pd->dma_tx)) { in sh_mobile_i2c_release_dma()
816 dma_release_channel(pd->dma_tx); in sh_mobile_i2c_release_dma()
817 pd->dma_tx = ERR_PTR(-EPROBE_DEFER); in sh_mobile_i2c_release_dma()
920 pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER); in sh_mobile_i2c_probe()
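
Both Renesas I2C drivers (this one and i2c-rcar.c below) keep dma_tx set to ERR_PTR(-EPROBE_DEFER) until the first transfer that actually wants DMA, then request the channel lazily and fall back to PIO while it is unavailable. A sketch of that sentinel pattern (the structure and helper names are hypothetical):

#include <linux/dmaengine.h>
#include <linux/err.h>

struct my_i2c_dev {
	struct device *dev;
	struct dma_chan *dma_tx;	/* ERR_PTR(-EPROBE_DEFER) until requested */
};

static void my_i2c_init_dma_state(struct my_i2c_dev *pd)
{
	/* "Not requested yet" sentinel, as in the probe hit above. */
	pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
}

static struct dma_chan *my_i2c_get_tx_chan(struct my_i2c_dev *pd)
{
	/* Lazily request on first use; keep the ERR_PTR if it fails. */
	if (PTR_ERR(pd->dma_tx) == -EPROBE_DEFER)
		pd->dma_tx = dma_request_chan(pd->dev, "tx");

	return IS_ERR(pd->dma_tx) ? NULL : pd->dma_tx;	/* NULL => use PIO */
}

static void my_i2c_release_dma(struct my_i2c_dev *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}
}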
i2c-rcar.c
162 struct dma_chan *dma_tx; member
450 ? priv->dma_rx : priv->dma_tx; in rcar_i2c_cleanup_dma()
485 struct dma_chan *chan = read ? priv->dma_rx : priv->dma_tx; in rcar_i2c_dma()
864 chan = read ? priv->dma_rx : priv->dma_tx; in rcar_i2c_request_dma()
874 priv->dma_tx = chan; in rcar_i2c_request_dma()
879 if (!IS_ERR(priv->dma_tx)) { in rcar_i2c_release_dma()
880 dma_release_channel(priv->dma_tx); in rcar_i2c_release_dma()
881 priv->dma_tx = ERR_PTR(-EPROBE_DEFER); in rcar_i2c_release_dma()
1162 priv->dma_rx = priv->dma_tx = ERR_PTR(-EPROBE_DEFER); in rcar_i2c_probe()
/linux/drivers/net/ethernet/stmicro/stmmac/
ring_mode.c
29 desc = tx_q->dma_tx + entry; in jumbo_frm()
59 desc = tx_q->dma_tx + entry; in jumbo_frm()
chain_mode.c
27 desc = tx_q->dma_tx + entry; in jumbo_frm()
50 desc = tx_q->dma_tx + entry; in jumbo_frm()
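
In the stmmac ring and chain helpers above, dma_tx is the base of a coherent array of TX descriptors and the hit lines simply index into it. A minimal sketch of that layout (the descriptor fields and queue structure are simplified placeholders, not the stmmac definitions):

#include <linux/dma-mapping.h>
#include <linux/gfp.h>

/* Simplified stand-in for a hardware TX descriptor. */
struct my_tx_desc {
	__le32 des0, des1, des2, des3;
};

struct my_tx_queue {
	struct my_tx_desc *dma_tx;	/* CPU view of the descriptor ring  */
	dma_addr_t dma_tx_phy;		/* bus address handed to the device */
	unsigned int cur_tx;		/* producer index                   */
	unsigned int size;		/* number of descriptors            */
};

static int my_alloc_tx_ring(struct device *dev, struct my_tx_queue *q,
			    unsigned int size)
{
	q->size = size;
	q->dma_tx = dma_alloc_coherent(dev, size * sizeof(*q->dma_tx),
				       &q->dma_tx_phy, GFP_KERNEL);
	return q->dma_tx ? 0 : -ENOMEM;
}

/* The pattern behind "desc = tx_q->dma_tx + entry" in the hits above. */
static struct my_tx_desc *my_next_tx_desc(struct my_tx_queue *q)
{
	struct my_tx_desc *desc = q->dma_tx + q->cur_tx;

	q->cur_tx = (q->cur_tx + 1) % q->size;
	return desc;
}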
