
Searched refs:tx_sgl (Results 1 – 5 of 5) sorted by relevance

/linux/drivers/spi/
spi-pci1xxxx.c
  145   struct scatterlist *tx_sgl, *rx_sgl;  member
  483   p->tx_sgl = xfer->tx_sg.sgl;  in pci1xxxx_spi_transfer_with_dma()
  489   if (!xfer->tx_buf || !p->tx_sgl) {  in pci1xxxx_spi_transfer_with_dma()
  501   tx_dma_addr = sg_dma_address(p->tx_sgl);  in pci1xxxx_spi_transfer_with_dma()
  503   p->tx_sgl_len = sg_dma_len(p->tx_sgl);  in pci1xxxx_spi_transfer_with_dma()
  604   p->tx_sgl = sg_next(p->tx_sgl);  in pci1xxxx_spi_setup_next_dma_transfer()
  607   if (!p->tx_sgl) {  in pci1xxxx_spi_setup_next_dma_transfer()
  611   tx_dma_addr = sg_dma_address(p->tx_sgl);  in pci1xxxx_spi_setup_next_dma_transfer()
  613   p->tx_sgl_len = sg_dma_len(p->tx_sgl);  in pci1xxxx_spi_setup_next_dma_transfer()
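The pci1xxxx hits trace one pattern: the driver caches the current scatterlist entry in tx_sgl, latches its bus address and length for the hardware, and advances with sg_next() once each chunk completes. A minimal sketch of that walk, using placeholder names (my_dma_ctx, my_load_current_entry, my_setup_next_entry) rather than the real driver structures:

/*
 * Hypothetical sketch, not the pci1xxxx driver itself: walking a
 * DMA-mapped scatterlist one entry per hardware transfer, in the
 * style of the hits above.
 */
#include <linux/scatterlist.h>
#include <linux/types.h>

struct my_dma_ctx {
	struct scatterlist *tx_sgl;	/* current TX entry */
	u32 tx_sgl_len;			/* length of the current entry */
};

/* Latch the current entry's bus address and length for the hardware. */
static void my_load_current_entry(struct my_dma_ctx *p, dma_addr_t *addr)
{
	*addr = sg_dma_address(p->tx_sgl);
	p->tx_sgl_len = sg_dma_len(p->tx_sgl);
}

/*
 * Called from the "chunk done" path: advance to the next entry, or
 * report completion when the list is exhausted.
 */
static bool my_setup_next_entry(struct my_dma_ctx *p, dma_addr_t *addr)
{
	p->tx_sgl = sg_next(p->tx_sgl);
	if (!p->tx_sgl)
		return false;		/* whole scatterlist consumed */

	my_load_current_entry(p, addr);
	return true;
}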
/linux/drivers/tty/serial/
imx.c
  221   struct scatterlist rx_sgl, tx_sgl[2];  member
  587   struct scatterlist *sgl = &sport->tx_sgl[0];  in imx_uart_dma_tx_callback()
  624   struct scatterlist *sgl = sport->tx_sgl;  in imx_uart_dma_tx()
  638   sg_init_table(sgl, ARRAY_SIZE(sport->tx_sgl));  in imx_uart_dma_tx()
  641   ARRAY_SIZE(sport->tx_sgl), sport->tx_bytes);  in imx_uart_dma_tx()
  1592  dma_unmap_sg(sport->port.dev, &sport->tx_sgl[0],  in imx_uart_shutdown()
  1710  struct scatterlist *sgl = &sport->tx_sgl[0];  in imx_uart_flush_buffer()
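The imx.c hits show why tx_sgl is a two-entry array: the circular TX buffer can wrap, so a single DMA transmit may need two scatterlist entries. A sketch of that mapping step, assuming a simple power-of-two head/tail circular buffer and placeholder names (my_port, my_map_tx):

/*
 * Hypothetical sketch of the two-entry pattern suggested by the imx.c
 * hits; my_port is a placeholder, not the real driver structure.
 */
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

struct my_port {
	struct device *dev;
	struct scatterlist tx_sgl[2];
	unsigned int dma_tx_nents;
	char *buf;			/* circular TX buffer */
	unsigned int head, tail, size;	/* size is a power of two */
};

static int my_map_tx(struct my_port *sp)
{
	struct scatterlist *sgl = sp->tx_sgl;
	unsigned int bytes = (sp->head - sp->tail) & (sp->size - 1);
	int mapped;

	if (sp->tail + bytes <= sp->size) {
		/* contiguous region: one entry is enough */
		sp->dma_tx_nents = 1;
		sg_init_one(sgl, sp->buf + sp->tail, bytes);
	} else {
		/* region wraps around: split it into two entries */
		sp->dma_tx_nents = 2;
		sg_init_table(sgl, ARRAY_SIZE(sp->tx_sgl));
		sg_set_buf(sgl, sp->buf + sp->tail, sp->size - sp->tail);
		sg_set_buf(sgl + 1, sp->buf, bytes - (sp->size - sp->tail));
	}

	mapped = dma_map_sg(sp->dev, sgl, sp->dma_tx_nents, DMA_TO_DEVICE);
	return mapped ? 0 : -EIO;
}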
mxs-auart.c
  431   struct scatterlist tx_sgl;  member
  522   dma_unmap_sg(s->dev, &s->tx_sgl, 1, DMA_TO_DEVICE);  in dma_tx_callback()
  538   struct scatterlist *sgl = &s->tx_sgl;  in mxs_auart_dma_tx()
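mxs-auart keeps a single tx_sgl entry: it is mapped before the burst is issued and unmapped in the dmaengine completion callback. A hedged sketch of that round trip, with placeholder names (my_uart, my_dma_tx) and simplified buffer handling:

/*
 * Hypothetical sketch of the single-entry pattern in the mxs-auart.c
 * hits: map one scatterlist entry, hand it to dmaengine, unmap it when
 * the transfer completes.
 */
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

struct my_uart {
	struct device *dev;
	struct dma_chan *tx_chan;
	struct scatterlist tx_sgl;
	void *tx_buf;
};

static void my_dma_tx_callback(void *param)
{
	struct my_uart *s = param;

	/* TX burst finished: release the single mapped entry. */
	dma_unmap_sg(s->dev, &s->tx_sgl, 1, DMA_TO_DEVICE);
}

static int my_dma_tx(struct my_uart *s, size_t len)
{
	struct scatterlist *sgl = &s->tx_sgl;
	struct dma_async_tx_descriptor *desc;

	sg_init_one(sgl, s->tx_buf, len);
	if (!dma_map_sg(s->dev, sgl, 1, DMA_TO_DEVICE))
		return -EIO;

	desc = dmaengine_prep_slave_sg(s->tx_chan, sgl, 1, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT);
	if (!desc) {
		dma_unmap_sg(s->dev, sgl, 1, DMA_TO_DEVICE);
		return -EINVAL;
	}

	desc->callback = my_dma_tx_callback;
	desc->callback_param = s;
	dmaengine_submit(desc);
	dma_async_issue_pending(s->tx_chan);
	return 0;
}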
/linux/drivers/net/ethernet/hisilicon/hns3/
hns3_enet.h
  434   u64 tx_sgl;  member
hns3_enet.c
  58    static unsigned int tx_sgl = 1;  variable
  59    module_param(tx_sgl, uint, 0600);
  60    MODULE_PARM_DESC(tx_sgl, "Minimum number of frags when using dma_map_sg() to optimize the IOMMU map…
  1019  if (skb->len <= ring->tx_copybreak || !tx_sgl ||  in hns3_can_use_tx_sgl()
  1021  skb_shinfo(skb)->nr_frags < tx_sgl))  in hns3_can_use_tx_sgl()
  2244  hns3_ring_stats_update(ring, tx_sgl);  in hns3_handle_tx_sgl()
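In hns3, tx_sgl is both a writable module parameter and a per-ring statistic: the parameter sets the fragment-count threshold above which the dma_map_sg() TX path is taken, and the stat counts how often it was used. A simplified sketch of the gating check follows; line 1020 is not shown in the hits, so the condition below omits whatever it contributes, and the ring structure and parameter description are placeholders:

/*
 * Hypothetical sketch, not the hns3 code: a writable module parameter
 * gating a scatterlist-based TX path.
 */
#include <linux/module.h>
#include <linux/skbuff.h>

static unsigned int tx_sgl = 1;
module_param(tx_sgl, uint, 0600);
MODULE_PARM_DESC(tx_sgl, "Minimum fragment count for the scatterlist TX path (placeholder text)");

struct my_tx_ring {
	unsigned int tx_copybreak;	/* small packets are copied instead */
};

/* Simplified reading of the check visible at lines 1019/1021 above. */
static bool my_can_use_tx_sgl(struct my_tx_ring *ring, struct sk_buff *skb)
{
	if (skb->len <= ring->tx_copybreak || !tx_sgl)
		return false;

	return skb_shinfo(skb)->nr_frags >= tx_sgl;
}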