Lines Matching "use-dma-rx"

1 // SPDX-License-Identifier: GPL-2.0+
10 #include <linux/dma-mapping.h>
17 #include <linux/platform_data/spi-s3c64xx.h>
27 /* Registers and bit-fields */
112 #define FIFO_LVL_MASK(i) ((i)->port_conf->fifo_lvl_mask[(i)->port_id])
114 (1 << (i)->port_conf->tx_st_done)) ? 1 : 0)
115 #define TX_FIFO_LVL(v, sdd) (((v) & (sdd)->tx_fifomask) >> \
116 __ffs((sdd)->tx_fifomask))
117 #define RX_FIFO_LVL(v, sdd) (((v) & (sdd)->rx_fifomask) >> \
118 __ffs((sdd)->rx_fifomask))
127 #define is_polling(x) ((x)->cntrlr_info->polling)
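The two *_FIFO_LVL macros above isolate the FIFO occupancy field from a raw status word: mask it, then shift right by the mask's lowest set bit (__ffs). A minimal usage sketch built only from the macros and fields that appear in this listing; the helper name is invented for illustration and is not part of the driver:

static inline bool s3c64xx_tx_fifo_empty(struct s3c64xx_spi_driver_data *sdd)
{
	/* Raw status read; the position of the level field differs per
	 * SoC, which is what the fifomask/__ffs() indirection hides. */
	u32 status = readl(sdd->regs + S3C64XX_SPI_STATUS);

	return TX_FIFO_LVL(status, sdd) == 0;
}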
139 * struct s3c64xx_spi_port_config - SPI Controller hardware info
140 * @fifo_lvl_mask: [DEPRECATED] use @{rx, tx}_fifomask instead.
141 * @rx_lvl_offset: [DEPRECATED] use @{rx, tx}_fifomask instead.
157 * @use_32bit_io: True if the SoC allows only 32-bit register accesses.
181 * struct s3c64xx_spi_driver_data - Runtime info holder for SPI driver.
196 * @rx_dma: Local receive DMA data (e.g. chan and direction)
197 * @tx_dma: Local transmit DMA data (e.g. chan and direction)
199 * @port_id: [DEPRECATED] use @{rx, tx}_fifomask instead.
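Because @fifo_lvl_mask, @rx_lvl_offset and @port_id are all marked deprecated in favour of the fifomask fields, a new SoC entry would describe its FIFO-level bitfields directly. A hedged sketch of such an entry; the name, bit positions and values are invented placeholders, not taken from any real SoC:

static const struct s3c64xx_spi_port_config example_soc_spi_port_config = {
	/* Hypothetical status-register layout: RX level in bits 22..15,
	 * TX level in bits 13..6. */
	.rx_fifomask	= GENMASK(22, 15),
	.tx_fifomask	= GENMASK(13, 6),
	.fifo_depth	= 64,
	.clk_div	= 2,
};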
231 void __iomem *regs = sdd->regs; in s3c64xx_flush_fifo()
250 } while (TX_FIFO_LVL(val, sdd) && --loops); in s3c64xx_flush_fifo()
253 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n"); in s3c64xx_flush_fifo()
263 } while (--loops); in s3c64xx_flush_fifo()
266 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n"); in s3c64xx_flush_fifo()
280 struct s3c64xx_spi_dma_data *dma = data; in s3c64xx_spi_dmacb() local
283 if (dma->direction == DMA_DEV_TO_MEM) in s3c64xx_spi_dmacb()
290 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_dmacb()
292 if (dma->direction == DMA_DEV_TO_MEM) { in s3c64xx_spi_dmacb()
293 sdd->state &= ~RXBUSY; in s3c64xx_spi_dmacb()
294 if (!(sdd->state & TXBUSY)) in s3c64xx_spi_dmacb()
295 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
297 sdd->state &= ~TXBUSY; in s3c64xx_spi_dmacb()
298 if (!(sdd->state & RXBUSY)) in s3c64xx_spi_dmacb()
299 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
302 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_dmacb()
305 static int s3c64xx_prepare_dma(struct s3c64xx_spi_dma_data *dma, in s3c64xx_prepare_dma() argument
315 if (dma->direction == DMA_DEV_TO_MEM) { in s3c64xx_prepare_dma()
316 sdd = container_of((void *)dma, in s3c64xx_prepare_dma()
318 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA; in s3c64xx_prepare_dma()
319 config.src_addr_width = sdd->cur_bpw / 8; in s3c64xx_prepare_dma()
322 sdd = container_of((void *)dma, in s3c64xx_prepare_dma()
324 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA; in s3c64xx_prepare_dma()
325 config.dst_addr_width = sdd->cur_bpw / 8; in s3c64xx_prepare_dma()
328 config.direction = dma->direction; in s3c64xx_prepare_dma()
329 ret = dmaengine_slave_config(dma->ch, &config); in s3c64xx_prepare_dma()
333 desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents, in s3c64xx_prepare_dma()
334 dma->direction, DMA_PREP_INTERRUPT); in s3c64xx_prepare_dma()
336 dev_err(&sdd->pdev->dev, "unable to prepare %s scatterlist", in s3c64xx_prepare_dma()
337 dma->direction == DMA_DEV_TO_MEM ? "rx" : "tx"); in s3c64xx_prepare_dma()
338 return -ENOMEM; in s3c64xx_prepare_dma()
341 desc->callback = s3c64xx_spi_dmacb; in s3c64xx_prepare_dma()
342 desc->callback_param = dma; in s3c64xx_prepare_dma()
344 dma->cookie = dmaengine_submit(desc); in s3c64xx_prepare_dma()
345 ret = dma_submit_error(dma->cookie); in s3c64xx_prepare_dma()
347 dev_err(&sdd->pdev->dev, "DMA submission failed"); in s3c64xx_prepare_dma()
351 dma_async_issue_pending(dma->ch); in s3c64xx_prepare_dma()
358 spi_controller_get_devdata(spi->controller); in s3c64xx_spi_set_cs()
360 if (sdd->cntrlr_info->no_cs) in s3c64xx_spi_set_cs()
364 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) { in s3c64xx_spi_set_cs()
365 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
367 u32 ssel = readl(sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
371 writel(ssel, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
374 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_set_cs()
376 sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
387 /* Requests DMA channels */ in s3c64xx_spi_prepare_transfer()
388 sdd->rx_dma.ch = dma_request_chan(&sdd->pdev->dev, "rx"); in s3c64xx_spi_prepare_transfer()
389 if (IS_ERR(sdd->rx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
390 dev_err(&sdd->pdev->dev, "Failed to get RX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
391 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
395 sdd->tx_dma.ch = dma_request_chan(&sdd->pdev->dev, "tx"); in s3c64xx_spi_prepare_transfer()
396 if (IS_ERR(sdd->tx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
397 dev_err(&sdd->pdev->dev, "Failed to get TX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
398 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_prepare_transfer()
399 sdd->tx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
400 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
404 spi->dma_rx = sdd->rx_dma.ch; in s3c64xx_spi_prepare_transfer()
405 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
417 /* Releases DMA channels if they are allocated */ in s3c64xx_spi_unprepare_transfer()
418 if (sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_unprepare_transfer()
419 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_unprepare_transfer()
420 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_unprepare_transfer()
421 sdd->rx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
422 sdd->tx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
434 if (sdd->rx_dma.ch && sdd->tx_dma.ch) in s3c64xx_spi_can_dma()
435 return xfer->len >= sdd->fifo_depth; in s3c64xx_spi_can_dma()
448 } while (--count); in s3c64xx_iowrite8_32_rep()
460 } while (--count); in s3c64xx_iowrite16_32_rep()
467 void __iomem *addr = sdd->regs + S3C64XX_SPI_TX_DATA; in s3c64xx_iowrite_rep()
468 const void *buf = xfer->tx_buf; in s3c64xx_iowrite_rep()
469 unsigned int len = xfer->len; in s3c64xx_iowrite_rep()
471 switch (sdd->cur_bpw) { in s3c64xx_iowrite_rep()
476 if (sdd->port_conf->use_32bit_io) in s3c64xx_iowrite_rep()
482 if (sdd->port_conf->use_32bit_io) in s3c64xx_iowrite_rep()
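Only the loop tails of s3c64xx_iowrite8_32_rep() and s3c64xx_iowrite16_32_rep() are matched above. Given the @use_32bit_io note earlier ("the SoC allows only 32-bit register accesses"), the presumed idea is that each 8- or 16-bit datum is pushed into the TX FIFO with a full 32-bit register write. A sketch of that idea under that assumption; it is not a verbatim copy of the helpers:

static void example_iowrite8_as_32(void __iomem *fifo, const u8 *buf,
				   unsigned int count)
{
	/* One 32-bit write per byte; with the bus width configured for
	 * 8 bits the controller consumes only the low byte of each write. */
	while (count--)
		writel(*buf++, fifo);
}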
493 void __iomem *regs = sdd->regs; in s3c64xx_enable_datapath()
511 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
516 if (xfer->tx_buf != NULL) { in s3c64xx_enable_datapath()
517 sdd->state |= TXBUSY; in s3c64xx_enable_datapath()
521 ret = s3c64xx_prepare_dma(&sdd->tx_dma, &xfer->tx_sg); in s3c64xx_enable_datapath()
527 if (xfer->rx_buf != NULL) { in s3c64xx_enable_datapath()
528 sdd->state |= RXBUSY; in s3c64xx_enable_datapath()
530 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL in s3c64xx_enable_datapath()
531 && !(sdd->cur_mode & SPI_CPHA)) in s3c64xx_enable_datapath()
537 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
540 ret = s3c64xx_prepare_dma(&sdd->rx_dma, &xfer->rx_sg); in s3c64xx_enable_datapath()
556 void __iomem *regs = sdd->regs; in s3c64xx_spi_wait_for_timeout()
559 u32 max_fifo = sdd->fifo_depth; in s3c64xx_spi_wait_for_timeout()
566 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val); in s3c64xx_spi_wait_for_timeout()
575 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_dma()
581 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_dma()
586 val = wait_for_completion_timeout(&sdd->xfer_completion, val); in s3c64xx_wait_for_dma()
590 * proceed further; otherwise return -ETIMEDOUT. in s3c64xx_wait_for_dma()
593 * DmaRx returns only after the DMA has read the data out of the FIFO, which in s3c64xx_wait_for_dma()
595 * the transfer involved Rx (with or without Tx). in s3c64xx_wait_for_dma()
597 if (val && !xfer->rx_buf) { in s3c64xx_wait_for_dma()
602 && --val) { in s3c64xx_wait_for_dma()
609 /* If timed out while checking rx/tx status return error */ in s3c64xx_wait_for_dma()
611 return -ETIMEDOUT; in s3c64xx_wait_for_dma()
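The comment fragment above is the reason for the extra wait: the TX DMA callback fires as soon as the data has been written into the FIFO, before it has actually been clocked out on the bus, so a transfer with no RX leg must still wait for the FIFO to drain. A bounded drain loop sketched from the status macros shown earlier; it is illustrative only, and the real s3c64xx_wait_for_dma() additionally consults the controller's TX-done status bit and uses its own loop budget:

static int example_wait_tx_fifo_drained(struct s3c64xx_spi_driver_data *sdd)
{
	unsigned long loops = 1000000;	/* arbitrary budget for the sketch */
	u32 status;

	do {
		status = readl(sdd->regs + S3C64XX_SPI_STATUS);
	} while (TX_FIFO_LVL(status, sdd) && --loops);

	return loops ? 0 : -ETIMEDOUT;
}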
619 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_pio()
629 time_us = (xfer->len * 8 * 1000 * 1000) / sdd->cur_speed; in s3c64xx_wait_for_pio()
635 if (RX_FIFO_LVL(status, sdd) < xfer->len) in s3c64xx_wait_for_pio()
640 if (!wait_for_completion_timeout(&sdd->xfer_completion, val)) in s3c64xx_wait_for_pio()
641 return -ETIMEDOUT; in s3c64xx_wait_for_pio()
647 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val); in s3c64xx_wait_for_pio()
650 return -EIO; in s3c64xx_wait_for_pio()
653 if (!xfer->rx_buf) { in s3c64xx_wait_for_pio()
654 sdd->state &= ~TXBUSY; in s3c64xx_wait_for_pio()
666 loops = xfer->len / sdd->fifo_depth; in s3c64xx_wait_for_pio()
667 buf = xfer->rx_buf; in s3c64xx_wait_for_pio()
673 switch (sdd->cur_bpw) { in s3c64xx_wait_for_pio()
689 } while (loops--); in s3c64xx_wait_for_pio()
690 sdd->state &= ~RXBUSY; in s3c64xx_wait_for_pio()
697 void __iomem *regs = sdd->regs; in s3c64xx_spi_config()
700 int div = sdd->port_conf->clk_div; in s3c64xx_spi_config()
703 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
715 if (sdd->cur_mode & SPI_CPOL) in s3c64xx_spi_config()
718 if (sdd->cur_mode & SPI_CPHA) in s3c64xx_spi_config()
723 /* Set Channel & DMA Mode */ in s3c64xx_spi_config()
728 switch (sdd->cur_bpw) { in s3c64xx_spi_config()
743 if ((sdd->cur_mode & SPI_LOOP) && sdd->port_conf->has_loopback) in s3c64xx_spi_config()
750 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
751 ret = clk_set_rate(sdd->src_clk, sdd->cur_speed * div); in s3c64xx_spi_config()
754 sdd->cur_speed = clk_get_rate(sdd->src_clk) / div; in s3c64xx_spi_config()
759 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / div - 1) in s3c64xx_spi_config()
778 struct spi_device *spi = msg->spi; in s3c64xx_spi_prepare_message()
779 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_prepare_message()
784 writel(0, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
786 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
793 struct spi_controller *ctlr = spi->controller; in s3c64xx_spi_max_transfer_size()
795 return ctlr->can_dma ? S3C64XX_SPI_PACKET_CNT_MASK : SIZE_MAX; in s3c64xx_spi_max_transfer_size()
803 const unsigned int fifo_len = sdd->fifo_depth; in s3c64xx_spi_transfer_one()
816 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
819 bpw = xfer->bits_per_word; in s3c64xx_spi_transfer_one()
820 speed = xfer->speed_hz; in s3c64xx_spi_transfer_one()
822 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) { in s3c64xx_spi_transfer_one()
823 sdd->cur_bpw = bpw; in s3c64xx_spi_transfer_one()
824 sdd->cur_speed = speed; in s3c64xx_spi_transfer_one()
825 sdd->cur_mode = spi->mode; in s3c64xx_spi_transfer_one()
831 if (!is_polling(sdd) && xfer->len >= fifo_len && in s3c64xx_spi_transfer_one()
832 sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_transfer_one()
834 } else if (xfer->len >= fifo_len) { in s3c64xx_spi_transfer_one()
835 tx_buf = xfer->tx_buf; in s3c64xx_spi_transfer_one()
836 rx_buf = xfer->rx_buf; in s3c64xx_spi_transfer_one()
837 origin_len = xfer->len; in s3c64xx_spi_transfer_one()
838 target_len = xfer->len; in s3c64xx_spi_transfer_one()
839 xfer->len = fifo_len - 1; in s3c64xx_spi_transfer_one()
844 if (!use_dma && xfer->len > S3C64XX_SPI_POLLING_SIZE) in s3c64xx_spi_transfer_one()
848 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
850 rdy_lv = xfer->len; in s3c64xx_spi_transfer_one()
853 * fifo_lvl up to 64 byte -> N bytes in s3c64xx_spi_transfer_one()
854 * 128 byte -> RDY_LVL * 2 bytes in s3c64xx_spi_transfer_one()
855 * 256 byte -> RDY_LVL * 4 bytes in s3c64xx_spi_transfer_one()
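The three comment lines above give the RX ready-level scaling rule: up to a 64-byte FIFO the threshold is programmed in bytes, a 128-byte FIFO counts it in 2-byte units, and a 256-byte FIFO in 4-byte units. Equivalently, once the FIFO is deeper than 64 bytes the byte count is divided by fifo_len / 64. A sketch of that arithmetic (an illustration of the stated rule, not necessarily the verbatim statement in the driver):

	/* rdy_lv starts out as the transfer length in bytes (rdy_lv =
	 * xfer->len above); rescale it to the unit size implied by the
	 * FIFO depth before programming the trigger level. */
	if (fifo_len > 64)
		rdy_lv /= fifo_len / 64;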
862 val = readl(sdd->regs + S3C64XX_SPI_MODE_CFG); in s3c64xx_spi_transfer_one()
865 writel(val, sdd->regs + S3C64XX_SPI_MODE_CFG); in s3c64xx_spi_transfer_one()
868 val = readl(sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_transfer_one()
870 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_transfer_one()
874 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
877 sdd->state &= ~RXBUSY; in s3c64xx_spi_transfer_one()
878 sdd->state &= ~TXBUSY; in s3c64xx_spi_transfer_one()
885 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
888 dev_err(&spi->dev, "failed to enable data path for transfer: %d\n", status); in s3c64xx_spi_transfer_one()
898 dev_err(&spi->dev, in s3c64xx_spi_transfer_one()
899 "I/O Error: rx-%d tx-%d rx-%c tx-%c len-%d dma-%d res-(%d)\n", in s3c64xx_spi_transfer_one()
900 xfer->rx_buf ? 1 : 0, xfer->tx_buf ? 1 : 0, in s3c64xx_spi_transfer_one()
901 (sdd->state & RXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
902 (sdd->state & TXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
903 xfer->len, use_dma ? 1 : 0, status); in s3c64xx_spi_transfer_one()
908 if (xfer->tx_buf && (sdd->state & TXBUSY)) { in s3c64xx_spi_transfer_one()
909 dmaengine_pause(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
910 dmaengine_tx_status(sdd->tx_dma.ch, sdd->tx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
911 dmaengine_terminate_all(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
912 dev_err(&spi->dev, "TX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
915 if (xfer->rx_buf && (sdd->state & RXBUSY)) { in s3c64xx_spi_transfer_one()
916 dmaengine_pause(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
917 dmaengine_tx_status(sdd->rx_dma.ch, sdd->rx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
918 dmaengine_terminate_all(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
919 dev_err(&spi->dev, "RX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
926 target_len -= xfer->len; in s3c64xx_spi_transfer_one()
928 if (xfer->tx_buf) in s3c64xx_spi_transfer_one()
929 xfer->tx_buf += xfer->len; in s3c64xx_spi_transfer_one()
931 if (xfer->rx_buf) in s3c64xx_spi_transfer_one()
932 xfer->rx_buf += xfer->len; in s3c64xx_spi_transfer_one()
935 xfer->len = fifo_len - 1; in s3c64xx_spi_transfer_one()
937 xfer->len = target_len; in s3c64xx_spi_transfer_one()
943 xfer->tx_buf = tx_buf; in s3c64xx_spi_transfer_one()
944 xfer->rx_buf = rx_buf; in s3c64xx_spi_transfer_one()
945 xfer->len = origin_len; in s3c64xx_spi_transfer_one()
958 target_np = spi->dev.of_node; in s3c64xx_get_target_ctrldata()
960 dev_err(&spi->dev, "device node not found\n"); in s3c64xx_get_target_ctrldata()
961 return ERR_PTR(-EINVAL); in s3c64xx_get_target_ctrldata()
966 return ERR_PTR(-ENOMEM); in s3c64xx_get_target_ctrldata()
969 of_get_child_by_name(target_np, "controller-data"); in s3c64xx_get_target_ctrldata()
971 dev_info(&spi->dev, "feedback delay set to default (0)\n"); in s3c64xx_get_target_ctrldata()
975 of_property_read_u32(data_np, "samsung,spi-feedback-delay", &fb_delay); in s3c64xx_get_target_ctrldata()
976 cs->fb_delay = fb_delay; in s3c64xx_get_target_ctrldata()
982 * and save the configuration in a local data structure.
988 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_setup()
993 sdd = spi_controller_get_devdata(spi->controller); in s3c64xx_spi_setup()
994 if (spi->dev.of_node) { in s3c64xx_spi_setup()
996 spi->controller_data = cs; in s3c64xx_spi_setup()
1001 dev_err(&spi->dev, "No CS for SPI(%d)\n", spi_get_chipselect(spi, 0)); in s3c64xx_spi_setup()
1002 return -ENODEV; in s3c64xx_spi_setup()
1008 pm_runtime_get_sync(&sdd->pdev->dev); in s3c64xx_spi_setup()
1010 div = sdd->port_conf->clk_div; in s3c64xx_spi_setup()
1013 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_setup()
1017 speed = clk_get_rate(sdd->src_clk) / div / (0 + 1); in s3c64xx_spi_setup()
1019 if (spi->max_speed_hz > speed) in s3c64xx_spi_setup()
1020 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
1022 psr = clk_get_rate(sdd->src_clk) / div / spi->max_speed_hz - 1; in s3c64xx_spi_setup()
1025 psr--; in s3c64xx_spi_setup()
1027 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
1028 if (spi->max_speed_hz < speed) { in s3c64xx_spi_setup()
1032 err = -EINVAL; in s3c64xx_spi_setup()
1037 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
1038 if (spi->max_speed_hz >= speed) { in s3c64xx_spi_setup()
1039 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
1041 dev_err(&spi->dev, "Can't set %dHz transfer speed\n", in s3c64xx_spi_setup()
1042 spi->max_speed_hz); in s3c64xx_spi_setup()
1043 err = -EINVAL; in s3c64xx_spi_setup()
1048 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
1054 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
1055 /* setup() returns with device de-selected */ in s3c64xx_spi_setup()
1061 if (spi->dev.of_node) in s3c64xx_spi_setup()
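The prescaler arithmetic in s3c64xx_spi_setup() above (psr = src_clk / div / max_speed_hz - 1, followed by a recomputation of the achievable rate) is easiest to follow with numbers. A worked example with invented figures, no real SoC clock implied; the psr increment itself is not among the matched lines:

	/*
	 * src_clk = 66 MHz, clk_div = 2, requested max_speed_hz = 10 MHz:
	 *   psr   = 66000000 / 2 / 10000000 - 1 = 2   (integer division)
	 *   speed = 66000000 / 2 / (2 + 1)      = 11 MHz, above the request,
	 * so psr is stepped up and the rate recomputed:
	 *   speed = 66000000 / 2 / (3 + 1)      = 8.25 MHz
	 * which is <= the request, so spi->max_speed_hz becomes 8250000.
	 */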
1072 if (spi->dev.of_node) in s3c64xx_spi_cleanup()
1081 struct spi_controller *spi = sdd->host; in s3c64xx_spi_irq()
1084 val = readl(sdd->regs + S3C64XX_SPI_STATUS); in s3c64xx_spi_irq()
1088 dev_err(&spi->dev, "RX overrun\n"); in s3c64xx_spi_irq()
1092 dev_err(&spi->dev, "RX underrun\n"); in s3c64xx_spi_irq()
1096 dev_err(&spi->dev, "TX overrun\n"); in s3c64xx_spi_irq()
1100 dev_err(&spi->dev, "TX underrun\n"); in s3c64xx_spi_irq()
1104 complete(&sdd->xfer_completion); in s3c64xx_spi_irq()
1105 /* RX_FIFO_RDY has no pending-clear bit, so turn off INT_EN_RX_FIFO_RDY */ in s3c64xx_spi_irq()
1106 val = readl(sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_irq()
1108 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_irq()
1112 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
1113 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
1120 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_hwinit()
1121 void __iomem *regs = sdd->regs; in s3c64xx_spi_hwinit()
1124 sdd->cur_speed = 0; in s3c64xx_spi_hwinit()
1126 if (sci->no_cs) in s3c64xx_spi_hwinit()
1127 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1128 else if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_hwinit()
1129 writel(S3C64XX_SPI_CS_SIG_INACT, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1131 /* Disable Interrupts - we use Polling if not DMA mode */ in s3c64xx_spi_hwinit()
1134 if (!sdd->port_conf->clk_from_cmu) in s3c64xx_spi_hwinit()
1135 writel(sci->src_clk_nr << S3C64XX_SPI_CLKSEL_SRCSHFT, in s3c64xx_spi_hwinit()
1166 return ERR_PTR(-ENOMEM); in s3c64xx_spi_parse_dt()
1168 if (of_property_read_u32(dev->of_node, "samsung,spi-src-clk", &temp)) { in s3c64xx_spi_parse_dt()
1170 sci->src_clk_nr = 0; in s3c64xx_spi_parse_dt()
1172 sci->src_clk_nr = temp; in s3c64xx_spi_parse_dt()
1175 if (of_property_read_u32(dev->of_node, "num-cs", &temp)) { in s3c64xx_spi_parse_dt()
1177 sci->num_cs = 1; in s3c64xx_spi_parse_dt()
1179 sci->num_cs = temp; in s3c64xx_spi_parse_dt()
1182 sci->no_cs = of_property_read_bool(dev->of_node, "no-cs-readback"); in s3c64xx_spi_parse_dt()
1183 sci->polling = !of_property_present(dev->of_node, "dmas"); in s3c64xx_spi_parse_dt()
1198 if (pdev->dev.of_node) in s3c64xx_spi_get_port_config()
1199 return of_device_get_match_data(&pdev->dev); in s3c64xx_spi_get_port_config()
1201 return (const struct s3c64xx_spi_port_config *)platform_get_device_id(pdev)->driver_data; in s3c64xx_spi_get_port_config()
1207 const struct s3c64xx_spi_port_config *port_conf = sdd->port_conf; in s3c64xx_spi_set_port_id()
1210 if (port_conf->rx_fifomask && port_conf->tx_fifomask) in s3c64xx_spi_set_port_id()
1213 if (pdev->dev.of_node) { in s3c64xx_spi_set_port_id()
1214 ret = of_alias_get_id(pdev->dev.of_node, "spi"); in s3c64xx_spi_set_port_id()
1216 return dev_err_probe(&pdev->dev, ret, in s3c64xx_spi_set_port_id()
1218 sdd->port_id = ret; in s3c64xx_spi_set_port_id()
1220 if (pdev->id < 0) in s3c64xx_spi_set_port_id()
1221 return dev_err_probe(&pdev->dev, -EINVAL, in s3c64xx_spi_set_port_id()
1223 sdd->port_id = pdev->id; in s3c64xx_spi_set_port_id()
1231 const struct s3c64xx_spi_port_config *port_conf = sdd->port_conf; in s3c64xx_spi_set_fifomask()
1233 if (port_conf->rx_fifomask) in s3c64xx_spi_set_fifomask()
1234 sdd->rx_fifomask = port_conf->rx_fifomask; in s3c64xx_spi_set_fifomask()
1236 sdd->rx_fifomask = FIFO_LVL_MASK(sdd) << in s3c64xx_spi_set_fifomask()
1237 port_conf->rx_lvl_offset; in s3c64xx_spi_set_fifomask()
1239 if (port_conf->tx_fifomask) in s3c64xx_spi_set_fifomask()
1240 sdd->tx_fifomask = port_conf->tx_fifomask; in s3c64xx_spi_set_fifomask()
1242 sdd->tx_fifomask = FIFO_LVL_MASK(sdd) << in s3c64xx_spi_set_fifomask()
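For a legacy entry that still relies on the deprecated fields, the fallback above rebuilds the RX mask as FIFO_LVL_MASK(sdd) << rx_lvl_offset (the TX branch's shift constant is not among the matched lines). A worked example with made-up legacy values:

	/*
	 * fifo_lvl_mask = 0x7f, rx_lvl_offset = 13:
	 *   rx_fifomask = 0x7f << 13 = 0x000fe000
	 *   RX_FIFO_LVL() then shifts right by __ffs(0x000fe000) = 13,
	 * recovering the same 0..127 level range the legacy macros used.
	 */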
1250 struct s3c64xx_spi_info *sci = dev_get_platdata(&pdev->dev); in s3c64xx_spi_probe()
1255 if (!sci && pdev->dev.of_node) { in s3c64xx_spi_probe()
1256 sci = s3c64xx_spi_parse_dt(&pdev->dev); in s3c64xx_spi_probe()
1262 return dev_err_probe(&pdev->dev, -ENODEV, in s3c64xx_spi_probe()
1269 host = devm_spi_alloc_host(&pdev->dev, sizeof(*sdd)); in s3c64xx_spi_probe()
1271 return dev_err_probe(&pdev->dev, -ENOMEM, in s3c64xx_spi_probe()
1277 sdd->port_conf = s3c64xx_spi_get_port_config(pdev); in s3c64xx_spi_probe()
1278 sdd->host = host; in s3c64xx_spi_probe()
1279 sdd->cntrlr_info = sci; in s3c64xx_spi_probe()
1280 sdd->pdev = pdev; in s3c64xx_spi_probe()
1286 if (sdd->port_conf->fifo_depth) in s3c64xx_spi_probe()
1287 sdd->fifo_depth = sdd->port_conf->fifo_depth; in s3c64xx_spi_probe()
1288 else if (of_property_read_u32(pdev->dev.of_node, "fifo-depth", in s3c64xx_spi_probe()
1289 &sdd->fifo_depth)) in s3c64xx_spi_probe()
1290 sdd->fifo_depth = FIFO_DEPTH(sdd); in s3c64xx_spi_probe()
1294 sdd->cur_bpw = 8; in s3c64xx_spi_probe()
1296 sdd->tx_dma.direction = DMA_MEM_TO_DEV; in s3c64xx_spi_probe()
1297 sdd->rx_dma.direction = DMA_DEV_TO_MEM; in s3c64xx_spi_probe()
1299 host->dev.of_node = pdev->dev.of_node; in s3c64xx_spi_probe()
1300 host->bus_num = -1; in s3c64xx_spi_probe()
1301 host->setup = s3c64xx_spi_setup; in s3c64xx_spi_probe()
1302 host->cleanup = s3c64xx_spi_cleanup; in s3c64xx_spi_probe()
1303 host->prepare_transfer_hardware = s3c64xx_spi_prepare_transfer; in s3c64xx_spi_probe()
1304 host->unprepare_transfer_hardware = s3c64xx_spi_unprepare_transfer; in s3c64xx_spi_probe()
1305 host->prepare_message = s3c64xx_spi_prepare_message; in s3c64xx_spi_probe()
1306 host->transfer_one = s3c64xx_spi_transfer_one; in s3c64xx_spi_probe()
1307 host->max_transfer_size = s3c64xx_spi_max_transfer_size; in s3c64xx_spi_probe()
1308 host->num_chipselect = sci->num_cs; in s3c64xx_spi_probe()
1309 host->use_gpio_descriptors = true; in s3c64xx_spi_probe()
1310 host->dma_alignment = 8; in s3c64xx_spi_probe()
1311 host->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) | in s3c64xx_spi_probe()
1313 /* the spi->mode bits understood by this driver: */ in s3c64xx_spi_probe()
1314 host->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH; in s3c64xx_spi_probe()
1315 if (sdd->port_conf->has_loopback) in s3c64xx_spi_probe()
1316 host->mode_bits |= SPI_LOOP; in s3c64xx_spi_probe()
1317 host->auto_runtime_pm = true; in s3c64xx_spi_probe()
1319 host->can_dma = s3c64xx_spi_can_dma; in s3c64xx_spi_probe()
1321 sdd->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &mem_res); in s3c64xx_spi_probe()
1322 if (IS_ERR(sdd->regs)) in s3c64xx_spi_probe()
1323 return PTR_ERR(sdd->regs); in s3c64xx_spi_probe()
1324 sdd->sfr_start = mem_res->start; in s3c64xx_spi_probe()
1326 if (sci->cfg_gpio && sci->cfg_gpio()) in s3c64xx_spi_probe()
1327 return dev_err_probe(&pdev->dev, -EBUSY, in s3c64xx_spi_probe()
1331 sdd->clk = devm_clk_get_enabled(&pdev->dev, "spi"); in s3c64xx_spi_probe()
1332 if (IS_ERR(sdd->clk)) in s3c64xx_spi_probe()
1333 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->clk), in s3c64xx_spi_probe()
1336 sprintf(clk_name, "spi_busclk%d", sci->src_clk_nr); in s3c64xx_spi_probe()
1337 sdd->src_clk = devm_clk_get_enabled(&pdev->dev, clk_name); in s3c64xx_spi_probe()
1338 if (IS_ERR(sdd->src_clk)) in s3c64xx_spi_probe()
1339 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->src_clk), in s3c64xx_spi_probe()
1343 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_probe()
1344 sdd->ioclk = devm_clk_get_enabled(&pdev->dev, "spi_ioclk"); in s3c64xx_spi_probe()
1345 if (IS_ERR(sdd->ioclk)) in s3c64xx_spi_probe()
1346 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->ioclk), in s3c64xx_spi_probe()
1350 pm_runtime_set_autosuspend_delay(&pdev->dev, AUTOSUSPEND_TIMEOUT); in s3c64xx_spi_probe()
1351 pm_runtime_use_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1352 pm_runtime_set_active(&pdev->dev); in s3c64xx_spi_probe()
1353 pm_runtime_enable(&pdev->dev); in s3c64xx_spi_probe()
1354 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_probe()
1359 spin_lock_init(&sdd->lock); in s3c64xx_spi_probe()
1360 init_completion(&sdd->xfer_completion); in s3c64xx_spi_probe()
1362 ret = devm_request_irq(&pdev->dev, irq, s3c64xx_spi_irq, 0, in s3c64xx_spi_probe()
1363 "spi-s3c64xx", sdd); in s3c64xx_spi_probe()
1365 dev_err(&pdev->dev, "Failed to request IRQ %d: %d\n", in s3c64xx_spi_probe()
1372 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_probe()
1374 ret = devm_spi_register_controller(&pdev->dev, host); in s3c64xx_spi_probe()
1376 dev_err(&pdev->dev, "cannot register SPI host: %d\n", ret); in s3c64xx_spi_probe()
1380 dev_dbg(&pdev->dev, "Samsung SoC SPI Driver loaded for Bus SPI-%d with %d Targets attached\n", in s3c64xx_spi_probe()
1381 host->bus_num, host->num_chipselect); in s3c64xx_spi_probe()
1382 dev_dbg(&pdev->dev, "\tIOmem=[%pR]\tFIFO %dbytes\n", in s3c64xx_spi_probe()
1383 mem_res, sdd->fifo_depth); in s3c64xx_spi_probe()
1385 pm_runtime_put_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1390 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_probe()
1391 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_probe()
1392 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_probe()
1402 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_remove()
1404 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_remove()
1407 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_remove()
1408 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_remove()
1411 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_remove()
1412 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_remove()
1413 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_remove()
1431 sdd->cur_speed = 0; /* Output Clock is stopped */ in s3c64xx_spi_suspend()
1440 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_resume()
1443 if (sci->cfg_gpio) in s3c64xx_spi_resume()
1444 sci->cfg_gpio(); in s3c64xx_spi_resume()
1460 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_runtime_suspend()
1461 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_suspend()
1462 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_suspend()
1473 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_runtime_resume()
1474 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1479 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1483 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_runtime_resume()
1491 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_runtime_resume()
1496 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1498 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1511 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1513 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1521 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1523 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1530 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1532 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1540 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1542 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1552 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1554 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1564 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1566 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1589 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1592 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1604 /* fifo_lvl_mask is deprecated. Use {rx, tx}_fifomask instead. */
1606 /* rx_lvl_offset is deprecated. Use {rx, tx}_fifomask instead. */
1631 .name = "s3c2443-spi",
1634 .name = "s3c6410-spi",
1642 { .compatible = "google,gs101-spi",
1645 { .compatible = "samsung,s3c2443-spi",
1648 { .compatible = "samsung,s3c6410-spi",
1651 { .compatible = "samsung,s5pv210-spi",
1654 { .compatible = "samsung,exynos4210-spi",
1657 { .compatible = "samsung,exynos7-spi",
1660 { .compatible = "samsung,exynos5433-spi",
1663 { .compatible = "samsung,exynos850-spi",
1666 { .compatible = "samsung,exynosautov9-spi",
1669 { .compatible = "tesla,fsd-spi",
1678 .name = "s3c64xx-spi",
1686 MODULE_ALIAS("platform:s3c64xx-spi");