Lines Matching +full:spi +full:- +full:rdy +full:- +full:drctl (the tokenized form of the devicetree property "fsl,spi-rdy-drctl", read at line 1798 below)

1 // SPDX-License-Identifier: GPL-2.0+
2 // Copyright 2004-2007 Freescale Semiconductor, Inc. All Rights Reserved.
11 #include <linux/dma-mapping.h>
25 #include <linux/spi/spi.h>
30 #include <linux/dma/imx-dma.h>
78 int (*prepare_transfer)(struct spi_imx_data *spi_imx, struct spi_device *spi,
138 return d->devtype_data->devtype == IMX27_CSPI;
143 return d->devtype_data->devtype == IMX35_CSPI;
148 return d->devtype_data->devtype == IMX51_ECSPI;
153 return d->devtype_data->devtype == IMX53_ECSPI;
159 unsigned int val = readl(spi_imx->base + MXC_CSPIRXDATA); \
161 if (spi_imx->rx_buf) { \
162 *(type *)spi_imx->rx_buf = val; \
163 spi_imx->rx_buf += sizeof(type); \
166 spi_imx->remainder -= sizeof(type); \
174 if (spi_imx->tx_buf) { \
175 val = *(type *)spi_imx->tx_buf; \
176 spi_imx->tx_buf += sizeof(type); \
179 spi_imx->count -= sizeof(type); \
181 writel(val, spi_imx->base + MXC_CSPITXDATA); \
238 static bool spi_imx_can_dma(struct spi_controller *controller, struct spi_device *spi,
243 if (!use_dma || controller->fallback)
246 if (!controller->dma_rx)
249 if (spi_imx->target_mode)
252 if (transfer->len < spi_imx->devtype_data->fifo_size)
255 spi_imx->dynamic_burst = 0;
264 * outside the range 0 - 3. We therefore need to limit the cs value to avoid
277 #define MX51_ECSPI_CTRL_DRCTL(drctl) ((drctl) << 16)
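This macro places the DRCTL value in bits 17:16 of the ECSPI control register; the value comes from the "fsl,spi-rdy-drctl" devicetree property read at line 1798 and is only applied when a device selects SPI_READY mode (lines 564-565). A minimal sketch in the driver's own terms; the field encodings are an assumption taken from the i.MX reference manuals, not from these matches:

/* Assumed DRCTL encodings (hedged):
 *   0 - SPI_RDY input ignored
 *   1 - burst triggered by a falling edge on SPI_RDY
 *   2 - burst triggered by a low level on SPI_RDY
 *   3 - reserved ('11' is rejected at probe time)
 */
if (spi->mode & SPI_READY)
	ctrl |= MX51_ECSPI_CTRL_DRCTL(spi_imx->spi_drctl);	/* CONREG bits 17:16 */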
312 * As measured on the i.MX6, the SPI host controller inserts a 4 SPI-Clock
326 unsigned int val = readl(spi_imx->base + MXC_CSPIRXDATA);
328 if (spi_imx->rx_buf) {
332 bytes_per_word = spi_imx_bytes_per_word(spi_imx->bits_per_word);
338 *(u32 *)spi_imx->rx_buf = val;
339 spi_imx->rx_buf += sizeof(u32);
342 spi_imx->remainder -= sizeof(u32);
350 unaligned = spi_imx->remainder % 4;
357 if (spi_imx_bytes_per_word(spi_imx->bits_per_word) == 2) {
362 val = readl(spi_imx->base + MXC_CSPIRXDATA);
364 while (unaligned--) {
365 if (spi_imx->rx_buf) {
366 *(u8 *)spi_imx->rx_buf = (val >> (8 * unaligned)) & 0xff;
367 spi_imx->rx_buf++;
369 spi_imx->remainder--;
380 if (spi_imx->tx_buf) {
381 val = *(u32 *)spi_imx->tx_buf;
382 spi_imx->tx_buf += sizeof(u32);
385 spi_imx->count -= sizeof(u32);
387 bytes_per_word = spi_imx_bytes_per_word(spi_imx->bits_per_word);
394 writel(val, spi_imx->base + MXC_CSPITXDATA);
402 unaligned = spi_imx->count % 4;
409 if (spi_imx_bytes_per_word(spi_imx->bits_per_word) == 2) {
414 while (unaligned--) {
415 if (spi_imx->tx_buf) {
416 val |= *(u8 *)spi_imx->tx_buf << (8 * unaligned);
417 spi_imx->tx_buf++;
419 spi_imx->count--;
422 writel(val, spi_imx->base + MXC_CSPITXDATA);
427 u32 val = ioread32be(spi_imx->base + MXC_CSPIRXDATA);
429 if (spi_imx->rx_buf) {
430 int n_bytes = spi_imx->target_burst % sizeof(val);
435 memcpy(spi_imx->rx_buf,
436 ((u8 *)&val) + sizeof(val) - n_bytes, n_bytes);
438 spi_imx->rx_buf += n_bytes;
439 spi_imx->target_burst -= n_bytes;
442 spi_imx->remainder -= sizeof(u32);
448 int n_bytes = spi_imx->count % sizeof(val);
453 if (spi_imx->tx_buf) {
454 memcpy(((u8 *)&val) + sizeof(val) - n_bytes,
455 spi_imx->tx_buf, n_bytes);
456 spi_imx->tx_buf += n_bytes;
459 spi_imx->count -= n_bytes;
461 iowrite32be(val, spi_imx->base + MXC_CSPITXDATA);
469 * there are two 4-bit dividers, the pre-divider divides by
470 * $pre, the post-divider by 2^$post
473 unsigned int fin = spi_imx->spi_clk;
477 post = fls(fin) - fls(fspi);
483 post = max(4U, post) - 4;
485 dev_err(spi_imx->dev, "cannot set clock freq: %u (base freq: %u)\n",
490 pre = DIV_ROUND_UP(fin, fspi << post) - 1;
492 dev_dbg(spi_imx->dev, "%s: fin: %u, fspi: %u, post: %u, pre: %u\n",
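Taken together, the two dividers give an effective bus clock of fin / ((pre + 1) * 2^post): the 4-bit pre field divides by pre + 1 (hence the "- 1" at line 490) and the 4-bit post field divides by a power of two. An illustrative helper, not driver code, with a worked example:

/* e.g. fin = 60 MHz, pre = 5, post = 0  ->  60000000 / 6 = 10 MHz */
static unsigned int ecspi_effective_hz(unsigned int fin, unsigned int pre,
				       unsigned int post)
{
	return fin / ((pre + 1) << post);
}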
515 writel(val, spi_imx->base + MX51_ECSPI_INT);
522 reg = readl(spi_imx->base + MX51_ECSPI_CTRL);
524 writel(reg, spi_imx->base + MX51_ECSPI_CTRL);
531 ctrl = readl(spi_imx->base + MX51_ECSPI_CTRL);
533 writel(ctrl, spi_imx->base + MX51_ECSPI_CTRL);
536 static int mx51_ecspi_channel(const struct spi_device *spi)
538 if (!spi_get_csgpiod(spi, 0))
539 return spi_get_chipselect(spi, 0);
540 return spi->controller->unused_native_cs;
546 struct spi_device *spi = msg->spi;
551 u32 cfg = readl(spi_imx->base + MX51_ECSPI_CONFIG);
553 int channel = mx51_ecspi_channel(spi);
556 if (spi_imx->target_mode)
564 if (spi->mode & SPI_READY)
565 ctrl |= MX51_ECSPI_CTRL_DRCTL(spi_imx->spi_drctl);
574 writel(ctrl, spi_imx->base + MX51_ECSPI_CTRL);
576 testreg = readl(spi_imx->base + MX51_ECSPI_TESTREG);
577 if (spi->mode & SPI_LOOP)
581 writel(testreg, spi_imx->base + MX51_ECSPI_TESTREG);
585 	 * is not functional for the i.MX53 SoC, so configure the SPI burst as completed when
588 if (spi_imx->target_mode && is_imx53_ecspi(spi_imx))
593 if (spi->mode & SPI_CPOL) {
601 if (spi->mode & SPI_MOSI_IDLE_LOW)
606 if (spi->mode & SPI_CS_HIGH)
614 writel(cfg, spi_imx->base + MX51_ECSPI_CONFIG);
624 * the SPI communication as the device on the other end would consider
627 * Because spi_imx->spi_bus_clk is only set in prepare_message
633 list_for_each_entry(xfer, &msg->transfers, transfer_list) {
634 if (!xfer->speed_hz)
636 min_speed_hz = min(xfer->speed_hz, min_speed_hz);
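The (partially shown) comment above these lines explains the reason for the loop: a write to the CONFIG register takes roughly one SCLK tick to take effect, and with a very slow SCLK the chip select could be asserted before a polarity change has propagated, so the slowest transfer speed in the message is used to size a short wait. A hedged sketch of that wait, not the exact driver code (min_speed_hz as computed above; udelay()/usleep_range() are the standard kernel delay helpers):

/* wait two SCLK periods at the slowest speed used by this message */
unsigned long delay_us = DIV_ROUND_UP(2 * USEC_PER_SEC, min_speed_hz);

if (delay_us < 10)		/* SCLK faster than ~200 kHz */
	udelay(delay_us);
else
	usleep_range(delay_us, delay_us + 10);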
649 struct spi_device *spi)
651 bool cpha = (spi->mode & SPI_CPHA);
652 bool flip_cpha = (spi->mode & SPI_RX_CPHA_FLIP) && spi_imx->rx_only;
653 u32 cfg = readl(spi_imx->base + MX51_ECSPI_CONFIG);
654 int channel = mx51_ecspi_channel(spi);
664 writel(cfg, spi_imx->base + MX51_ECSPI_CONFIG);
668 struct spi_device *spi, struct spi_transfer *t)
670 u32 ctrl = readl(spi_imx->base + MX51_ECSPI_CTRL);
676 if (spi_imx->target_mode && is_imx53_ecspi(spi_imx))
677 ctrl |= (spi_imx->target_burst * 8 - 1)
680 ctrl |= (spi_imx->bits_per_word - 1)
687 ctrl |= mx51_ecspi_clkdiv(spi_imx, spi_imx->spi_bus_clk, &clk);
688 spi_imx->spi_bus_clk = clk;
690 mx51_configure_cpha(spi_imx, spi);
696 if (spi_imx->usedma && spi_imx->devtype_data->tx_glitch_fixed)
701 writel(ctrl, spi_imx->base + MX51_ECSPI_CTRL);
703 /* calculate word delay in SPI Clock (SCLK) cycles */
704 if (t->word_delay.value == 0) {
706 } else if (t->word_delay.unit == SPI_DELAY_UNIT_SCK) {
707 word_delay_sck = t->word_delay.value;
714 word_delay_sck -= MX51_ECSPI_PERIOD_MIN_DELAY_SCK + 1;
718 word_delay_ns = spi_delay_to_ns(&t->word_delay, t);
724 spi_imx->spi_bus_clk)) {
728 spi_imx->spi_bus_clk)) {
731 word_delay_ns -= mul_u64_u32_div(NSEC_PER_SEC,
733 spi_imx->spi_bus_clk);
735 word_delay_sck = DIV_U64_ROUND_UP((u64)word_delay_ns * spi_imx->spi_bus_clk,
741 return -EINVAL;
744 spi_imx->base + MX51_ECSPI_PERIOD);
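The inter-word delay is programmed into MX51_ECSPI_PERIOD in SCLK cycles. When the delay is requested in nanoseconds it is converted using the bus clock (line 735), after the minimum gap the controller always inserts (the 4 SPI-clock delay noted at line 312) has been discounted. A hedged sketch of just the unit conversion; the helper name is hypothetical:

/* e.g. 1000 ns at a 10 MHz SCLK -> 10 cycles (rounded up) */
static u32 ns_to_sclk_cycles(u64 word_delay_ns, u32 spi_bus_clk)
{
	return DIV_U64_ROUND_UP(word_delay_ns * spi_bus_clk, NSEC_PER_SEC);
}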
753 if (spi_imx->devtype_data->tx_glitch_fixed)
754 tx_wml = spi_imx->wml;
759 writel(MX51_ECSPI_DMA_RX_WML(spi_imx->wml - 1) |
761 MX51_ECSPI_DMA_RXT_WML(spi_imx->wml) |
763 MX51_ECSPI_DMA_RXTDEN, spi_imx->base + MX51_ECSPI_DMA);
768 return readl(spi_imx->base + MX51_ECSPI_STAT) & MX51_ECSPI_STAT_RR;
775 readl(spi_imx->base + MXC_CSPIRXDATA);
818 writel(val, spi_imx->base + MXC_CSPIINT);
825 reg = readl(spi_imx->base + MXC_CSPICTRL);
827 writel(reg, spi_imx->base + MXC_CSPICTRL);
837 struct spi_device *spi, struct spi_transfer *t)
842 reg |= spi_imx_clkdiv_2(spi_imx->spi_clk, spi_imx->spi_bus_clk, &clk) <<
844 spi_imx->spi_bus_clk = clk;
847 reg |= (spi_imx->bits_per_word - 1) << MX35_CSPICTRL_BL_SHIFT;
850 reg |= (spi_imx->bits_per_word - 1) << MX31_CSPICTRL_BC_SHIFT;
853 if (spi->mode & SPI_CPHA)
855 if (spi->mode & SPI_CPOL)
857 if (spi->mode & SPI_CS_HIGH)
859 if (!spi_get_csgpiod(spi, 0))
860 reg |= (spi_get_chipselect(spi, 0)) <<
864 if (spi_imx->usedma)
867 writel(reg, spi_imx->base + MXC_CSPICTRL);
869 reg = readl(spi_imx->base + MX31_CSPI_TESTREG);
870 if (spi->mode & SPI_LOOP)
874 writel(reg, spi_imx->base + MX31_CSPI_TESTREG);
876 if (spi_imx->usedma) {
882 spi_imx->base + MX31_CSPI_DMAREG);
890 return readl(spi_imx->base + MX31_CSPISTATUS) & MX31_STATUS_RR;
896 while (readl(spi_imx->base + MX31_CSPISTATUS) & MX31_STATUS_RR)
897 readl(spi_imx->base + MXC_CSPIRXDATA);
922 writel(val, spi_imx->base + MXC_CSPIINT);
929 reg = readl(spi_imx->base + MXC_CSPICTRL);
931 writel(reg, spi_imx->base + MXC_CSPICTRL);
941 struct spi_device *spi, struct spi_transfer *t)
947 reg |= spi_imx_clkdiv_1(spi_imx->spi_clk, spi_imx->spi_bus_clk, max, &clk)
949 spi_imx->spi_bus_clk = clk;
951 reg |= spi_imx->bits_per_word - 1;
953 if (spi->mode & SPI_CPHA)
955 if (spi->mode & SPI_CPOL)
957 if (spi->mode & SPI_CS_HIGH)
959 if (!spi_get_csgpiod(spi, 0))
960 reg |= spi_get_chipselect(spi, 0) << MX21_CSPICTRL_CS_SHIFT;
962 writel(reg, spi_imx->base + MXC_CSPICTRL);
969 return readl(spi_imx->base + MXC_CSPIINT) & MX21_INTREG_RR;
974 writel(1, spi_imx->base + MXC_RESET);
997 writel(val, spi_imx->base + MXC_CSPIINT);
1004 reg = readl(spi_imx->base + MXC_CSPICTRL);
1006 writel(reg, spi_imx->base + MXC_CSPICTRL);
1016 struct spi_device *spi, struct spi_transfer *t)
1021 reg |= spi_imx_clkdiv_2(spi_imx->spi_clk, spi_imx->spi_bus_clk, &clk) <<
1023 spi_imx->spi_bus_clk = clk;
1025 reg |= spi_imx->bits_per_word - 1;
1027 if (spi->mode & SPI_CPHA)
1029 if (spi->mode & SPI_CPOL)
1032 writel(reg, spi_imx->base + MXC_CSPICTRL);
1039 return readl(spi_imx->base + MXC_CSPIINT) & MX1_INTREG_RR;
1044 writel(1, spi_imx->base + MXC_RESET);
1167 { .compatible = "fsl,imx1-cspi", .data = &imx1_cspi_devtype_data, },
1168 { .compatible = "fsl,imx21-cspi", .data = &imx21_cspi_devtype_data, },
1169 { .compatible = "fsl,imx27-cspi", .data = &imx27_cspi_devtype_data, },
1170 { .compatible = "fsl,imx31-cspi", .data = &imx31_cspi_devtype_data, },
1171 { .compatible = "fsl,imx35-cspi", .data = &imx35_cspi_devtype_data, },
1172 { .compatible = "fsl,imx51-ecspi", .data = &imx51_ecspi_devtype_data, },
1173 { .compatible = "fsl,imx53-ecspi", .data = &imx53_ecspi_devtype_data, },
1174 { .compatible = "fsl,imx6ul-ecspi", .data = &imx6ul_ecspi_devtype_data, },
1183 ctrl = readl(spi_imx->base + MX51_ECSPI_CTRL);
1185 ctrl |= ((n_bits - 1) << MX51_ECSPI_CTRL_BL_OFFSET);
1186 writel(ctrl, spi_imx->base + MX51_ECSPI_CTRL);
1198 if (!spi_imx->remainder) {
1199 if (spi_imx->dynamic_burst) {
1202 burst_len = spi_imx->count % MX51_ECSPI_CTRL_MAX_BURST;
1209 spi_imx->remainder = burst_len;
1211 spi_imx->remainder = spi_imx_bytes_per_word(spi_imx->bits_per_word);
1215 while (spi_imx->txfifo < spi_imx->devtype_data->fifo_size) {
1216 if (!spi_imx->count)
1218 if (spi_imx->dynamic_burst &&
1219 spi_imx->txfifo >= DIV_ROUND_UP(spi_imx->remainder, 4))
1221 spi_imx->tx(spi_imx);
1222 spi_imx->txfifo++;
1225 if (!spi_imx->target_mode)
1226 spi_imx->devtype_data->trigger(spi_imx);
1233 while (spi_imx->txfifo &&
1234 spi_imx->devtype_data->rx_available(spi_imx)) {
1235 spi_imx->rx(spi_imx);
1236 spi_imx->txfifo--;
1239 if (spi_imx->count) {
1244 if (spi_imx->txfifo) {
1248 spi_imx->devtype_data->intctrl(
1253 spi_imx->devtype_data->intctrl(spi_imx, 0);
1254 complete(&spi_imx->xfer_done);
1266 switch (spi_imx_bytes_per_word(spi_imx->bits_per_word)) {
1277 return -EINVAL;
1281 tx.dst_addr = spi_imx->base_phys + MXC_CSPITXDATA;
1283 tx.dst_maxburst = spi_imx->wml;
1284 ret = dmaengine_slave_config(controller->dma_tx, &tx);
1286 dev_err(spi_imx->dev, "TX dma configuration failed with %d\n", ret);
1291 rx.src_addr = spi_imx->base_phys + MXC_CSPIRXDATA;
1293 rx.src_maxburst = spi_imx->wml;
1294 ret = dmaengine_slave_config(controller->dma_rx, &rx);
1296 dev_err(spi_imx->dev, "RX dma configuration failed with %d\n", ret);
1303 static int spi_imx_setupxfer(struct spi_device *spi,
1306 struct spi_imx_data *spi_imx = spi_controller_get_devdata(spi->controller);
1311 if (!t->speed_hz) {
1312 if (!spi->max_speed_hz) {
1313 dev_err(&spi->dev, "no speed_hz provided!\n");
1314 return -EINVAL;
1316 dev_dbg(&spi->dev, "using spi->max_speed_hz!\n");
1317 spi_imx->spi_bus_clk = spi->max_speed_hz;
1319 spi_imx->spi_bus_clk = t->speed_hz;
1321 spi_imx->bits_per_word = t->bits_per_word;
1322 spi_imx->count = t->len;
1325 * Initialize the functions for transfer. To transfer non byte-aligned
1326 * words, we have to use multiple word-size bursts. To insert word
1330 if (spi_imx->devtype_data->dynamic_burst && !spi_imx->target_mode &&
1331 !(spi->mode & SPI_CS_WORD) &&
1332 !(t->word_delay.value) &&
1333 (spi_imx->bits_per_word == 8 ||
1334 spi_imx->bits_per_word == 16 ||
1335 spi_imx->bits_per_word == 32)) {
1337 spi_imx->rx = spi_imx_buf_rx_swap;
1338 spi_imx->tx = spi_imx_buf_tx_swap;
1339 spi_imx->dynamic_burst = 1;
1342 if (spi_imx->bits_per_word <= 8) {
1343 spi_imx->rx = spi_imx_buf_rx_u8;
1344 spi_imx->tx = spi_imx_buf_tx_u8;
1345 } else if (spi_imx->bits_per_word <= 16) {
1346 spi_imx->rx = spi_imx_buf_rx_u16;
1347 spi_imx->tx = spi_imx_buf_tx_u16;
1349 spi_imx->rx = spi_imx_buf_rx_u32;
1350 spi_imx->tx = spi_imx_buf_tx_u32;
1352 spi_imx->dynamic_burst = 0;
1355 if (spi_imx_can_dma(spi_imx->controller, spi, t))
1356 spi_imx->usedma = true;
1358 spi_imx->usedma = false;
1360 spi_imx->rx_only = ((t->tx_buf == NULL)
1361 || (t->tx_buf == spi->controller->dummy_tx));
1363 if (is_imx53_ecspi(spi_imx) && spi_imx->target_mode) {
1364 spi_imx->rx = mx53_ecspi_rx_target;
1365 spi_imx->tx = mx53_ecspi_tx_target;
1366 spi_imx->target_burst = t->len;
1369 spi_imx->devtype_data->prepare_transfer(spi_imx, spi, t);
1376 struct spi_controller *controller = spi_imx->controller;
1378 if (controller->dma_rx) {
1379 dma_release_channel(controller->dma_rx);
1380 controller->dma_rx = NULL;
1383 if (controller->dma_tx) {
1384 dma_release_channel(controller->dma_tx);
1385 controller->dma_tx = NULL;
1394 spi_imx->wml = spi_imx->devtype_data->fifo_size / 2;
1397 controller->dma_tx = dma_request_chan(dev, "tx");
1398 if (IS_ERR(controller->dma_tx)) {
1399 ret = PTR_ERR(controller->dma_tx);
1401 controller->dma_tx = NULL;
1406 controller->dma_rx = dma_request_chan(dev, "rx");
1407 if (IS_ERR(controller->dma_rx)) {
1408 ret = PTR_ERR(controller->dma_rx);
1410 controller->dma_rx = NULL;
1414 init_completion(&spi_imx->dma_rx_completion);
1415 init_completion(&spi_imx->dma_tx_completion);
1416 controller->can_dma = spi_imx_can_dma;
1417 controller->max_dma_len = MAX_SDMA_BD_BYTES;
1418 spi_imx->controller->flags = SPI_CONTROLLER_MUST_RX |
1431 complete(&spi_imx->dma_rx_completion);
1438 complete(&spi_imx->dma_tx_completion);
1446 timeout = (8 + 4) * size / spi_imx->spi_bus_clk;
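The timeout budget above costs each transferred byte as 8 data bits plus a 4-bit margin for inter-byte and chip-select gaps. Spelled out with a worked example (illustrative only; the rest of the function, not among these matches, pads the result and converts it to jiffies):

/* (8 + 4) bits per byte / bus clock = whole seconds of wire time;
 * e.g. 4096 bytes at 10 MHz: 12 * 4096 / 10000000 truncates to 0 s
 * (about 4.9 ms of actual wire time).
 */
unsigned long secs = (8 + 4) * (unsigned long)size / spi_imx->spi_bus_clk;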
1461 struct spi_controller *controller = spi_imx->controller;
1462 struct sg_table *tx = &transfer->tx_sg, *rx = &transfer->rx_sg;
1463 struct scatterlist *last_sg = sg_last(rx->sgl, rx->nents);
1468 bytes_per_word = spi_imx_bytes_per_word(transfer->bits_per_word);
1469 for (i = spi_imx->devtype_data->fifo_size / 2; i > 0; i--) {
1477 spi_imx->wml = i;
1483 if (!spi_imx->devtype_data->setup_wml) {
1484 dev_err(spi_imx->dev, "No setup_wml()?\n");
1485 ret = -EINVAL;
1488 spi_imx->devtype_data->setup_wml(spi_imx);
1494 desc_rx = dmaengine_prep_slave_sg(controller->dma_rx,
1495 rx->sgl, rx->nents, DMA_DEV_TO_MEM,
1498 ret = -EINVAL;
1502 desc_rx->callback = spi_imx_dma_rx_callback;
1503 desc_rx->callback_param = (void *)spi_imx;
1505 reinit_completion(&spi_imx->dma_rx_completion);
1506 dma_async_issue_pending(controller->dma_rx);
1508 desc_tx = dmaengine_prep_slave_sg(controller->dma_tx,
1509 tx->sgl, tx->nents, DMA_MEM_TO_DEV,
1512 dmaengine_terminate_all(controller->dma_tx);
1513 dmaengine_terminate_all(controller->dma_rx);
1514 return -EINVAL;
1517 desc_tx->callback = spi_imx_dma_tx_callback;
1518 desc_tx->callback_param = (void *)spi_imx;
1520 reinit_completion(&spi_imx->dma_tx_completion);
1521 dma_async_issue_pending(controller->dma_tx);
1523 transfer_timeout = spi_imx_calculate_timeout(spi_imx, transfer->len);
1526 time_left = wait_for_completion_timeout(&spi_imx->dma_tx_completion,
1529 dev_err(spi_imx->dev, "I/O Error in DMA TX\n");
1530 dmaengine_terminate_all(controller->dma_tx);
1531 dmaengine_terminate_all(controller->dma_rx);
1532 return -ETIMEDOUT;
1535 time_left = wait_for_completion_timeout(&spi_imx->dma_rx_completion,
1538 dev_err(&controller->dev, "I/O Error in DMA RX\n");
1539 spi_imx->devtype_data->reset(spi_imx);
1540 dmaengine_terminate_all(controller->dma_rx);
1541 return -ETIMEDOUT;
1547 transfer->error |= SPI_TRANS_FAIL_NO_START;
1551 static int spi_imx_pio_transfer(struct spi_device *spi,
1554 struct spi_imx_data *spi_imx = spi_controller_get_devdata(spi->controller);
1558 spi_imx->tx_buf = transfer->tx_buf;
1559 spi_imx->rx_buf = transfer->rx_buf;
1560 spi_imx->count = transfer->len;
1561 spi_imx->txfifo = 0;
1562 spi_imx->remainder = 0;
1564 reinit_completion(&spi_imx->xfer_done);
1568 spi_imx->devtype_data->intctrl(spi_imx, MXC_INT_TE);
1570 transfer_timeout = spi_imx_calculate_timeout(spi_imx, transfer->len);
1572 time_left = wait_for_completion_timeout(&spi_imx->xfer_done,
1575 dev_err(&spi->dev, "I/O Error in PIO\n");
1576 spi_imx->devtype_data->reset(spi_imx);
1577 return -ETIMEDOUT;
1583 static int spi_imx_poll_transfer(struct spi_device *spi,
1586 struct spi_imx_data *spi_imx = spi_controller_get_devdata(spi->controller);
1589 spi_imx->tx_buf = transfer->tx_buf;
1590 spi_imx->rx_buf = transfer->rx_buf;
1591 spi_imx->count = transfer->len;
1592 spi_imx->txfifo = 0;
1593 spi_imx->remainder = 0;
1601 timeout = spi_imx_calculate_timeout(spi_imx, transfer->len) + jiffies;
1602 while (spi_imx->txfifo) {
1604 while (spi_imx->txfifo &&
1605 spi_imx->devtype_data->rx_available(spi_imx)) {
1606 spi_imx->rx(spi_imx);
1607 spi_imx->txfifo--;
1611 if (spi_imx->count) {
1616 if (spi_imx->txfifo &&
1619 dev_err_ratelimited(&spi->dev,
1620 "timeout period reached: jiffies: %lu- falling back to interrupt mode\n",
1621 jiffies - timeout);
1624 return spi_imx_pio_transfer(spi, transfer);
1631 static int spi_imx_pio_transfer_target(struct spi_device *spi,
1634 struct spi_imx_data *spi_imx = spi_controller_get_devdata(spi->controller);
1638 transfer->len > MX53_MAX_TRANSFER_BYTES) {
1639 dev_err(&spi->dev, "Transaction too big, max size is %d bytes\n",
1641 return -EMSGSIZE;
1644 spi_imx->tx_buf = transfer->tx_buf;
1645 spi_imx->rx_buf = transfer->rx_buf;
1646 spi_imx->count = transfer->len;
1647 spi_imx->txfifo = 0;
1648 spi_imx->remainder = 0;
1650 reinit_completion(&spi_imx->xfer_done);
1651 spi_imx->target_aborted = false;
1655 spi_imx->devtype_data->intctrl(spi_imx, MXC_INT_TE | MXC_INT_RDR);
1657 if (wait_for_completion_interruptible(&spi_imx->xfer_done) ||
1658 spi_imx->target_aborted) {
1659 dev_dbg(&spi->dev, "interrupted\n");
1660 ret = -EINTR;
1669 if (spi_imx->devtype_data->disable)
1670 spi_imx->devtype_data->disable(spi_imx);
1679 result = DIV_U64_ROUND_CLOSEST((u64)USEC_PER_SEC * transfer->len * BITS_PER_BYTE,
1680 transfer->effective_speed_hz);
1681 if (transfer->word_delay.value) {
1685 words = DIV_ROUND_UP(transfer->len * BITS_PER_BYTE, transfer->bits_per_word);
1686 word_delay_us = DIV_ROUND_CLOSEST(spi_delay_to_ns(&transfer->word_delay, transfer),
1695 struct spi_device *spi,
1698 struct spi_imx_data *spi_imx = spi_controller_get_devdata(spi->controller);
1700 spi_imx_setupxfer(spi, transfer);
1701 transfer->effective_speed_hz = spi_imx->spi_bus_clk;
1704 while (spi_imx->devtype_data->rx_available(spi_imx))
1705 readl(spi_imx->base + MXC_CSPIRXDATA);
1707 if (spi_imx->target_mode)
1708 return spi_imx_pio_transfer_target(spi, transfer);
1712 * transfer, the SPI transfer has already been mapped, so we
1715 if (spi_imx->usedma)
1719 if (transfer->len == 1 || (polling_limit_us &&
1721 return spi_imx_poll_transfer(spi, transfer);
1723 return spi_imx_pio_transfer(spi, transfer);
1726 static int spi_imx_setup(struct spi_device *spi)
1728 dev_dbg(&spi->dev, "%s: mode %d, %u bpw, %d hz\n", __func__,
1729 spi->mode, spi->bits_per_word, spi->max_speed_hz);
1740 ret = pm_runtime_resume_and_get(spi_imx->dev);
1742 dev_err(spi_imx->dev, "failed to enable clock\n");
1746 ret = spi_imx->devtype_data->prepare_message(spi_imx, msg);
1748 pm_runtime_mark_last_busy(spi_imx->dev);
1749 pm_runtime_put_autosuspend(spi_imx->dev);
1760 pm_runtime_mark_last_busy(spi_imx->dev);
1761 pm_runtime_put_autosuspend(spi_imx->dev);
1769 spi_imx->target_aborted = true;
1770 complete(&spi_imx->xfer_done);
1777 struct device_node *np = pdev->dev.of_node;
1783 of_device_get_match_data(&pdev->dev);
1787 target_mode = devtype_data->has_targetmode &&
1788 of_property_read_bool(np, "spi-slave");
1790 controller = spi_alloc_target(&pdev->dev,
1793 controller = spi_alloc_host(&pdev->dev,
1796 return -ENOMEM;
1798 ret = of_property_read_u32(np, "fsl,spi-rdy-drctl", &spi_drctl);
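This is where the probe path reads the raw DRCTL field from the devicetree. A hedged sketch of the surrounding validation (the full check is not among these matches): anything outside 0-2 falls back to 0, because the '11' encoding is reserved, leaving SPI_RDY handling disabled:

u32 spi_drctl;

ret = of_property_read_u32(np, "fsl,spi-rdy-drctl", &spi_drctl);
if (ret < 0 || spi_drctl >= 0x3)
	spi_drctl = 0;	/* missing or reserved value: ignore SPI_RDY */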
1806 controller->bits_per_word_mask = SPI_BPW_RANGE_MASK(1, 32);
1807 controller->bus_num = np ? -1 : pdev->id;
1808 controller->use_gpio_descriptors = true;
1811 spi_imx->controller = controller;
1812 spi_imx->dev = &pdev->dev;
1813 spi_imx->target_mode = target_mode;
1815 spi_imx->devtype_data = devtype_data;
1823 if (!device_property_read_u32(&pdev->dev, "num-cs", &val))
1824 controller->num_chipselect = val;
1826 controller->num_chipselect = 3;
1828 controller->transfer_one = spi_imx_transfer_one;
1829 controller->setup = spi_imx_setup;
1830 controller->prepare_message = spi_imx_prepare_message;
1831 controller->unprepare_message = spi_imx_unprepare_message;
1832 controller->target_abort = spi_imx_target_abort;
1833 controller->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH | SPI_NO_CS |
1838 controller->mode_bits |= SPI_LOOP | SPI_READY;
1841 controller->mode_bits |= SPI_RX_CPHA_FLIP;
1844 device_property_read_u32(&pdev->dev, "cs-gpios", NULL))
1846 	 * When using HW-CS, implementing SPI_CS_WORD can be done by just
1850 controller->mode_bits |= SPI_CS_WORD;
1853 controller->max_native_cs = 4;
1854 controller->flags |= SPI_CONTROLLER_GPIO_SS;
1857 spi_imx->spi_drctl = spi_drctl;
1859 init_completion(&spi_imx->xfer_done);
1861 spi_imx->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
1862 if (IS_ERR(spi_imx->base)) {
1863 ret = PTR_ERR(spi_imx->base);
1866 spi_imx->base_phys = res->start;
1874 ret = devm_request_irq(&pdev->dev, irq, spi_imx_isr, 0,
1875 dev_name(&pdev->dev), spi_imx);
1877 dev_err(&pdev->dev, "can't get irq%d: %d\n", irq, ret);
1881 spi_imx->clk_ipg = devm_clk_get(&pdev->dev, "ipg");
1882 if (IS_ERR(spi_imx->clk_ipg)) {
1883 ret = PTR_ERR(spi_imx->clk_ipg);
1887 spi_imx->clk_per = devm_clk_get(&pdev->dev, "per");
1888 if (IS_ERR(spi_imx->clk_per)) {
1889 ret = PTR_ERR(spi_imx->clk_per);
1893 ret = clk_prepare_enable(spi_imx->clk_per);
1897 ret = clk_prepare_enable(spi_imx->clk_ipg);
1901 pm_runtime_set_autosuspend_delay(spi_imx->dev, MXC_RPM_TIMEOUT);
1902 pm_runtime_use_autosuspend(spi_imx->dev);
1903 pm_runtime_get_noresume(spi_imx->dev);
1904 pm_runtime_set_active(spi_imx->dev);
1905 pm_runtime_enable(spi_imx->dev);
1907 spi_imx->spi_clk = clk_get_rate(spi_imx->clk_per);
1912 if (spi_imx->devtype_data->has_dmamode) {
1913 ret = spi_imx_sdma_init(&pdev->dev, spi_imx, controller);
1914 if (ret == -EPROBE_DEFER)
1918 dev_dbg(&pdev->dev, "dma setup error %d, use pio\n",
1922 spi_imx->devtype_data->reset(spi_imx);
1924 spi_imx->devtype_data->intctrl(spi_imx, 0);
1926 controller->dev.of_node = pdev->dev.of_node;
1929 dev_err_probe(&pdev->dev, ret, "register controller failed\n");
1933 pm_runtime_mark_last_busy(spi_imx->dev);
1934 pm_runtime_put_autosuspend(spi_imx->dev);
1939 if (spi_imx->devtype_data->has_dmamode)
1942 pm_runtime_dont_use_autosuspend(spi_imx->dev);
1943 pm_runtime_disable(spi_imx->dev);
1944 pm_runtime_set_suspended(&pdev->dev);
1946 clk_disable_unprepare(spi_imx->clk_ipg);
1948 clk_disable_unprepare(spi_imx->clk_per);
1963 ret = pm_runtime_get_sync(spi_imx->dev);
1965 writel(0, spi_imx->base + MXC_CSPICTRL);
1967 dev_warn(spi_imx->dev, "failed to enable clock, skip hw disable\n");
1969 pm_runtime_dont_use_autosuspend(spi_imx->dev);
1970 pm_runtime_put_sync(spi_imx->dev);
1971 pm_runtime_disable(spi_imx->dev);
1984 ret = clk_prepare_enable(spi_imx->clk_per);
1988 ret = clk_prepare_enable(spi_imx->clk_ipg);
1990 clk_disable_unprepare(spi_imx->clk_per);
2004 clk_disable_unprepare(spi_imx->clk_per);
2005 clk_disable_unprepare(spi_imx->clk_ipg);
2038 MODULE_DESCRIPTION("i.MX SPI Controller driver");