Lines matching "tx-clk-tap-delay" in drivers/spi/spi-tegra210-quad.c

   1  // SPDX-License-Identifier: GPL-2.0-only
   5  #include <linux/clk.h>
   7  #include <linux/delay.h>
   9  #include <linux/dma-mapping.h>
In struct tegra_qspi:
 190  struct clk *clk;
In tegra_qspi_readl():
 243  return readl(tqspi->base + offset);

In tegra_qspi_writel():
 248  writel(value, tqspi->base + offset);
 252  readl(tqspi->base + QSPI_COMMAND1);
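The dummy readl() of QSPI_COMMAND1 at line 252 is the usual MMIO posted-write flush: reading back over the same bus path forces the preceding write to complete before the function returns. A minimal sketch of the idiom, assuming the driver's QSPI_COMMAND1 offset macro:

#include <linux/io.h>

/* Posted-write flush sketch; QSPI_COMMAND1 is the driver's own macro. */
static inline void qspi_writel_flushed(void __iomem *base, u32 value,
                                       unsigned long offset)
{
        writel(value, base + offset);
        readl(base + QSPI_COMMAND1);    /* force the posted write to land */
}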
In tegra_qspi_calculate_curr_xfer_param():
 279  unsigned int remain_len = t->len - tqspi->cur_pos;
 280  unsigned int bits_per_word = t->bits_per_word;
 282  tqspi->bytes_per_word = DIV_ROUND_UP(bits_per_word, 8);
 292  bits_per_word == 32) && t->len > 3) {
 293  tqspi->is_packed = true;
 294  tqspi->words_per_32bit = 32 / bits_per_word;
 296  tqspi->is_packed = false;
 297  tqspi->words_per_32bit = 1;
 300  if (tqspi->is_packed) {
 301  max_len = min(remain_len, tqspi->max_buf_size);
 302  tqspi->curr_dma_words = max_len / tqspi->bytes_per_word;
 305  max_word = (remain_len - 1) / tqspi->bytes_per_word + 1;
 306  max_word = min(max_word, tqspi->max_buf_size / 4);
 307  tqspi->curr_dma_words = max_word;
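This function decides between packed mode (8-, 16- or 32-bit words with more than 3 bytes remaining, several SPI words folded into each 32-bit FIFO slot) and unpacked mode (one SPI word per slot). A standalone restatement of the word-count arithmetic, under the same rules as the excerpt:

#include <stdbool.h>
#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

/* Packed mode needs 8, 16 or 32 bits per word and more than 3 bytes left. */
static unsigned int curr_dma_words(unsigned int remain_len,
                                   unsigned int bits_per_word,
                                   unsigned int max_buf_size, bool *packed)
{
        unsigned int bytes_per_word = DIV_ROUND_UP(bits_per_word, 8);

        *packed = (bits_per_word == 8 || bits_per_word == 16 ||
                   bits_per_word == 32) && remain_len > 3;

        if (*packed)
                return MIN(remain_len, max_buf_size) / bytes_per_word;

        /* unpacked: one SPI word per 32-bit FIFO slot */
        return MIN((remain_len - 1) / bytes_per_word + 1, max_buf_size / 4);
}

int main(void)
{
        bool packed;

        /* 256 bytes at 8 bpw in a 64 KiB buffer -> 256 packed words */
        printf("%u\n", curr_dma_words(256, 8, 64 * 1024, &packed));
        return 0;
}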
In tegra_qspi_fill_tx_fifo_from_client_txbuf():
 319  u8 *tx_buf = (u8 *)t->tx_buf + tqspi->cur_tx_pos;
 325  if (tqspi->is_packed) {
 326  fifo_words_left = tx_empty_count * tqspi->words_per_32bit;
 327  written_words = min(fifo_words_left, tqspi->curr_dma_words);
 328  len = written_words * tqspi->bytes_per_word;
 333  for (i = 0; (i < 4) && len; i++, len--)
 338  tqspi->cur_tx_pos += written_words * tqspi->bytes_per_word;
 341  u8 bytes_per_word = tqspi->bytes_per_word;
 343  max_n_32bit = min(tqspi->curr_dma_words, tx_empty_count);
 345  len = written_words * tqspi->bytes_per_word;
 346  if (len > t->len - tqspi->cur_pos)
 347  len = t->len - tqspi->cur_pos;
 352  for (i = 0; len && (i < min(4, bytes_per_word)); i++, len--)
 357  tqspi->cur_tx_pos += write_bytes;
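Both FIFO fill paths fold client bytes little-endian into 32-bit FIFO words, at most four bytes per write, which is what the "(i < 4) && len" loop bound above expresses. The folding step in isolation:

#include <stdint.h>

/* Fold up to four payload bytes, little-endian, into one 32-bit FIFO
 * word; mirrors the "(i < 4) && len" loops above. */
static uint32_t pack_fifo_word(const uint8_t *buf, unsigned int len)
{
        uint32_t x = 0;

        for (unsigned int i = 0; i < 4 && len; i++, len--)
                x |= (uint32_t)buf[i] << (i * 8);
        return x;
}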
In tegra_qspi_read_rx_fifo_to_client_rxbuf():
 366  u8 *rx_buf = (u8 *)t->rx_buf + tqspi->cur_rx_pos;
 373  if (tqspi->is_packed) {
 374  len = tqspi->curr_dma_words * tqspi->bytes_per_word;
 378  for (i = 0; len && (i < 4); i++, len--)
 382  read_words += tqspi->curr_dma_words;
 383  tqspi->cur_rx_pos += tqspi->curr_dma_words * tqspi->bytes_per_word;
 385  u32 rx_mask = ((u32)1 << t->bits_per_word) - 1;
 386  u8 bytes_per_word = tqspi->bytes_per_word;
 390  if (len > t->len - tqspi->cur_pos)
 391  len = t->len - tqspi->cur_pos;
 396  for (i = 0; len && (i < bytes_per_word); i++, len--)
 401  tqspi->cur_rx_pos += read_bytes;
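The receive mirror applies rx_mask before scattering bytes, because in unpacked mode the bits above bits_per_word in each FIFO word are not meaningful. A standalone sketch, with a guard added for the 32-bit case where "1 << bits_per_word" would be an undefined shift:

#include <stdint.h>

/* Scatter one FIFO word into the client buffer, masking off bits above
 * bits_per_word; the >= 32 guard avoids an undefined 32-bit shift. */
static void unpack_fifo_word(uint32_t fifo_word, uint8_t *dst,
                             unsigned int bytes, unsigned int bits_per_word)
{
        uint32_t mask = bits_per_word >= 32 ?
                        UINT32_MAX : ((uint32_t)1 << bits_per_word) - 1;
        uint32_t x = fifo_word & mask;

        for (unsigned int i = 0; i < bytes; i++)
                dst[i] = (x >> (i * 8)) & 0xff;
}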
In tegra_qspi_copy_client_txbuf_to_qspi_txbuf():
 418  if (tqspi->is_packed) {
 419  tqspi->cur_tx_pos += tqspi->curr_dma_words * tqspi->bytes_per_word;
 421  u8 *tx_buf = (u8 *)t->tx_buf + tqspi->cur_tx_pos;
 428  consume = tqspi->curr_dma_words * tqspi->bytes_per_word;
 429  if (consume > t->len - tqspi->cur_pos)
 430  consume = t->len - tqspi->cur_pos;
 432  for (count = 0; count < tqspi->curr_dma_words; count++) {
 435  for (i = 0; consume && (i < tqspi->bytes_per_word); i++, consume--)
 437  tqspi->tx_dma_buf[count] = x;
 440  tqspi->cur_tx_pos += write_bytes;
In tegra_qspi_copy_qspi_rxbuf_to_client_rxbuf():
 447  if (tqspi->is_packed) {
 448  tqspi->cur_rx_pos += tqspi->curr_dma_words * tqspi->bytes_per_word;
 450  unsigned char *rx_buf = t->rx_buf + tqspi->cur_rx_pos;
 451  u32 rx_mask = ((u32)1 << t->bits_per_word) - 1;
 459  consume = tqspi->curr_dma_words * tqspi->bytes_per_word;
 460  if (consume > t->len - tqspi->cur_pos)
 461  consume = t->len - tqspi->cur_pos;
 463  for (count = 0; count < tqspi->curr_dma_words; count++) {
 464  u32 x = tqspi->rx_dma_buf[count] & rx_mask;
 466  for (i = 0; consume && (i < tqspi->bytes_per_word); i++, consume--)
 470  tqspi->cur_rx_pos += read_bytes;
In tegra_qspi_start_tx_dma():
 485  reinit_completion(&tqspi->tx_dma_complete);
 487  if (tqspi->is_packed)
 488  tx_dma_phys = t->tx_dma;
 490  tx_dma_phys = tqspi->tx_dma_phys;
 492  tqspi->tx_dma_desc = dmaengine_prep_slave_single(tqspi->tx_dma_chan, tx_dma_phys,
 496  if (!tqspi->tx_dma_desc) {
 497  dev_err(tqspi->dev, "Unable to get TX descriptor\n");
 498  return -EIO;
 501  tqspi->tx_dma_desc->callback = tegra_qspi_dma_complete;
 502  tqspi->tx_dma_desc->callback_param = &tqspi->tx_dma_complete;
 503  dmaengine_submit(tqspi->tx_dma_desc);
 504  dma_async_issue_pending(tqspi->tx_dma_chan);
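The TX starter above (and its RX twin below) is the standard dmaengine slave sequence: re-arm a completion, prepare a single-buffer slave descriptor, attach a callback that fires the completion, submit, then kick the engine. Reduced to a reusable skeleton with placeholder names:

#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>

static void slave_dma_done(void *arg)
{
        complete(arg);          /* wake the waiter in the transfer path */
}

/* Skeleton of the prepare/submit/issue sequence used above. */
static int start_slave_dma(struct dma_chan *chan, dma_addr_t phys,
                           size_t len, enum dma_transfer_direction dir,
                           struct completion *done)
{
        struct dma_async_tx_descriptor *desc;

        reinit_completion(done);
        desc = dmaengine_prep_slave_single(chan, phys, len, dir,
                                           DMA_PREP_INTERRUPT);
        if (!desc)
                return -EIO;

        desc->callback = slave_dma_done;
        desc->callback_param = done;
        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
}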
In tegra_qspi_start_rx_dma():
 513  reinit_completion(&tqspi->rx_dma_complete);
 515  if (tqspi->is_packed)
 516  rx_dma_phys = t->rx_dma;
 518  rx_dma_phys = tqspi->rx_dma_phys;
 520  tqspi->rx_dma_desc = dmaengine_prep_slave_single(tqspi->rx_dma_chan, rx_dma_phys,
 524  if (!tqspi->rx_dma_desc) {
 525  dev_err(tqspi->dev, "Unable to get RX descriptor\n");
 526  return -EIO;
 529  tqspi->rx_dma_desc->callback = tegra_qspi_dma_complete;
 530  tqspi->rx_dma_desc->callback_param = &tqspi->rx_dma_complete;
 531  dmaengine_submit(tqspi->rx_dma_desc);
 532  dma_async_issue_pending(tqspi->rx_dma_chan);
In tegra_qspi_flush_fifos():
 539  void __iomem *addr = tqspi->base + QSPI_FIFO_STATUS;
In tegra_qspi_dma_map_xfer():
 570  u8 *tx_buf = (u8 *)t->tx_buf + tqspi->cur_tx_pos;
 571  u8 *rx_buf = (u8 *)t->rx_buf + tqspi->cur_rx_pos;
 574  len = DIV_ROUND_UP(tqspi->curr_dma_words * tqspi->bytes_per_word, 4) * 4;
 576  if (t->tx_buf) {
 577  t->tx_dma = dma_map_single(tqspi->dev, (void *)tx_buf, len, DMA_TO_DEVICE);
 578  if (dma_mapping_error(tqspi->dev, t->tx_dma))
 579  return -ENOMEM;
 582  if (t->rx_buf) {
 583  t->rx_dma = dma_map_single(tqspi->dev, (void *)rx_buf, len, DMA_FROM_DEVICE);
 584  if (dma_mapping_error(tqspi->dev, t->rx_dma)) {
 585  dma_unmap_single(tqspi->dev, t->tx_dma, len, DMA_TO_DEVICE);
 586  return -ENOMEM;
In tegra_qspi_dma_unmap_xfer():
 597  len = DIV_ROUND_UP(tqspi->curr_dma_words * tqspi->bytes_per_word, 4) * 4;
 599  if (t->tx_buf)
 600  dma_unmap_single(tqspi->dev, t->tx_dma, len, DMA_TO_DEVICE);
 601  if (t->rx_buf)
 602  dma_unmap_single(tqspi->dev, t->rx_dma, len, DMA_FROM_DEVICE);
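Both map and unmap round the byte count up to a whole number of 32-bit FIFO words, because the DMA engine always moves full words; mapping fewer bytes than the engine touches would corrupt the streaming mapping. The rounding in isolation:

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
        unsigned int payload = 5;                        /* bytes */
        unsigned int len = DIV_ROUND_UP(payload, 4) * 4; /* -> 8 */

        printf("map %u bytes for a %u-byte payload\n", len, payload);
        return 0;
}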
In tegra_qspi_start_dma_based_transfer():
 614  if (tqspi->is_packed) {
 620  val = QSPI_DMA_BLK_SET(tqspi->curr_dma_words - 1);
 625  if (tqspi->is_packed)
 626  len = DIV_ROUND_UP(tqspi->curr_dma_words * tqspi->bytes_per_word, 4) * 4;
 628  len = tqspi->curr_dma_words * 4;
 631  if (tqspi->soc_data->has_ext_dma) {
 647  tqspi->dma_control_reg = val;
 651  if (tqspi->cur_direction & DATA_DIR_TX) {
 652  if (tqspi->tx_dma_chan) {
 653  dma_sconfig.dst_addr = tqspi->phys + QSPI_TX_FIFO;
 656  ret = dmaengine_slave_config(tqspi->tx_dma_chan, &dma_sconfig);
 658  dev_err(tqspi->dev, "failed DMA slave config: %d\n", ret);
 665  dev_err(tqspi->dev, "failed to start TX DMA: %d\n", ret);
 669  if (tqspi->is_packed)
 670  tx_dma_phys = t->tx_dma;
 672  tx_dma_phys = tqspi->tx_dma_phys;
 681  if (tqspi->cur_direction & DATA_DIR_RX) {
 682  if (tqspi->rx_dma_chan) {
 683  dma_sconfig.src_addr = tqspi->phys + QSPI_RX_FIFO;
 686  ret = dmaengine_slave_config(tqspi->rx_dma_chan, &dma_sconfig);
 688  dev_err(tqspi->dev, "failed DMA slave config: %d\n", ret);
 694  dev_err(tqspi->dev, "failed to start RX DMA: %d\n", ret);
 695  if (tqspi->cur_direction & DATA_DIR_TX)
 696  dmaengine_terminate_all(tqspi->tx_dma_chan);
 700  if (tqspi->is_packed)
 701  rx_dma_phys = t->rx_dma;
 703  rx_dma_phys = tqspi->rx_dma_phys;
 712  tegra_qspi_writel(tqspi, tqspi->command1_reg, QSPI_COMMAND1);
 714  tqspi->is_curr_dma_xfer = true;
 715  tqspi->dma_control_reg = val;
In tegra_qspi_start_cpu_based_transfer():
 727  if (qspi->cur_direction & DATA_DIR_TX)
 730  cur_words = qspi->curr_dma_words;
 732  val = QSPI_DMA_BLK_SET(cur_words - 1);
 737  qspi->is_curr_dma_xfer = false;
 738  val = qspi->command1_reg;
In tegra_qspi_deinit_dma():
 747  if (tqspi->tx_dma_buf) {
 748  dma_free_coherent(tqspi->dev, tqspi->dma_buf_size,
 749  tqspi->tx_dma_buf, tqspi->tx_dma_phys);
 750  tqspi->tx_dma_buf = NULL;
 753  if (tqspi->tx_dma_chan) {
 754  dma_release_channel(tqspi->tx_dma_chan);
 755  tqspi->tx_dma_chan = NULL;
 758  if (tqspi->rx_dma_buf) {
 759  dma_free_coherent(tqspi->dev, tqspi->dma_buf_size,
 760  tqspi->rx_dma_buf, tqspi->rx_dma_phys);
 761  tqspi->rx_dma_buf = NULL;
 764  if (tqspi->rx_dma_chan) {
 765  dma_release_channel(tqspi->rx_dma_chan);
 766  tqspi->rx_dma_chan = NULL;
In tegra_qspi_init_dma():
 777  if (tqspi->soc_data->has_ext_dma) {
 778  dma_chan = dma_request_chan(tqspi->dev, "rx");
 784  tqspi->rx_dma_chan = dma_chan;
 786  dma_chan = dma_request_chan(tqspi->dev, "tx");
 792  tqspi->tx_dma_chan = dma_chan;
 794  if (!device_iommu_mapped(tqspi->dev)) {
 795  dev_warn(tqspi->dev,
 796  "IOMMU not enabled in device-tree, falling back to PIO mode\n");
 801  dma_buf = dma_alloc_coherent(tqspi->dev, tqspi->dma_buf_size, &dma_phys, GFP_KERNEL);
 803  err = -ENOMEM;
 807  tqspi->rx_dma_buf = dma_buf;
 808  tqspi->rx_dma_phys = dma_phys;
 810  dma_buf = dma_alloc_coherent(tqspi->dev, tqspi->dma_buf_size, &dma_phys, GFP_KERNEL);
 812  err = -ENOMEM;
 816  tqspi->tx_dma_buf = dma_buf;
 817  tqspi->tx_dma_phys = dma_phys;
 818  tqspi->use_dma = true;
 825  if (err != -EPROBE_DEFER) {
 826  dev_err(tqspi->dev, "cannot use DMA: %d\n", err);
 827  dev_err(tqspi->dev, "falling back to PIO\n");
In tegra_qspi_setup_transfer_one():
 837  struct tegra_qspi *tqspi = spi_controller_get_devdata(spi->controller);
 838  struct tegra_qspi_client_data *cdata = spi->controller_data;
 839  u32 command1, command2, speed = t->speed_hz;
 840  u8 bits_per_word = t->bits_per_word;
 844  if (!has_acpi_companion(tqspi->dev) && speed != tqspi->cur_speed) {
 845  clk_set_rate(tqspi->clk, speed);
 846  tqspi->cur_speed = speed;
 849  tqspi->cur_pos = 0;
 850  tqspi->cur_rx_pos = 0;
 851  tqspi->cur_tx_pos = 0;
 852  tqspi->curr_xfer = t;
 857  command1 = tqspi->def_command1_reg;
 859  command1 |= QSPI_BIT_LENGTH(bits_per_word - 1);
 862  req_mode = spi->mode & 0x3;
 868  if (spi->mode & SPI_CS_HIGH)
 874  if (cdata && cdata->tx_clk_tap_delay)
 875  tx_tap = cdata->tx_clk_tap_delay;
 877  if (cdata && cdata->rx_clk_tap_delay)
 878  rx_tap = cdata->rx_clk_tap_delay;
 881  if (command2 != tqspi->def_command2_reg)
 885  command1 = tqspi->command1_reg;
 887  command1 |= QSPI_BIT_LENGTH(bits_per_word - 1);
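Here the searched-for values are consumed: the per-device TX/RX clock tap delays land in QSPI_COMMAND2, which is rewritten only when it differs from the reset default. A sketch of the flow, relying on the driver's own types; the QSPI_TX_TAP_DELAY()/QSPI_RX_TAP_DELAY() field macros and their bit positions are assumptions, not the real register layout:

/* Hypothetical field macros; the excerpt shows the surrounding logic,
 * not the COMMAND2 bit layout. */
#define QSPI_TX_TAP_DELAY(x)    (((x) & 0x3f) << 10)
#define QSPI_RX_TAP_DELAY(x)    ((x) & 0xff)

static void program_tap_delays(struct tegra_qspi *tqspi,
                               struct tegra_qspi_client_data *cdata)
{
        u32 tx_tap = 0, rx_tap = 0, command2;

        if (cdata && cdata->tx_clk_tap_delay)
                tx_tap = cdata->tx_clk_tap_delay;
        if (cdata && cdata->rx_clk_tap_delay)
                rx_tap = cdata->rx_clk_tap_delay;

        command2 = QSPI_TX_TAP_DELAY(tx_tap) | QSPI_RX_TAP_DELAY(rx_tap);
        if (command2 != tqspi->def_command2_reg)   /* skip a needless write */
                tegra_qspi_writel(tqspi, command2, QSPI_COMMAND2);
}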
In tegra_qspi_start_transfer_one():
 898  struct tegra_qspi *tqspi = spi_controller_get_devdata(spi->controller);
 906  if (tqspi->is_packed)
 910  tqspi->cur_direction = 0;
 913  if (t->rx_buf) {
 915  tqspi->cur_direction |= DATA_DIR_RX;
 916  bus_width = t->rx_nbits;
 919  if (t->tx_buf) {
 921  tqspi->cur_direction |= DATA_DIR_TX;
 922  bus_width = t->tx_nbits;
 934  tqspi->command1_reg = command1;
 936  tegra_qspi_writel(tqspi, QSPI_NUM_DUMMY_CYCLE(tqspi->dummy_cycles), QSPI_MISC_REG);
 942  if (tqspi->use_dma && total_fifo_words > QSPI_FIFO_DEPTH)
In tegra_qspi_parse_cdata_dt():
 953  struct tegra_qspi *tqspi = spi_controller_get_devdata(spi->controller);
 955  cdata = devm_kzalloc(tqspi->dev, sizeof(*cdata), GFP_KERNEL);
 959  device_property_read_u32(&spi->dev, "nvidia,tx-clk-tap-delay",
 960  &cdata->tx_clk_tap_delay);
 961  device_property_read_u32(&spi->dev, "nvidia,rx-clk-tap-delay",
 962  &cdata->rx_clk_tap_delay);
In tegra_qspi_setup():
 969  struct tegra_qspi *tqspi = spi_controller_get_devdata(spi->controller);
 970  struct tegra_qspi_client_data *cdata = spi->controller_data;
 975  ret = pm_runtime_resume_and_get(tqspi->dev);
 977  dev_err(tqspi->dev, "failed to get runtime PM: %d\n", ret);
 983  spi->controller_data = cdata;
 985  spin_lock_irqsave(&tqspi->lock, flags);
 988  val = tqspi->def_command1_reg;
 990  if (spi->mode & SPI_CS_HIGH)
 995  tqspi->def_command1_reg = val;
 996  tegra_qspi_writel(tqspi, tqspi->def_command1_reg, QSPI_COMMAND1);
 998  spin_unlock_irqrestore(&tqspi->lock, flags);
1000  pm_runtime_put(tqspi->dev);
In tegra_qspi_dump_regs():
1007  dev_dbg(tqspi->dev, "============ QSPI REGISTER DUMP ============\n");
1008  dev_dbg(tqspi->dev, "Command1: 0x%08x | Command2: 0x%08x\n",
1011  dev_dbg(tqspi->dev, "DMA_CTL: 0x%08x | DMA_BLK: 0x%08x\n",
1014  dev_dbg(tqspi->dev, "INTR_MASK: 0x%08x | MISC: 0x%08x\n",
1017  dev_dbg(tqspi->dev, "TRANS_STAT: 0x%08x | FIFO_STATUS: 0x%08x\n",
In tegra_qspi_handle_error():
1024  dev_err(tqspi->dev, "error in transfer, fifo status 0x%08x\n", tqspi->status_reg);
1027  if (device_reset(tqspi->dev) < 0)
1028  dev_warn_once(tqspi->dev, "device reset failed\n");
In tegra_qspi_transfer_end():
1033  struct tegra_qspi *tqspi = spi_controller_get_devdata(spi->controller);
1034  int cs_val = (spi->mode & SPI_CS_HIGH) ? 0 : 1;
1037  tqspi->command1_reg |= QSPI_CS_SW_VAL;
1039  tqspi->command1_reg &= ~QSPI_CS_SW_VAL;
1040  tegra_qspi_writel(tqspi, tqspi->command1_reg, QSPI_COMMAND1);
1041  tegra_qspi_writel(tqspi, tqspi->def_command1_reg, QSPI_COMMAND1);
In tegra_qspi_cmd_config():
1055  cmd_config |= QSPI_COMMAND_SIZE_SET((len * 8) - 1);

In tegra_qspi_addr_config():
1070  addr_config |= QSPI_ADDRESS_SIZE_SET((len * 8) - 1);
In tegra_qspi_combined_seq_xfer():
1080  struct spi_device *spi = msg->spi;
1090  if (spi->mode & SPI_TPM_HW_FLOW) {
1091  if (tqspi->soc_data->supports_tpm)
1094  return -EIO;
1099  list_for_each_entry(xfer, &msg->transfers, transfer_list) {
1103  cmd_config = tegra_qspi_cmd_config(false, xfer->tx_nbits,
1104  xfer->len);
1105  cmd_value = *((const u8 *)(xfer->tx_buf));
1109  addr_config = tegra_qspi_addr_config(false, xfer->tx_nbits,
1110  xfer->len);
1111  address_value = *((const u32 *)(xfer->tx_buf));
1114  if (xfer->dummy_data) {
1115  tqspi->dummy_cycles = xfer->len * 8 / xfer->tx_nbits;
1131  reinit_completion(&tqspi->xfer_completion);
1138  dev_err(tqspi->dev, "Failed to start transfer-one: %d\n",
1145  (&tqspi->xfer_completion,
1149  dev_err_ratelimited(tqspi->dev,
1151  if (tqspi->is_curr_dma_xfer) {
1152  if ((tqspi->cur_direction & DATA_DIR_TX) &&
1153  tqspi->tx_dma_chan)
1154  dmaengine_terminate_all(tqspi->tx_dma_chan);
1155  if ((tqspi->cur_direction & DATA_DIR_RX) &&
1156  tqspi->rx_dma_chan)
1157  dmaengine_terminate_all(tqspi->rx_dma_chan);
1161  if (!tqspi->is_curr_dma_xfer) {
1179  if (device_reset(tqspi->dev) < 0)
1180  dev_warn_once(tqspi->dev,
1182  ret = -EIO;
1186  if (tqspi->tx_status || tqspi->rx_status) {
1187  dev_err(tqspi->dev, "QSPI Transfer failed\n");
1188  tqspi->tx_status = 0;
1189  tqspi->rx_status = 0;
1190  ret = -EIO;
1195  ret = -EINVAL;
1198  msg->actual_length += xfer->len;
1199  if (!xfer->cs_change && transfer_phase == DATA_TRANSFER) {
1208  msg->status = ret;
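The combined sequence walks a message's transfers through a small phase machine: command byte, then address, then data, with a dummy-data transfer translated into dummy clock cycles along the way. A plausible skeleton; only DATA_TRANSFER is visible in the excerpt, the other phase names are assumptions:

#include <linux/spi/spi.h>
#include <linux/types.h>

/* Phase names other than DATA_TRANSFER are assumptions. */
enum qspi_transfer_phase {
        CMD_TRANSFER,
        ADDR_TRANSFER,
        DATA_TRANSFER,
};

/* Opcode and address extraction, exactly as the excerpt does it. */
static inline u8 cmd_byte(const struct spi_transfer *xfer)
{
        return *(const u8 *)xfer->tx_buf;
}

static inline u32 addr_word(const struct spi_transfer *xfer)
{
        return *(const u32 *)xfer->tx_buf;
}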
In tegra_qspi_non_combined_seq_xfer():
1216  struct spi_device *spi = msg->spi;
1221  msg->status = 0;
1222  msg->actual_length = 0;
1223  tqspi->tx_status = 0;
1224  tqspi->rx_status = 0;
1229  if (tqspi->soc_data->supports_tpm)
1232  list_for_each_entry(transfer, &msg->transfers, transfer_list) {
1237  tqspi->dummy_cycles = 0;
1244  if (!list_is_last(&xfer->transfer_list, &msg->transfers)) {
1248  if (next_xfer->dummy_data) {
1249  u32 dummy_cycles = next_xfer->len * 8 / next_xfer->tx_nbits;
1252  tqspi->dummy_cycles = dummy_cycles;
1253  dummy_bytes = next_xfer->len;
1259  reinit_completion(&tqspi->xfer_completion);
1265  dev_err(tqspi->dev, "failed to start transfer: %d\n", ret);
1269  ret = wait_for_completion_timeout(&tqspi->xfer_completion,
1272  dev_err(tqspi->dev, "transfer timeout\n");
1273  if (tqspi->is_curr_dma_xfer) {
1274  if ((tqspi->cur_direction & DATA_DIR_TX) && tqspi->tx_dma_chan)
1275  dmaengine_terminate_all(tqspi->tx_dma_chan);
1276  if ((tqspi->cur_direction & DATA_DIR_RX) && tqspi->rx_dma_chan)
1277  dmaengine_terminate_all(tqspi->rx_dma_chan);
1280  ret = -EIO;
1284  if (tqspi->tx_status || tqspi->rx_status) {
1286  ret = -EIO;
1290  msg->actual_length += xfer->len + dummy_bytes;
1299  if (list_is_last(&xfer->transfer_list, &msg->transfers)) {
1300  /* de-activate CS after last transfer only when cs_change is not set */
1301  if (!xfer->cs_change) {
1305  } else if (xfer->cs_change) {
1306  /* de-activate CS between transfers only when cs_change is set */
1314  msg->status = ret;
In tegra_qspi_validate_cmb_seq():
1325  list_for_each_entry(xfer, &msg->transfers, transfer_list) {
1328  if (!tqspi->soc_data->cmb_xfer_capable)
1332  xfer = list_first_entry(&msg->transfers, typeof(*xfer),
1334  if (xfer->len > 2)
1337  if (xfer->len > 4 || xfer->len < 3)
1341  if (xfer->dummy_data != 1)
1343  if ((xfer->len * 8 / xfer->tx_nbits) > QSPI_DUMMY_CYCLES_MAX)
1347  if (!tqspi->soc_data->has_ext_dma && xfer->len > (QSPI_FIFO_DEPTH << 2))
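Putting the checks together, a message qualifies for the combined sequence only under narrow conditions. Restated as a standalone predicate; the constant values and the exact transfer shape are assumptions inferred from the excerpt:

#include <stdbool.h>

#define QSPI_DUMMY_CYCLES_MAX   64      /* assumed bound */
#define QSPI_FIFO_DEPTH         64      /* assumed depth, in 32-bit words */

struct xfer { unsigned int len, tx_nbits; int dummy_data; };

/* cmd: at most 2 bytes; addr: 3 or 4 bytes; dummy cycles bounded;
 * without an external DMA engine the data phase must fit the FIFO. */
static bool cmb_seq_ok(const struct xfer *cmd, const struct xfer *addr,
                       const struct xfer *dummy, const struct xfer *data,
                       bool has_ext_dma)
{
        if (cmd->len > 2)
                return false;
        if (addr->len > 4 || addr->len < 3)
                return false;
        if (dummy && (dummy->dummy_data != 1 ||
                      dummy->len * 8 / dummy->tx_nbits > QSPI_DUMMY_CYCLES_MAX))
                return false;
        if (!has_ext_dma && data->len > (QSPI_FIFO_DEPTH << 2))
                return false;
        return true;
}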
In handle_cpu_based_xfer():
1371  struct spi_transfer *t = tqspi->curr_xfer;
1374  spin_lock_irqsave(&tqspi->lock, flags);
1376  if (tqspi->tx_status || tqspi->rx_status) {
1378  complete(&tqspi->xfer_completion);
1382  if (tqspi->cur_direction & DATA_DIR_RX)
1385  if (tqspi->cur_direction & DATA_DIR_TX)
1386  tqspi->cur_pos = tqspi->cur_tx_pos;
1388  tqspi->cur_pos = tqspi->cur_rx_pos;
1390  if (tqspi->cur_pos == t->len) {
1391  complete(&tqspi->xfer_completion);
1398  spin_unlock_irqrestore(&tqspi->lock, flags);
In handle_dma_based_xfer():
1404  struct spi_transfer *t = tqspi->curr_xfer;
1410  if (tqspi->cur_direction & DATA_DIR_TX) {
1411  if (tqspi->tx_status) {
1412  if (tqspi->tx_dma_chan)
1413  dmaengine_terminate_all(tqspi->tx_dma_chan);
1415  } else if (tqspi->tx_dma_chan) {
1417  &tqspi->tx_dma_complete, QSPI_DMA_TIMEOUT);
1419  dmaengine_terminate_all(tqspi->tx_dma_chan);
1420  dev_err(tqspi->dev, "failed TX DMA transfer\n");
1426  if (tqspi->cur_direction & DATA_DIR_RX) {
1427  if (tqspi->rx_status) {
1428  if (tqspi->rx_dma_chan)
1429  dmaengine_terminate_all(tqspi->rx_dma_chan);
1431  } else if (tqspi->rx_dma_chan) {
1433  &tqspi->rx_dma_complete, QSPI_DMA_TIMEOUT);
1435  dmaengine_terminate_all(tqspi->rx_dma_chan);
1436  dev_err(tqspi->dev, "failed RX DMA transfer\n");
1442  spin_lock_irqsave(&tqspi->lock, flags);
1447  complete(&tqspi->xfer_completion);
1451  if (tqspi->cur_direction & DATA_DIR_RX)
1454  if (tqspi->cur_direction & DATA_DIR_TX)
1455  tqspi->cur_pos = tqspi->cur_tx_pos;
1457  tqspi->cur_pos = tqspi->cur_rx_pos;
1459  if (tqspi->cur_pos == t->len) {
1461  complete(&tqspi->xfer_completion);
1475  spin_unlock_irqrestore(&tqspi->lock, flags);
In tegra_qspi_isr_thread():
1483  tqspi->status_reg = tegra_qspi_readl(tqspi, QSPI_FIFO_STATUS);
1485  if (tqspi->cur_direction & DATA_DIR_TX)
1486  tqspi->tx_status = tqspi->status_reg & (QSPI_TX_FIFO_UNF | QSPI_TX_FIFO_OVF);
1488  if (tqspi->cur_direction & DATA_DIR_RX)
1489  tqspi->rx_status = tqspi->status_reg & (QSPI_RX_FIFO_OVF | QSPI_RX_FIFO_UNF);
1493  if (!tqspi->is_curr_dma_xfer)
In tegra_qspi_of_match[]:
1529  .compatible = "nvidia,tegra210-qspi",
1532  .compatible = "nvidia,tegra186-qspi",
1535  .compatible = "nvidia,tegra194-qspi",
1538  .compatible = "nvidia,tegra234-qspi",
1541  .compatible = "nvidia,tegra241-qspi",
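Each compatible presumably points at per-SoC match data. Every field below corresponds to a use visible in the excerpt (has_ext_dma, cmb_xfer_capable, supports_tpm, cs_count); only the struct's exact layout is an assumption:

#include <linux/types.h>

/* Shape inferred from the uses above. */
struct tegra_qspi_soc_data {
        bool has_ext_dma;       /* external DMA channels available */
        bool cmb_xfer_capable;  /* combined cmd/addr/data sequence */
        bool supports_tpm;      /* SPI_TPM_HW_FLOW handling */
        unsigned int cs_count;  /* chip selects exposed by this SoC */
};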
In tegra_qspi_probe():
1578  host = devm_spi_alloc_host(&pdev->dev, sizeof(*tqspi));
1580  return -ENOMEM;
1585  host->mode_bits = SPI_MODE_0 | SPI_MODE_3 | SPI_CS_HIGH |
1587  host->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) | SPI_BPW_MASK(8);
1588  host->flags = SPI_CONTROLLER_HALF_DUPLEX;
1589  host->setup = tegra_qspi_setup;
1590  host->transfer_one_message = tegra_qspi_transfer_one_message;
1591  host->num_chipselect = 1;
1592  host->auto_runtime_pm = true;
1594  bus_num = of_alias_get_id(pdev->dev.of_node, "spi");
1596  host->bus_num = bus_num;
1598  tqspi->host = host;
1599  tqspi->dev = &pdev->dev;
1600  spin_lock_init(&tqspi->lock);
1602  tqspi->soc_data = device_get_match_data(&pdev->dev);
1603  host->num_chipselect = tqspi->soc_data->cs_count;
1604  tqspi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &r);
1605  if (IS_ERR(tqspi->base))
1606  return PTR_ERR(tqspi->base);
1608  tqspi->phys = r->start;
1612  tqspi->irq = qspi_irq;
1614  if (!has_acpi_companion(tqspi->dev)) {
1615  tqspi->clk = devm_clk_get(&pdev->dev, "qspi");
1616  if (IS_ERR(tqspi->clk)) {
1617  ret = PTR_ERR(tqspi->clk);
1618  dev_err(&pdev->dev, "failed to get clock: %d\n", ret);
1624  tqspi->max_buf_size = QSPI_FIFO_DEPTH << 2;
1625  tqspi->dma_buf_size = DEFAULT_QSPI_DMA_BUF_LEN;
1631  if (tqspi->use_dma)
1632  tqspi->max_buf_size = tqspi->dma_buf_size;
1634  init_completion(&tqspi->tx_dma_complete);
1635  init_completion(&tqspi->rx_dma_complete);
1636  init_completion(&tqspi->xfer_completion);
1638  pm_runtime_enable(&pdev->dev);
1639  ret = pm_runtime_resume_and_get(&pdev->dev);
1641  dev_err(&pdev->dev, "failed to get runtime PM: %d\n", ret);
1645  if (device_reset(tqspi->dev) < 0)
1646  dev_warn_once(tqspi->dev, "device reset failed\n");
1648  tqspi->def_command1_reg = QSPI_M_S | QSPI_CS_SW_HW | QSPI_CS_SW_VAL;
1649  tegra_qspi_writel(tqspi, tqspi->def_command1_reg, QSPI_COMMAND1);
1650  tqspi->spi_cs_timing1 = tegra_qspi_readl(tqspi, QSPI_CS_TIMING1);
1651  tqspi->spi_cs_timing2 = tegra_qspi_readl(tqspi, QSPI_CS_TIMING2);
1652  tqspi->def_command2_reg = tegra_qspi_readl(tqspi, QSPI_COMMAND2);
1654  pm_runtime_put(&pdev->dev);
1656  ret = request_threaded_irq(tqspi->irq, NULL,
1658  dev_name(&pdev->dev), tqspi);
1660  dev_err(&pdev->dev, "failed to request IRQ#%u: %d\n", tqspi->irq, ret);
1664  host->dev.of_node = pdev->dev.of_node;
1667  dev_err(&pdev->dev, "failed to register host: %d\n", ret);
1676  pm_runtime_force_suspend(&pdev->dev);
In tegra_qspi_remove():
1687  free_irq(tqspi->irq, tqspi);
1688  pm_runtime_force_suspend(&pdev->dev);
In tegra_qspi_resume():
1711  tegra_qspi_writel(tqspi, tqspi->command1_reg, QSPI_COMMAND1);
1712  tegra_qspi_writel(tqspi, tqspi->def_command2_reg, QSPI_COMMAND2);
In tegra_qspi_runtime_suspend():
1724  if (has_acpi_companion(tqspi->dev))
1729  clk_disable_unprepare(tqspi->clk);
In tegra_qspi_runtime_resume():
1741  if (has_acpi_companion(tqspi->dev))
1743  ret = clk_prepare_enable(tqspi->clk);
1745  dev_err(tqspi->dev, "failed to enable clock: %d\n", ret);
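On ACPI systems the controller clock is firmware-managed, so the runtime-PM callbacks reduce to no-ops; on devicetree systems they gate the "qspi" clock obtained in probe. A sketch of the resume side, assuming the SPI controller is the device's drvdata:

#include <linux/acpi.h>
#include <linux/clk.h>
#include <linux/spi/spi.h>

static int qspi_runtime_resume_sketch(struct device *dev)
{
        struct spi_controller *host = dev_get_drvdata(dev);
        struct tegra_qspi *tqspi = spi_controller_get_devdata(host);

        if (has_acpi_companion(dev))
                return 0;               /* clock owned by firmware */
        return clk_prepare_enable(tqspi->clk);
}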
In tegra_qspi_driver:
1757  .name = "tegra-qspi",

1767  MODULE_ALIAS("platform:qspi-tegra");