Lines matching refs: mdata
270 static void mtk_spi_reset(struct mtk_spi *mdata)
275 reg_val = readl(mdata->base + SPI_CMD_REG);
277 writel(reg_val, mdata->base + SPI_CMD_REG);
279 reg_val = readl(mdata->base + SPI_CMD_REG);
281 writel(reg_val, mdata->base + SPI_CMD_REG);
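
The four matches at 275-281 form a read-modify-write pair: read SPI_CMD_REG, set the controller's software-reset bit, write back, then read again and clear it. A minimal stand-alone model of that pulse, with a plain variable standing in for the MMIO register and an assumed bit position for the reset flag (the real reset-bit value is not shown in these matches):

#include <stdint.h>
#include <stdio.h>

#define SPI_CMD_RST (1u << 2)   /* assumed bit position, not the hardware's */

static uint32_t fake_cmd_reg;   /* stands in for SPI_CMD_REG MMIO */

static uint32_t reg_read(void)        { return fake_cmd_reg; }
static void     reg_write(uint32_t v) { fake_cmd_reg = v; }

/* Pulse the reset bit: set it, then clear it, as the matched lines do. */
static void model_spi_reset(void)
{
    uint32_t val;

    val = reg_read();
    val |= SPI_CMD_RST;
    reg_write(val);

    val = reg_read();
    val &= ~SPI_CMD_RST;
    reg_write(val);
}

int main(void)
{
    model_spi_reset();
    printf("cmd reg after reset pulse: %#x\n", (unsigned)fake_cmd_reg);
    return 0;
}
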
286 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
297 setup = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
302 hold = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
307 inactive = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
310 reg_val = readl(mdata->base + SPI_CFG0_REG);
311 if (mdata->dev_comp->enhance_timing) {
337 writel(reg_val, mdata->base + SPI_CFG0_REG);
342 reg_val = readl(mdata->base + SPI_CFG1_REG);
345 writel(reg_val, mdata->base + SPI_CFG1_REG);
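
The matches at 297, 302 and 307 apply one conversion three times, turning a CS setup/hold/inactive delay in nanoseconds into SPI clock ticks: multiply by the clock rate rounded up to whole MHz, then divide by 1000. A runnable sketch of just that arithmetic (DIV_ROUND_UP re-declared locally; the 52 MHz example rate is arbitrary):

#include <stdint.h>
#include <stdio.h>

/* Same rounding the kernel's DIV_ROUND_UP macro provides. */
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* delay_ns -> SPI clock ticks: scale by whole MHz first to keep the
 * integer math from truncating to zero for sub-microsecond delays. */
static uint32_t delay_to_ticks(uint32_t delay_ns, uint32_t spi_clk_hz)
{
    return (delay_ns * DIV_ROUND_UP(spi_clk_hz, 1000000)) / 1000;
}

int main(void)
{
    uint32_t clk_hz = 52000000; /* arbitrary example rate */

    printf("100 ns setup -> %u ticks\n", delay_to_ticks(100, clk_hz));
    printf("250 ns hold  -> %u ticks\n", delay_to_ticks(250, clk_hz));
    return 0;
}
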
357 struct mtk_spi *mdata = spi_controller_get_devdata(host);
362 reg_val = readl(mdata->base + SPI_CMD_REG);
363 if (mdata->dev_comp->ipm_design) {
399 if (mdata->dev_comp->enhance_timing) {
421 writel(reg_val, mdata->base + SPI_CMD_REG);
424 if (mdata->dev_comp->need_pad_sel)
425 writel(mdata->pad_sel[spi_get_chipselect(spi, 0)],
426 mdata->base + SPI_PAD_SEL_REG);
429 if (mdata->dev_comp->enhance_timing) {
430 if (mdata->dev_comp->ipm_design) {
431 reg_val = readl(mdata->base + SPI_CMD_REG);
435 writel(reg_val, mdata->base + SPI_CMD_REG);
437 reg_val = readl(mdata->base + SPI_CFG1_REG);
441 writel(reg_val, mdata->base + SPI_CFG1_REG);
444 reg_val = readl(mdata->base + SPI_CFG1_REG);
448 writel(reg_val, mdata->base + SPI_CFG1_REG);
465 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
470 reg_val = readl(mdata->base + SPI_CMD_REG);
473 writel(reg_val, mdata->base + SPI_CMD_REG);
476 writel(reg_val, mdata->base + SPI_CMD_REG);
477 mdata->state = MTK_SPI_IDLE;
478 mtk_spi_reset(mdata);
486 struct mtk_spi *mdata = spi_controller_get_devdata(host);
488 if (speed_hz < mdata->spi_clk_hz / 2)
489 div = DIV_ROUND_UP(mdata->spi_clk_hz, speed_hz);
495 if (mdata->dev_comp->enhance_timing) {
496 reg_val = readl(mdata->base + SPI_CFG2_REG);
503 writel(reg_val, mdata->base + SPI_CFG2_REG);
505 reg_val = readl(mdata->base + SPI_CFG0_REG);
511 writel(reg_val, mdata->base + SPI_CFG0_REG);
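
At 488-489 the divider is only computed when the requested speed is below half the source clock. A sketch of that selection follows; the div = 1 fallback for faster requests is an assumption about the code between the matches:

#include <stdint.h>
#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Divider selection as in the matched lines; requests at or above
 * clk/2 are assumed to run undivided. */
static uint32_t pick_div(uint32_t spi_clk_hz, uint32_t speed_hz)
{
    if (speed_hz < spi_clk_hz / 2)
        return DIV_ROUND_UP(spi_clk_hz, speed_hz);
    return 1;
}

int main(void)
{
    uint32_t clk = 109200000; /* arbitrary example source rate */

    printf("div for 1 MHz:  %u\n", pick_div(clk, 1000000));
    printf("div for 60 MHz: %u\n", pick_div(clk, 60000000));
    printf("effective rate for 1 MHz request: %u Hz\n",
           clk / pick_div(clk, 1000000));
    return 0;
}

Rounding the divider up means the effective rate never exceeds the requested speed.
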
518 struct mtk_spi *mdata = spi_controller_get_devdata(host);
520 if (mdata->dev_comp->ipm_design)
522 mdata->xfer_len,
526 mdata->xfer_len,
529 packet_loop = mdata->xfer_len / packet_size;
531 reg_val = readl(mdata->base + SPI_CFG1_REG);
532 if (mdata->dev_comp->ipm_design)
539 writel(reg_val, mdata->base + SPI_CFG1_REG);
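
mtk_spi_setup_packet (518-539) describes a transfer to the hardware as packet_size bytes repeated packet_loop times, with packet_size capped per design (a larger cap on ipm parts, per the branch at 520). A runnable model of the split; the 1 KiB and 64 KiB caps are stand-ins for the driver's constants, and packet_loop divides evenly only because the mult_delta trimming shown further down guarantees it:

#include <stdint.h>
#include <stdio.h>

#define PACKET_CAP     1024u   /* assumed cap for the classic design */
#define PACKET_CAP_IPM 65536u  /* assumed cap for the ipm design */

/* Split a transfer into (packet_size, packet_loop) for the controller. */
static void setup_packet(uint32_t xfer_len, int ipm_design,
                         uint32_t *packet_size, uint32_t *packet_loop)
{
    uint32_t cap = ipm_design ? PACKET_CAP_IPM : PACKET_CAP;

    *packet_size = xfer_len < cap ? xfer_len : cap;
    *packet_loop = xfer_len / *packet_size;
}

int main(void)
{
    uint32_t size, loop;

    setup_packet(4096, 0, &size, &loop);
    printf("4096 bytes, classic: %u x %u\n", loop, size); /* 4 x 1024 */
    setup_packet(4096, 1, &size, &loop);
    printf("4096 bytes, ipm:     %u x %u\n", loop, size); /* 1 x 4096 */
    return 0;
}
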
545 struct mtk_spi *mdata = spi_controller_get_devdata(host);
547 cmd = readl(mdata->base + SPI_CMD_REG);
548 if (mdata->state == MTK_SPI_IDLE)
552 writel(cmd, mdata->base + SPI_CMD_REG);
555 static int mtk_spi_get_mult_delta(struct mtk_spi *mdata, u32 xfer_len)
559 if (mdata->dev_comp->ipm_design) {
573 struct mtk_spi *mdata = spi_controller_get_devdata(host);
575 if (mdata->tx_sgl_len && mdata->rx_sgl_len) {
576 if (mdata->tx_sgl_len > mdata->rx_sgl_len) {
577 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len);
578 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
579 mdata->rx_sgl_len = mult_delta;
580 mdata->tx_sgl_len -= mdata->xfer_len;
582 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len);
583 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
584 mdata->tx_sgl_len = mult_delta;
585 mdata->rx_sgl_len -= mdata->xfer_len;
587 } else if (mdata->tx_sgl_len) {
588 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len);
589 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
590 mdata->tx_sgl_len = mult_delta;
591 } else if (mdata->rx_sgl_len) {
592 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len);
593 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
594 mdata->rx_sgl_len = mult_delta;
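
mtk_spi_get_mult_delta and mtk_spi_update_mdata_len (555-594) trim each DMA round so xfer_len is a whole number of maximum-size packets, keep the remainder (mult_delta) for the next round, and pace a bidirectional transfer on the shorter of the two scatterlist lengths. A runnable model of that bookkeeping with plain counters in place of sg entries (1 KiB cap assumed, as above):

#include <stdint.h>
#include <stdio.h>

#define PACKET_CAP 1024u   /* assumed non-ipm cap */

/* Remainder that would spoil a whole-packets transfer of len bytes. */
static uint32_t mult_delta(uint32_t len)
{
    return len > PACKET_CAP ? len % PACKET_CAP : 0;
}

/* One round of the tx/rx length split, mirroring the three branches
 * at 575-594: both directions, tx-only, rx-only. */
static uint32_t split_once(uint32_t *tx_len, uint32_t *rx_len)
{
    uint32_t delta, xfer_len = 0;

    if (*tx_len && *rx_len) {
        if (*tx_len > *rx_len) {
            delta = mult_delta(*rx_len);
            xfer_len = *rx_len - delta;
            *rx_len = delta;
            *tx_len -= xfer_len;
        } else {
            delta = mult_delta(*tx_len);
            xfer_len = *tx_len - delta;
            *tx_len = delta;
            *rx_len -= xfer_len;
        }
    } else if (*tx_len) {
        delta = mult_delta(*tx_len);
        xfer_len = *tx_len - delta;
        *tx_len = delta;
    } else if (*rx_len) {
        delta = mult_delta(*rx_len);
        xfer_len = *rx_len - delta;
        *rx_len = delta;
    }
    return xfer_len;
}

int main(void)
{
    uint32_t tx = 5000, rx = 3000, n;

    while (tx || rx) {
        n = split_once(&tx, &rx);
        printf("round: %u bytes (tx left %u, rx left %u)\n", n, tx, rx);
    }
    return 0;
}

Every round moves either a packet-aligned chunk or a final sub-packet tail, so the loop always terminates.
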
601 struct mtk_spi *mdata = spi_controller_get_devdata(host);
603 if (mdata->tx_sgl) {
605 mdata->base + SPI_TX_SRC_REG);
607 if (mdata->dev_comp->dma_ext)
609 mdata->base + SPI_TX_SRC_REG_64);
613 if (mdata->rx_sgl) {
615 mdata->base + SPI_RX_DST_REG);
617 if (mdata->dev_comp->dma_ext)
619 mdata->base + SPI_RX_DST_REG_64);
630 struct mtk_spi *mdata = spi_controller_get_devdata(host);
632 mdata->cur_transfer = xfer;
633 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len);
634 mdata->num_xfered = 0;
640 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);
645 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
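
The FIFO path at 630-645 writes whole 32-bit words with iowrite32_rep() and then packs any 1-3 trailing bytes into one final padded word. A runnable model with a capture array standing in for SPI_TX_DATA_REG:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Captures what would be written to SPI_TX_DATA_REG, oldest first. */
static uint32_t fifo[16];
static unsigned int fifo_n;

static void fifo_push(uint32_t word) { fifo[fifo_n++] = word; }

/* Whole words first, then one padded word for the 1-3 byte remainder. */
static void fifo_fill(const uint8_t *buf, uint32_t len)
{
    uint32_t cnt = len / 4, remainder = len % 4, reg_val, i;

    for (i = 0; i < cnt; i++) {
        memcpy(&reg_val, buf + 4 * i, 4);
        fifo_push(reg_val);            /* models iowrite32_rep() */
    }
    if (remainder > 0) {
        reg_val = 0;
        memcpy(&reg_val, buf + 4 * cnt, remainder);
        fifo_push(reg_val);            /* models the final writel() */
    }
}

int main(void)
{
    const uint8_t msg[7] = { 1, 2, 3, 4, 5, 6, 7 };
    unsigned int i;

    fifo_fill(msg, sizeof(msg));
    for (i = 0; i < fifo_n; i++)
        printf("word %u: %#010x\n", i, (unsigned)fifo[i]);
    return 0;
}
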
659 struct mtk_spi *mdata = spi_controller_get_devdata(host);
661 mdata->tx_sgl = NULL;
662 mdata->rx_sgl = NULL;
663 mdata->tx_sgl_len = 0;
664 mdata->rx_sgl_len = 0;
665 mdata->cur_transfer = xfer;
666 mdata->num_xfered = 0;
670 cmd = readl(mdata->base + SPI_CMD_REG);
675 writel(cmd, mdata->base + SPI_CMD_REG);
678 mdata->tx_sgl = xfer->tx_sg.sgl;
680 mdata->rx_sgl = xfer->rx_sg.sgl;
682 if (mdata->tx_sgl) {
683 xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
684 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
686 if (mdata->rx_sgl) {
687 xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
688 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
703 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
707 if (mdata->dev_comp->ipm_design) {
713 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG);
734 struct mtk_spi *mdata = spi_controller_get_devdata(spi->controller);
739 if (mdata->dev_comp->need_pad_sel && spi_get_csgpiod(spi, 0))
750 struct mtk_spi *mdata = spi_controller_get_devdata(host);
751 struct spi_transfer *xfer = mdata->cur_transfer;
755 cnt = mdata->xfer_len / 4;
756 ioread32_rep(mdata->base + SPI_RX_DATA_REG,
757 xfer->rx_buf + mdata->num_xfered, cnt);
758 remainder = mdata->xfer_len % 4;
760 reg_val = readl(mdata->base + SPI_RX_DATA_REG);
761 memcpy(xfer->rx_buf + (cnt * 4) + mdata->num_xfered,
767 mdata->num_xfered += mdata->xfer_len;
768 if (mdata->num_xfered == xfer->len) {
773 len = xfer->len - mdata->num_xfered;
774 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len);
778 cnt = mdata->xfer_len / 4;
779 iowrite32_rep(mdata->base + SPI_TX_DATA_REG,
780 xfer->tx_buf + mdata->num_xfered, cnt);
782 remainder = mdata->xfer_len % 4;
786 xfer->tx_buf + (cnt * 4) + mdata->num_xfered,
788 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
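
The interrupt handler's FIFO branch (750-788) does the mirror image on receive: ioread32_rep() for whole words, then one more register read from which only the valid remainder bytes are copied out. A sketch of that unpacking; the byte order assumes a little-endian CPU, which the readl()-based accessors also imply here:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Pretend FIFO contents, as if read via ioread32_rep()/readl(). */
static const uint32_t fifo[] = { 0x04030201, 0x00000605 };

int main(void)
{
    uint8_t rx_buf[6];
    uint32_t len = sizeof(rx_buf);
    uint32_t cnt = len / 4, remainder = len % 4, reg_val;
    unsigned int i;

    /* whole words straight into the buffer */
    memcpy(rx_buf, fifo, cnt * 4);

    /* trailing 1-3 bytes: read one more word, keep only the valid part */
    if (remainder > 0) {
        reg_val = fifo[cnt];
        memcpy(rx_buf + cnt * 4, &reg_val, remainder);
    }

    for (i = 0; i < len; i++)
        printf("%u ", (unsigned)rx_buf[i]);
    printf("\n");
    return 0;
}
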
797 if (mdata->tx_sgl)
798 xfer->tx_dma += mdata->xfer_len;
799 if (mdata->rx_sgl)
800 xfer->rx_dma += mdata->xfer_len;
802 if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) {
803 mdata->tx_sgl = sg_next(mdata->tx_sgl);
804 if (mdata->tx_sgl) {
805 xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
806 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
809 if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) {
810 mdata->rx_sgl = sg_next(mdata->rx_sgl);
811 if (mdata->rx_sgl) {
812 xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
813 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
817 if (!mdata->tx_sgl && !mdata->rx_sgl) {
819 cmd = readl(mdata->base + SPI_CMD_REG);
822 writel(cmd, mdata->base + SPI_CMD_REG);
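
In the DMA branch (797-822), each completed round advances tx_dma/rx_dma by xfer_len, moves to sg_next() once an entry is drained, and only clears the DMA enables in SPI_CMD_REG when both lists are empty. A runnable model of the walk for one direction, with a plain array standing in for struct scatterlist and the packet-aligned round length kept as in the mult_delta model above:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for one scatterlist entry: a DMA address plus a length. */
struct sg_entry {
    uint64_t dma_addr;
    uint32_t len;
};

int main(void)
{
    struct sg_entry sgl[] = {
        { 0x80000000, 3000 },
        { 0x80100000, 1024 },
    };
    unsigned int idx = 0, nents = 2;
    uint64_t cur_dma = sgl[0].dma_addr;
    uint32_t left = sgl[0].len, xfer_len;

    while (idx < nents) {
        /* one packet-aligned round (1 KiB cap assumed) */
        xfer_len = left > 1024 ? left - left % 1024 : left;
        printf("dma round: addr %#llx len %u\n",
               (unsigned long long)cur_dma, xfer_len);

        cur_dma += xfer_len;   /* xfer->tx_dma += mdata->xfer_len */
        left -= xfer_len;
        if (left == 0 && ++idx < nents) {   /* sg_next() */
            cur_dma = sgl[idx].dma_addr;
            left = sgl[idx].len;
        }
    }
    printf("all entries drained: clear the DMA enable bits\n");
    return 0;
}
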
839 struct mtk_spi *mdata = spi_controller_get_devdata(host);
842 reg_val = readl(mdata->base + SPI_STATUS0_REG);
844 mdata->state = MTK_SPI_PAUSED;
846 mdata->state = MTK_SPI_IDLE;
849 if (mdata->use_spimem) {
850 complete(&mdata->spimem_done);
900 struct mtk_spi *mdata = spi_controller_get_devdata(host);
902 writel((u32)(mdata->tx_dma & MTK_SPI_32BITS_MASK),
903 mdata->base + SPI_TX_SRC_REG);
905 if (mdata->dev_comp->dma_ext)
906 writel((u32)(mdata->tx_dma >> 32),
907 mdata->base + SPI_TX_SRC_REG_64);
911 writel((u32)(mdata->rx_dma & MTK_SPI_32BITS_MASK),
912 mdata->base + SPI_RX_DST_REG);
914 if (mdata->dev_comp->dma_ext)
915 writel((u32)(mdata->rx_dma >> 32),
916 mdata->base + SPI_RX_DST_REG_64);
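
The spi-mem DMA setup at 902-916 programs a 64-bit DMA address as two 32-bit writes: the low word always, the high word only when the IP has the dma_ext registers. The split itself is plain mask-and-shift:

#include <stdint.h>
#include <stdio.h>

#define MTK_SPI_32BITS_MASK 0xffffffffu

int main(void)
{
    uint64_t tx_dma = 0x1234567890ull;  /* example DMA address */

    /* low word always; high word only when the IP has dma_ext regs */
    printf("SPI_TX_SRC_REG    <- %#010x\n",
           (unsigned)(tx_dma & MTK_SPI_32BITS_MASK));
    printf("SPI_TX_SRC_REG_64 <- %#010x\n", (unsigned)(tx_dma >> 32));
    return 0;
}
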
924 struct mtk_spi *mdata = spi_controller_get_devdata(mem->spi->controller);
942 if (!wait_for_completion_timeout(&mdata->spimem_done,
944 dev_err(mdata->dev, "spi-mem transfer timeout\n");
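
The waiter at 942-944 blocks on mdata->spimem_done until the interrupt handler completes it (849-850); the remove path (1295-1296) also completes it so teardown cannot strand a waiter. A user-space model of the same signal/wait pairing using POSIX primitives (build with -lpthread), leaving out the timeout the driver gets from wait_for_completion_timeout():

#include <pthread.h>
#include <stdio.h>
#include <unistd.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  cond = PTHREAD_COND_INITIALIZER;
static int done;    /* models the completion's "done" state */

/* IRQ-handler side: complete(&mdata->spimem_done) */
static void *irq_side(void *arg)
{
    (void)arg;
    usleep(10000);              /* pretend the transfer ran */
    pthread_mutex_lock(&lock);
    done = 1;
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&lock);
    return NULL;
}

int main(void)
{
    pthread_t t;

    pthread_create(&t, NULL, irq_side, NULL);

    /* waiter side: wait_for_completion_timeout(), timeout elided */
    pthread_mutex_lock(&lock);
    while (!done)
        pthread_cond_wait(&cond, &lock);
    pthread_mutex_unlock(&lock);

    printf("spi-mem transfer signalled\n");
    pthread_join(t, NULL);
    return 0;
}
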
954 struct mtk_spi *mdata = spi_controller_get_devdata(mem->spi->controller);
959 mdata->use_spimem = true;
960 reinit_completion(&mdata->spimem_done);
962 mtk_spi_reset(mdata);
966 reg_val = readl(mdata->base + SPI_CFG3_IPM_REG);
980 writel(0, mdata->base + SPI_CFG1_REG);
983 mdata->xfer_len = op->data.nbytes;
1013 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG);
1023 mdata->use_spimem = false;
1047 mdata->tx_dma = dma_map_single(mdata->dev, tx_tmp_buf,
1049 if (dma_mapping_error(mdata->dev, mdata->tx_dma)) {
1066 mdata->rx_dma = dma_map_single(mdata->dev,
1070 if (dma_mapping_error(mdata->dev, mdata->rx_dma)) {
1076 reg_val = readl(mdata->base + SPI_CMD_REG);
1080 writel(reg_val, mdata->base + SPI_CMD_REG);
1092 reg_val = readl(mdata->base + SPI_CMD_REG);
1096 writel(reg_val, mdata->base + SPI_CMD_REG);
1100 dma_unmap_single(mdata->dev, mdata->rx_dma,
1110 dma_unmap_single(mdata->dev, mdata->tx_dma,
1114 mdata->use_spimem = false;
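
mtk_spi_mem_exec_op (954-1114) stages the op through temporary buffers, maps them with dma_map_single(), and unmaps on both the success and error paths so every mapping stays paired. A loose user-space reconstruction of the read-op staging, with malloc/memcpy in place of the DMA mapping; the buffer layout and names are illustrative, not the driver's exact scheme:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Stage a read op through bounce buffers; "hardware" is a memset. */
static int exec_read_op(uint8_t cmd, const uint8_t *addr, unsigned int naddr,
                        uint8_t *in, unsigned int nbytes)
{
    uint8_t *tx_tmp_buf, *rx_tmp_buf;
    int ret = 0;

    tx_tmp_buf = malloc(1 + naddr);     /* opcode + address phase */
    if (!tx_tmp_buf)
        return -1;
    tx_tmp_buf[0] = cmd;
    memcpy(tx_tmp_buf + 1, addr, naddr);

    rx_tmp_buf = malloc(nbytes);        /* data-in bounce buffer */
    if (!rx_tmp_buf) {
        ret = -1;
        goto unmap_tx;                  /* error path still unwinds tx */
    }

    /* ...map both buffers, program SPI_TX_SRC/SPI_RX_DST, trigger,
     * wait on the completion... */
    memset(rx_tmp_buf, 0xa5, nbytes);   /* pretend DMA filled it */

    memcpy(in, rx_tmp_buf, nbytes);     /* copy out after "unmap" */
    free(rx_tmp_buf);
unmap_tx:
    free(tx_tmp_buf);
    return ret;
}

int main(void)
{
    uint8_t addr[3] = { 0x00, 0x10, 0x00 }, data[4];

    if (exec_read_op(0x03, addr, sizeof(addr), data, sizeof(data)) == 0)
        printf("read back: %02x %02x %02x %02x\n",
               data[0], data[1], data[2], data[3]);
    return 0;
}
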
1133 struct mtk_spi *mdata;
1136 host = devm_spi_alloc_host(dev, sizeof(*mdata));
1152 mdata = spi_controller_get_devdata(host);
1153 mdata->dev_comp = device_get_match_data(dev);
1155 if (mdata->dev_comp->enhance_timing)
1158 if (mdata->dev_comp->must_tx)
1160 if (mdata->dev_comp->ipm_design)
1164 if (mdata->dev_comp->ipm_design) {
1165 mdata->dev = dev;
1168 init_completion(&mdata->spimem_done);
1171 if (mdata->dev_comp->need_pad_sel) {
1172 mdata->pad_num = of_property_count_u32_elems(dev->of_node,
1174 if (mdata->pad_num < 0)
1178 mdata->pad_sel = devm_kmalloc_array(dev, mdata->pad_num,
1180 if (!mdata->pad_sel)
1183 for (i = 0; i < mdata->pad_num; i++) {
1186 i, &mdata->pad_sel[i]);
1187 if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL)
1190 i, mdata->pad_sel[i]);
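
Probe validates each device-tree pad-select value against MT8173_SPI_MAX_PAD_SEL before using it (1183-1190). A sketch of that bounds check over a stand-in array; the property name and the cap's numeric value are not shown in these matches, so both are assumptions:

#include <stdint.h>
#include <stdio.h>

#define MAX_PAD_SEL 3u  /* assumed cap, standing in for MT8173_SPI_MAX_PAD_SEL */

int main(void)
{
    /* stand-in for the pad-select u32 array read from the DT */
    uint32_t pad_sel[] = { 0, 1, 5 };
    unsigned int i, pad_num = sizeof(pad_sel) / sizeof(pad_sel[0]);

    for (i = 0; i < pad_num; i++) {
        if (pad_sel[i] > MAX_PAD_SEL) {
            fprintf(stderr, "wrong pad-select[%u]: %u\n", i, pad_sel[i]);
            return 1;   /* probe would bail out here */
        }
    }
    printf("all %u pad-select entries valid\n", pad_num);
    return 0;
}
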
1195 mdata->base = devm_platform_ioremap_resource(pdev, 0);
1196 if (IS_ERR(mdata->base))
1197 return PTR_ERR(mdata->base);
1206 if (mdata->dev_comp->ipm_design)
1211 mdata->parent_clk = devm_clk_get(dev, "parent-clk");
1212 if (IS_ERR(mdata->parent_clk))
1213 return dev_err_probe(dev, PTR_ERR(mdata->parent_clk),
1216 mdata->sel_clk = devm_clk_get(dev, "sel-clk");
1217 if (IS_ERR(mdata->sel_clk))
1218 return dev_err_probe(dev, PTR_ERR(mdata->sel_clk), "failed to get sel-clk\n");
1220 mdata->spi_clk = devm_clk_get(dev, "spi-clk");
1221 if (IS_ERR(mdata->spi_clk))
1222 return dev_err_probe(dev, PTR_ERR(mdata->spi_clk), "failed to get spi-clk\n");
1224 mdata->spi_hclk = devm_clk_get_optional(dev, "hclk");
1225 if (IS_ERR(mdata->spi_hclk))
1226 return dev_err_probe(dev, PTR_ERR(mdata->spi_hclk), "failed to get hclk\n");
1228 ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk);
1232 ret = clk_prepare_enable(mdata->spi_hclk);
1236 ret = clk_prepare_enable(mdata->spi_clk);
1238 clk_disable_unprepare(mdata->spi_hclk);
1242 mdata->spi_clk_hz = clk_get_rate(mdata->spi_clk);
1244 if (mdata->dev_comp->no_need_unprepare) {
1245 clk_disable(mdata->spi_clk);
1246 clk_disable(mdata->spi_hclk);
1248 clk_disable_unprepare(mdata->spi_clk);
1249 clk_disable_unprepare(mdata->spi_hclk);
1252 if (mdata->dev_comp->need_pad_sel) {
1253 if (mdata->pad_num != host->num_chipselect)
1256 mdata->pad_num, host->num_chipselect);
1263 if (mdata->dev_comp->dma_ext)
1292 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1295 if (mdata->use_spimem && !completion_done(&mdata->spimem_done))
1296 complete(&mdata->spimem_done);
1307 mtk_spi_reset(mdata);
1309 if (mdata->dev_comp->no_need_unprepare) {
1310 clk_unprepare(mdata->spi_clk);
1311 clk_unprepare(mdata->spi_hclk);
1324 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1331 clk_disable_unprepare(mdata->spi_clk);
1332 clk_disable_unprepare(mdata->spi_hclk);
1344 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1349 ret = clk_prepare_enable(mdata->spi_clk);
1355 ret = clk_prepare_enable(mdata->spi_hclk);
1358 clk_disable_unprepare(mdata->spi_clk);
1365 clk_disable_unprepare(mdata->spi_clk);
1366 clk_disable_unprepare(mdata->spi_hclk);
1377 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1379 if (mdata->dev_comp->no_need_unprepare) {
1380 clk_disable(mdata->spi_clk);
1381 clk_disable(mdata->spi_hclk);
1383 clk_disable_unprepare(mdata->spi_clk);
1384 clk_disable_unprepare(mdata->spi_hclk);
1393 struct mtk_spi *mdata = spi_controller_get_devdata(host);
1396 if (mdata->dev_comp->no_need_unprepare) {
1397 ret = clk_enable(mdata->spi_clk);
1402 ret = clk_enable(mdata->spi_hclk);
1405 clk_disable(mdata->spi_clk);
1409 ret = clk_prepare_enable(mdata->spi_clk);
1415 ret = clk_prepare_enable(mdata->spi_hclk);
1418 clk_disable_unprepare(mdata->spi_clk);
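
The runtime PM callbacks (1377-1418) exploit no_need_unprepare: on such parts the clocks stay prepared across runtime suspend, so suspend/resume pair clk_disable() with clk_enable() only, and resume unwinds spi_clk if enabling spi_hclk fails. A stub-based model of that pairing and unwind (the stubs always succeed, but the error path mirrors the driver's structure):

#include <stdio.h>

/* Stub clock handles; the counter models the enable reference pair. */
struct clk { const char *name; int enabled; };

static int  clk_enable_stub(struct clk *c)  { c->enabled++; return 0; }
static void clk_disable_stub(struct clk *c) { c->enabled--; }

static struct clk spi_clk  = { "spi-clk", 0 };
static struct clk spi_hclk = { "hclk", 0 };

/* runtime resume on a no_need_unprepare part: enable only, unwind on error */
static int runtime_resume(void)
{
    int ret;

    ret = clk_enable_stub(&spi_clk);
    if (ret < 0)
        return ret;
    ret = clk_enable_stub(&spi_hclk);
    if (ret < 0) {
        clk_disable_stub(&spi_clk);   /* undo the first enable */
        return ret;
    }
    return 0;
}

/* runtime suspend: disable both, but the clocks stay prepared */
static void runtime_suspend(void)
{
    clk_disable_stub(&spi_clk);
    clk_disable_stub(&spi_hclk);
}

int main(void)
{
    runtime_resume();
    runtime_suspend();
    printf("spi-clk enabled=%d, hclk enabled=%d\n",
           spi_clk.enabled, spi_hclk.enabled);
    return 0;
}

Skipping the unprepare step keeps the runtime PM fast path free of the sleeping clk_prepare() call.
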