Lines Matching +full:lgm +full:-ebunand (drivers/mtd/nand/raw/intel-nand-controller.c)
1 // SPDX-License-Identifier: GPL-2.0+
7 #include <linux/dma-direction.h>
8 #include <linux/dma-mapping.h>
137 return readl_poll_timeout(ctrl->ebu + EBU_WAIT, status, in ebu_nand_waitrdy()
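The match at line 137 is the poll loop of the ready/busy helper: readl_poll_timeout() spins on the EBU_WAIT register until the controller signals completion or the budget expires. A minimal sketch of the likely full helper, assuming a nand_to_ebu() container-of helper and an EBU_WAIT_WR_C status bit; note the last argument is microseconds, because the exec_op caller (line 552 below) pre-multiplies timeout_ms by 1000:

static int ebu_nand_waitrdy(struct nand_chip *chip, int timeout_ms)
{
	struct ebu_nand_controller *ctrl = nand_to_ebu(chip);
	u32 status;

	/* Poll every 20 us until the write/ack bit is raised. */
	return readl_poll_timeout(ctrl->ebu + EBU_WAIT, status,
				  status & EBU_WAIT_WR_C, 20, timeout_ms);
}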
145 u8 cs_num = ebu_host->cs_num; in ebu_nand_readb()
148 val = readb(ebu_host->cs[cs_num].chipaddr + HSNAND_CS_OFFS); in ebu_nand_readb()
156 u8 cs_num = ebu_host->cs_num; in ebu_nand_writeb()
158 writeb(value, ebu_host->cs[cs_num].chipaddr + offset); in ebu_nand_writeb()
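Lines 145-158 are the raw byte accessors: every transfer goes through the per-chip-select MMIO window (cs[cs_num].chipaddr), and the offset decides whether the byte is decoded as command, address, or data. Reconstructed around the matched lines; only the signatures and the standard nand_get_controller_data() lookup are added:

static u8 ebu_nand_readb(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	u8 cs_num = ebu_host->cs_num;

	/* Data reads use the plain chip-select window offset. */
	return readb(ebu_host->cs[cs_num].chipaddr + HSNAND_CS_OFFS);
}

static void ebu_nand_writeb(struct nand_chip *chip, u32 offset, u8 value)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	u8 cs_num = ebu_host->cs_num;

	/* The caller selects the CLE/ALE/data decode via the offset. */
	writeb(value, ebu_host->cs[cs_num].chipaddr + offset);
}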
182 writel(0, ebu_host->ebu + EBU_CON); in ebu_nand_disable()
188 void __iomem *nand_con = ebu_host->ebu + EBU_CON; in ebu_select_chip()
189 u32 cs = ebu_host->cs_num; in ebu_select_chip()
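Enable and disable are symmetric writes to EBU_CON: line 182 clears the register to park the controller, while ebu_select_chip() turns NAND mode on and routes both chip-select muxes to cs_num. A hedged sketch of the select path; the EBU_CON_* field macros are assumptions from context:

static void ebu_select_chip(struct nand_chip *chip)
{
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	void __iomem *nand_con = ebu_host->ebu + EBU_CON;
	u32 cs = ebu_host->cs_num;

	/* NAND mode on, CS mux routed to the active chip select,
	 * control signals active-low, latch enable tied to CS. */
	writel(EBU_CON_NANDM_EN | EBU_CON_CSMUX_E_EN | EBU_CON_CS_P_LOW |
	       EBU_CON_SE_P_LOW | EBU_CON_WP_P_LOW | EBU_CON_PRE_P_LOW |
	       EBU_CON_IN_CS_S(cs) | EBU_CON_OUT_CS_S(cs) |
	       EBU_CON_LAT_EN_CS_P, nand_con);
}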
201 unsigned int rate = clk_get_rate(ctrl->clk) / HZ_PER_MHZ; in ebu_nand_set_timings()
214 trecov = DIV_ROUND_UP(max(timings->tREA_max, timings->tREH_min), in ebu_nand_set_timings()
218 thold = DIV_ROUND_UP(max(timings->tDH_min, timings->tDS_min), period); in ebu_nand_set_timings()
221 trdwait = DIV_ROUND_UP(max(timings->tRC_min, timings->tREH_min), in ebu_nand_set_timings()
225 twrwait = DIV_ROUND_UP(max(timings->tWC_min, timings->tWH_min), period); in ebu_nand_set_timings()
231 writel(reg, ctrl->ebu + EBU_BUSCON(ctrl->cs_num)); in ebu_nand_set_timings()
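The timing helper converts ONFI SDR timings (picoseconds in struct nand_sdr_timings) into EBU clock cycles. With rate in MHz, the clock period is presumably derived as period = DIV_ROUND_UP(USEC_PER_SEC, rate) picoseconds, and each parameter becomes a DIV_ROUND_UP(time, period) cycle count packed into a bitfield of the per-chip-select EBU_BUSCON register. A worked example under that assumption:

/* 200 MHz EBU clock -> period = 1000000 / 200 = 5000 ps.
 * tREA_max = 20000 ps, tREH_min = 10000 ps:
 *   trecov = DIV_ROUND_UP(max(20000, 10000), 5000) = 4 cycles
 * Each count then lands in its EBU_BUSCON field via reg |= ... */
unsigned int rate = clk_get_rate(ctrl->clk) / HZ_PER_MHZ;	/* MHz */
unsigned int period = DIV_ROUND_UP(USEC_PER_SEC, rate);		/* ps */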
242 return -ERANGE; in ebu_nand_ooblayout_ecc()
244 oobregion->offset = HSNAND_ECC_OFFSET; in ebu_nand_ooblayout_ecc()
245 oobregion->length = chip->ecc.total; in ebu_nand_ooblayout_ecc()
256 return -ERANGE; in ebu_nand_ooblayout_free()
258 oobregion->offset = chip->ecc.total + HSNAND_ECC_OFFSET; in ebu_nand_ooblayout_free()
259 oobregion->length = mtd->oobsize - oobregion->offset; in ebu_nand_ooblayout_free()
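The two OOB callbacks describe a single-section layout: ECC bytes start at HSNAND_ECC_OFFSET and run for chip->ecc.total bytes, and everything after them up to oobsize is free; any section index other than 0 returns -ERANGE. They are wired up through a standard mtd_ooblayout_ops table, likely along these lines (the struct name is assumed):

static const struct mtd_ooblayout_ops ebu_nand_ooblayout_ops = {
	.ecc = ebu_nand_ooblayout_ecc,
	.free = ebu_nand_ooblayout_free,
};

/* registered from attach_chip, e.g.
 * mtd_set_ooblayout(mtd, &ebu_nand_ooblayout_ops); */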
273 dmaengine_terminate_async(ebu_host->dma_rx); in ebu_dma_rx_callback()
275 complete(&ebu_host->dma_access_complete); in ebu_dma_rx_callback()
282 dmaengine_terminate_async(ebu_host->dma_tx); in ebu_dma_tx_callback()
284 complete(&ebu_host->dma_access_complete); in ebu_dma_tx_callback()
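Both DMA completion callbacks run in the dmaengine's tasklet context, so they use the non-sleeping dmaengine_terminate_async() on their channel and then wake the thread parked in ebu_dma_start(). A single dma_access_complete completion serves both directions because only one transfer is in flight at a time. Reconstructed RX callback; the TX one differs only in the channel:

static void ebu_dma_rx_callback(void *cookie)
{
	struct ebu_nand_controller *ebu_host = cookie;

	/* Tear the channel down without sleeping, then wake the waiter. */
	dmaengine_terminate_async(ebu_host->dma_rx);
	complete(&ebu_host->dma_access_complete);
}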
301 chan = ebu_host->dma_rx; in ebu_dma_start()
302 dma_completion = &ebu_host->dma_access_complete; in ebu_dma_start()
305 chan = ebu_host->dma_tx; in ebu_dma_start()
306 dma_completion = &ebu_host->dma_access_complete; in ebu_dma_start()
310 buf_dma = dma_map_single(chan->device->dev, (void *)buf, len, dir); in ebu_dma_start()
311 if (dma_mapping_error(chan->device->dev, buf_dma)) { in ebu_dma_start()
312 dev_err(ebu_host->dev, "Failed to map DMA buffer\n"); in ebu_dma_start()
313 ret = -EIO; in ebu_dma_start()
319 ret = -ENXIO; in ebu_dma_start()
323 tx->callback = callback; in ebu_dma_start()
324 tx->callback_param = ebu_host; in ebu_dma_start()
325 cookie = tx->tx_submit(tx); in ebu_dma_start()
329 dev_err(ebu_host->dev, "dma_submit_error %d\n", cookie); in ebu_dma_start()
330 ret = -EIO; in ebu_dma_start()
340 dev_err(ebu_host->dev, "I/O Error in DMA RX (status %d)\n", in ebu_dma_start()
343 ret = -ETIMEDOUT; in ebu_dma_start()
350 dma_unmap_single(ebu_host->dev, buf_dma, len, dir); in ebu_dma_start()
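ebu_dma_start() is a textbook dmaengine slave transfer: pick the RX or TX channel by direction, dma_map_single() the buffer against the channel's DMA device, prepare a single-buffer slave descriptor, attach the callback, submit, issue, and sleep on the completion with a timeout. The middle of the function is not in the match list; a sketch of what it presumably contains (the descriptor flags and the 1 s budget are assumptions):

	/* Prepare one slave transfer covering the whole buffer. */
	tx = dmaengine_prep_slave_single(chan, buf_dma, len, dir,
					 DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
	if (!tx) {
		ret = -ENXIO;		/* matched at line 319 */
		goto err_unmap;
	}

	tx->callback = callback;
	tx->callback_param = ebu_host;
	cookie = tx->tx_submit(tx);
	if (dma_submit_error(cookie)) {
		dev_err(ebu_host->dev, "dma_submit_error %d\n", cookie);
		ret = -EIO;
		goto err_unmap;
	}

	init_completion(dma_completion);
	dma_async_issue_pending(chan);

	/* The -ETIMEDOUT at line 343 fires when this wait expires. */
	if (!wait_for_completion_timeout(dma_completion,
					 msecs_to_jiffies(1000))) {
		dmaengine_terminate_sync(chan);
		ret = -ETIMEDOUT;
		goto err_unmap;
	}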
361 writel(val, ebu_host->hsnand + HSNAND_CTL1); in ebu_nand_trigger()
363 writel(val, ebu_host->hsnand + HSNAND_CTL2); in ebu_nand_trigger()
365 writel(ebu_host->nd_para0, ebu_host->hsnand + HSNAND_PARA0); in ebu_nand_trigger()
368 writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_0); in ebu_nand_trigger()
369 writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_1); in ebu_nand_trigger()
372 ebu_host->hsnand + HSNAND_INT_MSK_CTL); in ebu_nand_trigger()
380 HSNAND_CTL_ECC_OFF_V8TH | HSNAND_CTL_CE_SEL_CS(ebu_host->cs_num) | in ebu_nand_trigger()
382 ebu_host->hsnand + HSNAND_CTL); in ebu_nand_trigger()
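ebu_nand_trigger() arms the HSNAND engine for one page operation: command and low address byte into HSNAND_CTL1, upper page-address bytes into HSNAND_CTL2, geometry from the cached nd_para0, 0xFFFFFFFF seeds in the CMSG scratch words, interrupt mask, and a final HSNAND_CTL write (lines 380-382) that selects the chip and starts the state machine. A hedged sketch of the address split; the shift macro and masks are assumptions:

	/* Command plus address byte 0 in CTL1, address bytes 1..2 in CTL2. */
	val = cmd | (page & 0xff) << HSNAND_CTL1_ADDR_SHIFT;
	writel(val, ebu_host->hsnand + HSNAND_CTL1);
	val = (page & 0xffff00) >> 8;
	writel(val, ebu_host->hsnand + HSNAND_CTL2);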
394 ret = ebu_dma_start(ebu_host, DMA_DEV_TO_MEM, buf, mtd->writesize); in ebu_nand_read_page_hwecc()
399 chip->ecc.read_oob(chip, page); in ebu_nand_read_page_hwecc()
401 reg_data = readl(ebu_host->hsnand + HSNAND_CTL); in ebu_nand_read_page_hwecc()
403 writel(reg_data, ebu_host->hsnand + HSNAND_CTL); in ebu_nand_read_page_hwecc()
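The hardware-ECC read path is short: trigger the engine, DMA the main area straight into buf, optionally pull the OOB through the generic path, then clear the engine's go/enable bits so the next operation starts clean. Reconstructed around the matched lines; the HSNAND_CTL_GO bit name is an assumption:

static int ebu_nand_read_page_hwecc(struct nand_chip *chip, u8 *buf,
				    int oob_required, int page)
{
	struct mtd_info *mtd = nand_to_mtd(chip);
	struct ebu_nand_controller *ebu_host = nand_get_controller_data(chip);
	int ret, reg_data;

	ebu_nand_trigger(ebu_host, page, NAND_CMD_READ0);

	ret = ebu_dma_start(ebu_host, DMA_DEV_TO_MEM, buf, mtd->writesize);
	if (ret)
		return ret;

	if (oob_required)
		chip->ecc.read_oob(chip, page);

	/* Stop the HSNAND engine now that the page is in memory. */
	reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
	reg_data &= ~HSNAND_CTL_GO;
	writel(reg_data, ebu_host->hsnand + HSNAND_CTL);

	return 0;
}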
413 void __iomem *int_sta = ebu_host->hsnand + HSNAND_INT_STA; in ebu_nand_write_page_hwecc()
419 ret = ebu_dma_start(ebu_host, DMA_MEM_TO_DEV, buf, mtd->writesize); in ebu_nand_write_page_hwecc()
424 reg = get_unaligned_le32(chip->oob_poi); in ebu_nand_write_page_hwecc()
425 writel(reg, ebu_host->hsnand + HSNAND_CMSG_0); in ebu_nand_write_page_hwecc()
427 reg = get_unaligned_le32(chip->oob_poi + 4); in ebu_nand_write_page_hwecc()
428 writel(reg, ebu_host->hsnand + HSNAND_CMSG_1); in ebu_nand_write_page_hwecc()
436 reg_data = readl(ebu_host->hsnand + HSNAND_CTL); in ebu_nand_write_page_hwecc()
438 writel(reg_data, ebu_host->hsnand + HSNAND_CTL); in ebu_nand_write_page_hwecc()
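The write path mirrors the read: DMA the page out, hand the first eight OOB bytes to the engine through CMSG_0/CMSG_1 (which is why the free OOB layout starts past them), then poll the HSNAND_INT_STA pointer captured at line 413 until the write-busy flag deasserts before shutting the engine off. Sketch of the completion wait; the bit name and the 10 us/1 ms budget are assumptions:

	/* Busy-wait until the engine drops its write-in-progress flag. */
	ret = readl_poll_timeout_atomic(int_sta, val,
					!(val & HSNAND_INT_STA_WR_C),
					10, 1000);
	if (ret)
		return ret;

	/* Then clear the go/enable bits, as in the read path. */
	reg_data = readl(ebu_host->hsnand + HSNAND_CTL);
	reg_data &= ~HSNAND_CTL_GO;
	writel(reg_data, ebu_host->hsnand + HSNAND_CTL);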
450 u32 ecc_strength_ds = chip->ecc.strength; in ebu_nand_attach_chip()
451 u32 ecc_size = chip->ecc.size; in ebu_nand_attach_chip()
452 u32 writesize = mtd->writesize; in ebu_nand_attach_chip()
453 u32 blocksize = mtd->erasesize; in ebu_nand_attach_chip()
457 if (!chip->ecc.size) in ebu_nand_attach_chip()
458 chip->ecc.size = 512; in ebu_nand_attach_chip()
472 return -EINVAL; in ebu_nand_attach_chip()
482 return -EINVAL; in ebu_nand_attach_chip()
491 if ((ecc_total + 8) > mtd->oobsize) in ebu_nand_attach_chip()
492 return -ERANGE; in ebu_nand_attach_chip()
494 chip->ecc.total = ecc_total; in ebu_nand_attach_chip()
497 return -ERANGE; in ebu_nand_attach_chip()
501 return -ERANGE; in ebu_nand_attach_chip()
503 ebu_host->nd_para0 = pagesize | pg_per_blk | HSNAND_PARA0_BYP_EN_NP | in ebu_nand_attach_chip()
508 chip->ecc.read_page = ebu_nand_read_page_hwecc; in ebu_nand_attach_chip()
509 chip->ecc.write_page = ebu_nand_write_page_hwecc; in ebu_nand_attach_chip()
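attach_chip validates the ECC geometry before installing the accelerated page handlers at lines 508-509: the step size defaults to 512 bytes, strength/size pairs outside the engine's table are rejected with -EINVAL, and the total ECC byte count must leave at least 8 spare OOB bytes, exactly the room the CMSG words consume on writes. A worked example with assumed per-step figures:

/* Illustrative numbers only (the per-step byte count is an assumption):
 *   writesize = 2048, ecc.size = 512       -> 4 ECC steps
 *   13 ECC bytes per step                  -> ecc_total = 52
 *   line 491 check: 52 + 8 <= oobsize (64) -> accepted, 4 bytes spare
 * nd_para0 then packs page size, pages per block and the ECC/bypass
 * flags for HSNAND_PARA0 (line 503). */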
525 for (op_id = 0; op_id < op->ninstrs; op_id++) { in ebu_nand_exec_op()
526 instr = &op->instrs[op_id]; in ebu_nand_exec_op()
528 switch (instr->type) { in ebu_nand_exec_op()
531 instr->ctx.cmd.opcode); in ebu_nand_exec_op()
535 for (i = 0; i < instr->ctx.addr.naddrs; i++) in ebu_nand_exec_op()
538 instr->ctx.addr.addrs[i]); in ebu_nand_exec_op()
542 ebu_read_buf(chip, instr->ctx.data.buf.in, in ebu_nand_exec_op()
543 instr->ctx.data.len); in ebu_nand_exec_op()
547 ebu_write_buf(chip, instr->ctx.data.buf.out, in ebu_nand_exec_op()
548 instr->ctx.data.len); in ebu_nand_exec_op()
552 timeout_ms = instr->ctx.waitrdy.timeout_ms * 1000; in ebu_nand_exec_op()
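exec_op() lowers the generic nand_operation stream onto the byte accessors: command and address cycles become ebu_nand_writeb() calls at the CLE- and ALE-decoded window offsets, data phases use the buffer helpers, and WAITRDY converts its millisecond budget into the microseconds that ebu_nand_waitrdy() feeds to readl_poll_timeout(). Condensed dispatch, with the window-offset macros assumed:

	switch (instr->type) {
	case NAND_OP_CMD_INSTR:
		ebu_nand_writeb(chip, HSNAND_CLE_OFFS | HSNAND_CS_OFFS,
				instr->ctx.cmd.opcode);
		break;
	case NAND_OP_ADDR_INSTR:
		for (i = 0; i < instr->ctx.addr.naddrs; i++)
			ebu_nand_writeb(chip,
					HSNAND_ALE_OFFS | HSNAND_CS_OFFS,
					instr->ctx.addr.addrs[i]);
		break;
	case NAND_OP_DATA_IN_INSTR:
		ebu_read_buf(chip, instr->ctx.data.buf.in,
			     instr->ctx.data.len);
		break;
	case NAND_OP_DATA_OUT_INSTR:
		ebu_write_buf(chip, instr->ctx.data.buf.out,
			      instr->ctx.data.len);
		break;
	case NAND_OP_WAITRDY_INSTR:
		/* ms -> us for the poll helper (see line 552). */
		timeout_ms = instr->ctx.waitrdy.timeout_ms * 1000;
		ret = ebu_nand_waitrdy(chip, timeout_ms);
		break;
	}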
569 if (ebu_host->dma_rx) in ebu_dma_cleanup()
570 dma_release_channel(ebu_host->dma_rx); in ebu_dma_cleanup()
572 if (ebu_host->dma_tx) in ebu_dma_cleanup()
573 dma_release_channel(ebu_host->dma_tx); in ebu_dma_cleanup()
578 struct device *dev = &pdev->dev; in ebu_nand_probe()
590 return -ENOMEM; in ebu_nand_probe()
592 ebu_host->dev = dev; in ebu_nand_probe()
593 nand_controller_init(&ebu_host->controller); in ebu_nand_probe()
595 ebu_host->ebu = devm_platform_ioremap_resource_byname(pdev, "ebunand"); in ebu_nand_probe()
596 if (IS_ERR(ebu_host->ebu)) in ebu_nand_probe()
597 return PTR_ERR(ebu_host->ebu); in ebu_nand_probe()
599 ebu_host->hsnand = devm_platform_ioremap_resource_byname(pdev, "hsnand"); in ebu_nand_probe()
600 if (IS_ERR(ebu_host->hsnand)) in ebu_nand_probe()
601 return PTR_ERR(ebu_host->hsnand); in ebu_nand_probe()
603 chip_np = of_get_next_child(dev->of_node, NULL); in ebu_nand_probe()
605 return dev_err_probe(dev, -EINVAL, in ebu_nand_probe()
615 ret = -EINVAL; in ebu_nand_probe()
619 ebu_host->cs_num = cs; in ebu_nand_probe()
623 ret = -ENOMEM; in ebu_nand_probe()
627 ebu_host->cs[cs].chipaddr = devm_platform_ioremap_resource_byname(pdev, in ebu_nand_probe()
629 if (IS_ERR(ebu_host->cs[cs].chipaddr)) { in ebu_nand_probe()
630 ret = PTR_ERR(ebu_host->cs[cs].chipaddr); in ebu_nand_probe()
634 ebu_host->clk = devm_clk_get_enabled(dev, NULL); in ebu_nand_probe()
635 if (IS_ERR(ebu_host->clk)) { in ebu_nand_probe()
636 ret = dev_err_probe(dev, PTR_ERR(ebu_host->clk), in ebu_nand_probe()
641 ebu_host->dma_tx = dma_request_chan(dev, "tx"); in ebu_nand_probe()
642 if (IS_ERR(ebu_host->dma_tx)) { in ebu_nand_probe()
643 ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx), in ebu_nand_probe()
648 ebu_host->dma_rx = dma_request_chan(dev, "rx"); in ebu_nand_probe()
649 if (IS_ERR(ebu_host->dma_rx)) { in ebu_nand_probe()
650 ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_rx), in ebu_nand_probe()
652 ebu_host->dma_rx = NULL; in ebu_nand_probe()
658 ret = -ENOMEM; in ebu_nand_probe()
664 ret = -EINVAL; in ebu_nand_probe()
667 ebu_host->cs[cs].addr_sel = res->start; in ebu_nand_probe()
668 writel(ebu_host->cs[cs].addr_sel | EBU_ADDR_MASK(5) | EBU_ADDR_SEL_REGEN, in ebu_nand_probe()
669 ebu_host->ebu + EBU_ADDR_SEL(cs)); in ebu_nand_probe()
671 nand_set_flash_node(&ebu_host->chip, chip_np); in ebu_nand_probe()
673 mtd = nand_to_mtd(&ebu_host->chip); in ebu_nand_probe()
674 if (!mtd->name) { in ebu_nand_probe()
675 dev_err(ebu_host->dev, "NAND label property is mandatory\n"); in ebu_nand_probe()
676 ret = -EINVAL; in ebu_nand_probe()
680 mtd->dev.parent = dev; in ebu_nand_probe()
681 ebu_host->dev = dev; in ebu_nand_probe()
684 nand_set_controller_data(&ebu_host->chip, ebu_host); in ebu_nand_probe()
686 nand = &ebu_host->chip; in ebu_nand_probe()
687 nand->controller = &ebu_host->controller; in ebu_nand_probe()
688 nand->controller->ops = &ebu_nand_controller_ops; in ebu_nand_probe()
691 ret = nand_scan(&ebu_host->chip, 1); in ebu_nand_probe()
702 nand_cleanup(&ebu_host->chip); in ebu_nand_probe()
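Probe follows the usual platform-driver shape: allocate state, map the "ebunand" and "hsnand" register ranges, read the chip select from the child node, map its per-CS window, grab the clock and both DMA channels (with dev_err_probe() so probe deferral stays quiet), program the EBU_ADDR_SEL window, then run nand_scan() and MTD registration. The tail with its unwind order presumably looks like this sketch (label names assumed):

	ret = nand_scan(&ebu_host->chip, 1);
	if (ret)
		goto err_cleanup_dma;

	ret = mtd_device_register(mtd, NULL, 0);
	if (ret)
		goto err_clean_nand;

	return 0;

err_clean_nand:
	nand_cleanup(&ebu_host->chip);	/* matched at line 702 */
err_cleanup_dma:
	ebu_dma_cleanup(ebu_host);
err_of_node_put:
	of_node_put(chip_np);		/* balances of_get_next_child() */

	return ret;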
716 ret = mtd_device_unregister(nand_to_mtd(&ebu_host->chip)); in ebu_nand_remove()
718 nand_cleanup(&ebu_host->chip); in ebu_nand_remove()
719 ebu_nand_disable(&ebu_host->chip); in ebu_nand_remove()
724 { .compatible = "intel,lgm-ebunand" },
733 .name = "intel-nand-controller",
742 MODULE_DESCRIPTION("Intel's LGM External Bus NAND Controller driver");
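Lines 724-742 are the tail boilerplate: the OF match table binds the "intel,lgm-ebunand" compatible to the driver, which registers under the name "intel-nand-controller". Assembled into the usual registration block (a sketch; the table and driver variable names are assumptions, the pattern itself is standard):

static const struct of_device_id ebu_nand_match[] = {
	{ .compatible = "intel,lgm-ebunand" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, ebu_nand_match);

static struct platform_driver ebu_nand_driver = {
	.probe = ebu_nand_probe,
	.remove = ebu_nand_remove,
	.driver = {
		.name = "intel-nand-controller",
		.of_match_table = ebu_nand_match,
	},
};
module_platform_driver(ebu_nand_driver);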