Lines Matching +full:lgm +full:- +full:nand

1 // SPDX-License-Identifier: GPL-2.0+
7 #include <linux/dma-direction.h>
8 #include <linux/dma-mapping.h>
17 #include <linux/mtd/nand.h>
102 #define NAND_DATA_IFACE_CHECK_ONLY -1
140 return readl_poll_timeout(ctrl->ebu + EBU_WAIT, status, in ebu_nand_waitrdy()
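
The fragment above is the core of the controller's wait-ready hook. A minimal sketch of how it plugs into readl_poll_timeout() from linux/iopoll.h; the ready-bit name EBU_WAIT_RDBY and the 20 us poll interval are assumptions not shown in the listing, and the caller converts milliseconds to microseconds before calling in, as the WAITRDY fragment later shows:

    /* Poll EBU_WAIT until the (assumed) ready bit is set or timeout_us
     * expires; returns 0 or -ETIMEDOUT, as readl_poll_timeout() does. */
    static int ebu_nand_waitrdy(struct nand_chip *chip, int timeout_us)
    {
            struct ebu_nand_controller *ctrl = nand_get_controller_data(chip);
            u32 status;

            return readl_poll_timeout(ctrl->ebu + EBU_WAIT, status,
                                      status & EBU_WAIT_RDBY, 20, timeout_us);
    }
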
148 u8 cs_num = ebu_host->cs_num; in ebu_nand_readb()
151 val = readb(ebu_host->cs[cs_num].chipaddr + HSNAND_CS_OFFS); in ebu_nand_readb()
159 u8 cs_num = ebu_host->cs_num; in ebu_nand_writeb()
161 writeb(value, ebu_host->cs[cs_num].chipaddr + offset); in ebu_nand_writeb()
185 writel(0, ebu_host->ebu + EBU_CON); in ebu_nand_disable()
191 void __iomem *nand_con = ebu_host->ebu + EBU_CON; in ebu_select_chip()
192 u32 cs = ebu_host->cs_num; in ebu_select_chip()
204 unsigned int rate = clk_get_rate(ctrl->clk) / HZ_PER_MHZ; in ebu_nand_set_timings()
217 trecov = DIV_ROUND_UP(max(timings->tREA_max, timings->tREH_min), in ebu_nand_set_timings()
221 thold = DIV_ROUND_UP(max(timings->tDH_min, timings->tDS_min), period); in ebu_nand_set_timings()
224 trdwait = DIV_ROUND_UP(max(timings->tRC_min, timings->tREH_min), in ebu_nand_set_timings()
228 twrwait = DIV_ROUND_UP(max(timings->tWC_min, timings->tWH_min), period); in ebu_nand_set_timings()
234 writel(reg, ctrl->ebu + EBU_BUSCON(ctrl->cs_num)); in ebu_nand_set_timings()
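
All of the timing fragments follow one pattern: take the relevant ONFI SDR timings (picoseconds in struct nand_sdr_timings), divide by the bus-clock period, and round up to whole cycles. A sketch of the arithmetic, assuming the period is derived from the MHz rate shown above:

    unsigned int rate = clk_get_rate(ctrl->clk) / HZ_PER_MHZ;  /* MHz */
    /* 10^6 / MHz gives the period in picoseconds, matching the ps
     * units of struct nand_sdr_timings. */
    unsigned int period = DIV_ROUND_UP(USEC_PER_SEC, rate);

    /* e.g. a 100 MHz bus gives period = 10000 ps, so a 25 ns tREA_max
     * (25000 ps) rounds up to 3 cycles. */
    trecov = DIV_ROUND_UP(max(timings->tREA_max, timings->tREH_min), period);
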
245 return -ERANGE; in ebu_nand_ooblayout_ecc()
247 oobregion->offset = HSNAND_ECC_OFFSET; in ebu_nand_ooblayout_ecc()
248 oobregion->length = chip->ecc.total; in ebu_nand_ooblayout_ecc()
259 return -ERANGE; in ebu_nand_ooblayout_free()
261 oobregion->offset = chip->ecc.total + HSNAND_ECC_OFFSET; in ebu_nand_ooblayout_free()
262 oobregion->length = mtd->oobsize - oobregion->offset; in ebu_nand_ooblayout_free()
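
The two callbacks above describe the OOB split: the ECC bytes sit at HSNAND_ECC_OFFSET, the free bytes follow them. A sketch of the ops table such callbacks conventionally hang off (struct mtd_ooblayout_ops is the standard hook; the table name is illustrative):

    static const struct mtd_ooblayout_ops ebu_nand_ooblayout_ops = {
            .ecc = ebu_nand_ooblayout_ecc,
            .free = ebu_nand_ooblayout_free,
    };

    /* typically installed during attach_chip: */
    mtd_set_ooblayout(mtd, &ebu_nand_ooblayout_ops);
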
276 dmaengine_terminate_async(ebu_host->dma_rx); in ebu_dma_rx_callback()
278 complete(&ebu_host->dma_access_complete); in ebu_dma_rx_callback()
285 dmaengine_terminate_async(ebu_host->dma_tx); in ebu_dma_tx_callback()
287 complete(&ebu_host->dma_access_complete); in ebu_dma_tx_callback()
304 chan = ebu_host->dma_rx; in ebu_dma_start()
305 dma_completion = &ebu_host->dma_access_complete; in ebu_dma_start()
308 chan = ebu_host->dma_tx; in ebu_dma_start()
309 dma_completion = &ebu_host->dma_access_complete; in ebu_dma_start()
313 buf_dma = dma_map_single(chan->device->dev, (void *)buf, len, dir); in ebu_dma_start()
314 if (dma_mapping_error(chan->device->dev, buf_dma)) { in ebu_dma_start()
315 dev_err(ebu_host->dev, "Failed to map DMA buffer\n"); in ebu_dma_start()
316 ret = -EIO; in ebu_dma_start()
322 ret = -ENXIO; in ebu_dma_start()
326 tx->callback = callback; in ebu_dma_start()
327 tx->callback_param = ebu_host; in ebu_dma_start()
328 cookie = tx->tx_submit(tx); in ebu_dma_start()
332 dev_err(ebu_host->dev, "dma_submit_error %d\n", cookie); in ebu_dma_start()
333 ret = -EIO; in ebu_dma_start()
343 dev_err(ebu_host->dev, "I/O error in DMA transfer (status %d)\n", in ebu_dma_start()
346 ret = -ETIMEDOUT; in ebu_dma_start()
353 dma_unmap_single(chan->device->dev, buf_dma, len, dir); in ebu_dma_start()
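
Read in order, the ebu_dma_start() fragments describe one transfer: select the RX or TX channel, map the buffer against the DMA device, prepare and submit a slave descriptor whose completion callback signals dma_access_complete, kick the engine, wait, unmap. (Note the buffer must be unmapped against the same device it was mapped with, chan->device->dev, as fixed above.) A condensed sketch with error unwinding trimmed; the prep flags and the one-second wait are assumptions consistent with the -ETIMEDOUT path:

    buf_dma = dma_map_single(chan->device->dev, (void *)buf, len, dir);
    tx = dmaengine_prep_slave_single(chan, buf_dma, len, dir,
                                     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
    tx->callback = callback;            /* ebu_dma_rx/tx_callback */
    tx->callback_param = ebu_host;
    cookie = tx->tx_submit(tx);         /* checked via dma_submit_error() */
    dma_async_issue_pending(chan);
    if (!wait_for_completion_timeout(dma_completion, msecs_to_jiffies(1000)))
            ret = -ETIMEDOUT;
    dma_unmap_single(chan->device->dev, buf_dma, len, dir);
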
364 writel(val, ebu_host->hsnand + HSNAND_CTL1); in ebu_nand_trigger()
366 writel(val, ebu_host->hsnand + HSNAND_CTL2); in ebu_nand_trigger()
368 writel(ebu_host->nd_para0, ebu_host->hsnand + HSNAND_PARA0); in ebu_nand_trigger()
371 writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_0); in ebu_nand_trigger()
372 writel(0xFFFFFFFF, ebu_host->hsnand + HSNAND_CMSG_1); in ebu_nand_trigger()
375 ebu_host->hsnand + HSNAND_INT_MSK_CTL); in ebu_nand_trigger()
383 HSNAND_CTL_ECC_OFF_V8TH | HSNAND_CTL_CE_SEL_CS(ebu_host->cs_num) | in ebu_nand_trigger()
385 ebu_host->hsnand + HSNAND_CTL); in ebu_nand_trigger()
397 ret = ebu_dma_start(ebu_host, DMA_DEV_TO_MEM, buf, mtd->writesize); in ebu_nand_read_page_hwecc()
402 chip->ecc.read_oob(chip, page); in ebu_nand_read_page_hwecc()
404 reg_data = readl(ebu_host->hsnand + HSNAND_CTL); in ebu_nand_read_page_hwecc()
406 writel(reg_data, ebu_host->hsnand + HSNAND_CTL); in ebu_nand_read_page_hwecc()
416 void __iomem *int_sta = ebu_host->hsnand + HSNAND_INT_STA; in ebu_nand_write_page_hwecc()
422 ret = ebu_dma_start(ebu_host, DMA_MEM_TO_DEV, buf, mtd->writesize); in ebu_nand_write_page_hwecc()
427 reg = get_unaligned_le32(chip->oob_poi); in ebu_nand_write_page_hwecc()
428 writel(reg, ebu_host->hsnand + HSNAND_CMSG_0); in ebu_nand_write_page_hwecc()
430 reg = get_unaligned_le32(chip->oob_poi + 4); in ebu_nand_write_page_hwecc()
431 writel(reg, ebu_host->hsnand + HSNAND_CMSG_1); in ebu_nand_write_page_hwecc()
439 reg_data = readl(ebu_host->hsnand + HSNAND_CTL); in ebu_nand_write_page_hwecc()
441 writel(reg_data, ebu_host->hsnand + HSNAND_CTL); in ebu_nand_write_page_hwecc()
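
Between writing the two OOB words to HSNAND_CMSG_0/1 and clearing the ECC-enable bits in HSNAND_CTL, the write path waits on the int_sta register declared at the top of the function. A hedged sketch; the bit name HSNAND_INT_STA_WR_C, its polarity, and the 10 us / 1000 us poll bounds are all assumptions:

    /* wait for the (assumed) write-complete indication in INT_STA */
    ret = readl_poll_timeout_atomic(int_sta, reg,
                                    reg & HSNAND_INT_STA_WR_C, 10, 1000);
    if (ret)
            return ret;
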
453 u32 ecc_strength_ds = chip->ecc.strength; in ebu_nand_attach_chip()
454 u32 ecc_size = chip->ecc.size; in ebu_nand_attach_chip()
455 u32 writesize = mtd->writesize; in ebu_nand_attach_chip()
456 u32 blocksize = mtd->erasesize; in ebu_nand_attach_chip()
460 if (!chip->ecc.size) in ebu_nand_attach_chip()
461 chip->ecc.size = 512; in ebu_nand_attach_chip()
475 return -EINVAL; in ebu_nand_attach_chip()
485 return -EINVAL; in ebu_nand_attach_chip()
494 if ((ecc_total + 8) > mtd->oobsize) in ebu_nand_attach_chip()
495 return -ERANGE; in ebu_nand_attach_chip()
497 chip->ecc.total = ecc_total; in ebu_nand_attach_chip()
500 return -ERANGE; in ebu_nand_attach_chip()
504 return -ERANGE; in ebu_nand_attach_chip()
506 ebu_host->nd_para0 = pagesize | pg_per_blk | HSNAND_PARA0_BYP_EN_NP | in ebu_nand_attach_chip()
511 chip->ecc.read_page = ebu_nand_read_page_hwecc; in ebu_nand_attach_chip()
512 chip->ecc.write_page = ebu_nand_write_page_hwecc; in ebu_nand_attach_chip()
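
attach_chip is where the per-chip ECC geometry is validated and the read/write page hooks above are installed. The hook itself, together with ebu_nand_set_timings() and ebu_nand_exec_op() from this listing, plugs into the ops table that probe later assigns; a sketch using the current struct nand_controller_ops member names:

    static const struct nand_controller_ops ebu_nand_controller_ops = {
            .attach_chip = ebu_nand_attach_chip,
            .setup_interface = ebu_nand_set_timings,
            .exec_op = ebu_nand_exec_op,
    };
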
528 for (op_id = 0; op_id < op->ninstrs; op_id++) { in ebu_nand_exec_op()
529 instr = &op->instrs[op_id]; in ebu_nand_exec_op()
531 switch (instr->type) { in ebu_nand_exec_op()
534 instr->ctx.cmd.opcode); in ebu_nand_exec_op()
538 for (i = 0; i < instr->ctx.addr.naddrs; i++) in ebu_nand_exec_op()
541 instr->ctx.addr.addrs[i]); in ebu_nand_exec_op()
545 ebu_read_buf(chip, instr->ctx.data.buf.in, in ebu_nand_exec_op()
546 instr->ctx.data.len); in ebu_nand_exec_op()
550 ebu_write_buf(chip, instr->ctx.data.buf.out, in ebu_nand_exec_op()
551 instr->ctx.data.len); in ebu_nand_exec_op()
555 timeout_ms = instr->ctx.waitrdy.timeout_ms * 1000; in ebu_nand_exec_op()
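
The exec_op fragments are the arms of a single switch over instr->type. A skeleton of the dispatch, reconstructed from the fragments; the CLE/ALE latch offsets are assumptions layered on the ebu_nand_writeb() helper shown earlier:

    switch (instr->type) {
    case NAND_OP_CMD_INSTR:
            ebu_nand_writeb(chip, HSNAND_CLE_OFFS | HSNAND_CS_OFFS,
                            instr->ctx.cmd.opcode);
            break;
    case NAND_OP_ADDR_INSTR:
            for (i = 0; i < instr->ctx.addr.naddrs; i++)
                    ebu_nand_writeb(chip, HSNAND_ALE_OFFS | HSNAND_CS_OFFS,
                                    instr->ctx.addr.addrs[i]);
            break;
    case NAND_OP_DATA_IN_INSTR:
            ebu_read_buf(chip, instr->ctx.data.buf.in, instr->ctx.data.len);
            break;
    case NAND_OP_DATA_OUT_INSTR:
            ebu_write_buf(chip, instr->ctx.data.buf.out, instr->ctx.data.len);
            break;
    case NAND_OP_WAITRDY_INSTR:
            /* timeout_ms is converted to microseconds before the poll */
            ret = ebu_nand_waitrdy(chip, instr->ctx.waitrdy.timeout_ms * 1000);
            break;
    }
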
572 if (ebu_host->dma_rx) in ebu_dma_cleanup()
573 dma_release_channel(ebu_host->dma_rx); in ebu_dma_cleanup()
575 if (ebu_host->dma_tx) in ebu_dma_cleanup()
576 dma_release_channel(ebu_host->dma_tx); in ebu_dma_cleanup()
581 struct device *dev = &pdev->dev; in ebu_nand_probe()
583 struct nand_chip *nand; in ebu_nand_probe()
592 return -ENOMEM; in ebu_nand_probe()
594 ebu_host->dev = dev; in ebu_nand_probe()
595 nand_controller_init(&ebu_host->controller); in ebu_nand_probe()
598 ebu_host->ebu = devm_ioremap_resource(&pdev->dev, res); in ebu_nand_probe()
599 if (IS_ERR(ebu_host->ebu)) in ebu_nand_probe()
600 return PTR_ERR(ebu_host->ebu); in ebu_nand_probe()
603 ebu_host->hsnand = devm_ioremap_resource(&pdev->dev, res); in ebu_nand_probe()
604 if (IS_ERR(ebu_host->hsnand)) in ebu_nand_probe()
605 return PTR_ERR(ebu_host->hsnand); in ebu_nand_probe()
612 ebu_host->cs_num = cs; in ebu_nand_probe()
616 ebu_host->cs[cs].chipaddr = devm_ioremap_resource(dev, res); in ebu_nand_probe()
617 ebu_host->cs[cs].nand_pa = res->start; in ebu_nand_probe()
618 if (IS_ERR(ebu_host->cs[cs].chipaddr)) in ebu_nand_probe()
619 return PTR_ERR(ebu_host->cs[cs].chipaddr); in ebu_nand_probe()
621 ebu_host->clk = devm_clk_get(dev, NULL); in ebu_nand_probe()
622 if (IS_ERR(ebu_host->clk)) in ebu_nand_probe()
623 return dev_err_probe(dev, PTR_ERR(ebu_host->clk), in ebu_nand_probe()
626 ret = clk_prepare_enable(ebu_host->clk); in ebu_nand_probe()
631 ebu_host->clk_rate = clk_get_rate(ebu_host->clk); in ebu_nand_probe()
633 ebu_host->dma_tx = dma_request_chan(dev, "tx"); in ebu_nand_probe()
634 if (IS_ERR(ebu_host->dma_tx)) { in ebu_nand_probe()
635 ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx), in ebu_nand_probe()
640 ebu_host->dma_rx = dma_request_chan(dev, "rx"); in ebu_nand_probe()
641 if (IS_ERR(ebu_host->dma_rx)) { in ebu_nand_probe()
642 ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_rx), in ebu_nand_probe()
644 ebu_host->dma_rx = NULL; in ebu_nand_probe()
651 ret = -EINVAL; in ebu_nand_probe()
654 ebu_host->cs[cs].addr_sel = res->start; in ebu_nand_probe()
655 writel(ebu_host->cs[cs].addr_sel | EBU_ADDR_MASK(5) | EBU_ADDR_SEL_REGEN, in ebu_nand_probe()
656 ebu_host->ebu + EBU_ADDR_SEL(cs)); in ebu_nand_probe()
658 nand_set_flash_node(&ebu_host->chip, dev->of_node); in ebu_nand_probe()
660 mtd = nand_to_mtd(&ebu_host->chip); in ebu_nand_probe()
661 if (!mtd->name) { in ebu_nand_probe()
662 dev_err(ebu_host->dev, "NAND label property is mandatory\n"); in ebu_nand_probe()
663 ret = -EINVAL; in ebu_nand_probe()
667 mtd->dev.parent = dev; in ebu_nand_probe()
668 ebu_host->dev = dev; in ebu_nand_probe()
671 nand_set_controller_data(&ebu_host->chip, ebu_host); in ebu_nand_probe()
673 nand = &ebu_host->chip; in ebu_nand_probe()
674 nand->controller = &ebu_host->controller; in ebu_nand_probe()
675 nand->controller->ops = &ebu_nand_controller_ops; in ebu_nand_probe()
678 ret = nand_scan(&ebu_host->chip, 1); in ebu_nand_probe()
689 nand_cleanup(&ebu_host->chip); in ebu_nand_probe()
693 clk_disable_unprepare(ebu_host->clk); in ebu_nand_probe()
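
The two cleanup fragments imply the usual reverse-order unwind at the bottom of probe: undo the NAND scan, release the DMA channels, then drop the clock. A sketch with illustrative label names:

    err_cleanup_nand:
            nand_cleanup(&ebu_host->chip);
    err_cleanup_dma:
            ebu_dma_cleanup(ebu_host);
            clk_disable_unprepare(ebu_host->clk);

            return ret;
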
703 ret = mtd_device_unregister(nand_to_mtd(&ebu_host->chip)); in ebu_nand_remove()
705 nand_cleanup(&ebu_host->chip); in ebu_nand_remove()
706 ebu_nand_disable(&ebu_host->chip); in ebu_nand_remove()
708 clk_disable_unprepare(ebu_host->clk); in ebu_nand_remove()
714 { .compatible = "intel,nand-controller" },
715 { .compatible = "intel,lgm-ebunand" },
724 .name = "intel-nand-controller",
733 MODULE_DESCRIPTION("Intel's LGM External Bus NAND Controller driver");