Lines Matching "nand-no-ecc-engine"

1 // SPDX-License-Identifier: GPL-2.0-only
3 * Copyright © 2009 - Maxim Levitsky
31 MODULE_PARM_DESC(debug, "Debug level (0-2)");
36 uint8_t reg = readb(dev->mmio + address); in r852_read_reg()
44 writeb(value, dev->mmio + address); in r852_write_reg()
51 uint32_t reg = le32_to_cpu(readl(dev->mmio + address)); in r852_read_reg_dword()
59 writel(cpu_to_le32(value), dev->mmio + address); in r852_write_reg_dword()
73 dev->dma_usable = (r852_read_reg(dev, R852_DMA_CAP) & in r852_dma_test()
76 if (!dev->dma_usable) in r852_dma_test()
81 dev->dma_usable = 0; in r852_dma_test()
87 * Expects dev->dma_dir and dev->dma_state to be set
97 if (dev->dma_dir) in r852_dma_enable()
100 if (dev->dma_state == DMA_INTERNAL) { in r852_dma_enable()
105 cpu_to_le32(dev->phys_bounce_buffer)); in r852_dma_enable()
109 cpu_to_le32(dev->phys_dma_addr)); in r852_dma_enable()
132 WARN_ON(dev->dma_stage == 0); in r852_dma_done()
142 cpu_to_le32(dev->phys_bounce_buffer)); in r852_dma_done()
145 dev->dma_error = error; in r852_dma_done()
146 dev->dma_stage = 0; in r852_dma_done()
148 if (dev->phys_dma_addr && dev->phys_dma_addr != dev->phys_bounce_buffer) in r852_dma_done()
149 dma_unmap_single(&dev->pci_dev->dev, dev->phys_dma_addr, in r852_dma_done()
151 dev->dma_dir ? DMA_FROM_DEVICE : DMA_TO_DEVICE); in r852_dma_done()
159 long timeout = wait_for_completion_timeout(&dev->dma_done, in r852_dma_wait()
163 return -ETIMEDOUT; in r852_dma_wait()
178 dev->dma_error = 0; in r852_do_dma()
181 dev->dma_dir = do_read; in r852_do_dma()
182 dev->dma_stage = 1; in r852_do_dma()
183 reinit_completion(&dev->dma_done); in r852_do_dma()
189 dev->dma_state = do_read ? DMA_INTERNAL : DMA_MEMORY; in r852_do_dma()
192 if ((unsigned long)buf & (R852_DMA_LEN-1)) in r852_do_dma()
196 dev->phys_dma_addr = dma_map_single(&dev->pci_dev->dev, buf, in r852_do_dma()
199 if (dma_mapping_error(&dev->pci_dev->dev, dev->phys_dma_addr)) in r852_do_dma()
205 dev->phys_dma_addr = dev->phys_bounce_buffer; in r852_do_dma()
207 memcpy(dev->bounce_buffer, buf, R852_DMA_LEN); in r852_do_dma()
211 spin_lock_irqsave(&dev->irqlock, flags); in r852_do_dma()
213 spin_unlock_irqrestore(&dev->irqlock, flags); in r852_do_dma()
224 memcpy((void *)buf, dev->bounce_buffer, R852_DMA_LEN); in r852_do_dma()
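
The r852_do_dma() fragments above encode a simple rule: a transfer can be mapped for streaming DMA directly only when the caller's buffer is R852_DMA_LEN-aligned and the mapping succeeds; anything else goes through the coherent bounce buffer allocated at probe time (with a copy-in on writes, and the matching copy-out on reads at line 224 of the listing). A minimal sketch of that decision, using only fields that appear in the listing; the helper name is made up for illustration and does not exist in the driver:

/* Illustrative only - r852_map_dma_buffer() is not part of the driver;
 * it condenses the mapping logic of r852_do_dma() shown above. */
static dma_addr_t r852_map_dma_buffer(struct r852_device *dev,
                                      void *buf, int do_read)
{
        dma_addr_t addr;

        /* aligned buffers can be mapped for streaming DMA directly */
        if (!((unsigned long)buf & (R852_DMA_LEN - 1))) {
                addr = dma_map_single(&dev->pci_dev->dev, buf, R852_DMA_LEN,
                                      do_read ? DMA_FROM_DEVICE :
                                                DMA_TO_DEVICE);
                if (!dma_mapping_error(&dev->pci_dev->dev, addr))
                        return addr;
        }

        /* otherwise fall back to the coherent bounce buffer;
         * writes must be staged into it first */
        if (!do_read)
                memcpy(dev->bounce_buffer, buf, R852_DMA_LEN);
        return dev->phys_bounce_buffer;
}
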
228 * Program data lines of the nand chip to send data to it
236 if (dev->card_unstable) in r852_write_buf()
240 if (len == R852_DMA_LEN && dev->dma_usable) { in r852_write_buf()
245 /* write DWORD chunks - faster */ in r852_write_buf()
250 len -= 4; in r852_write_buf()
257 len--; in r852_write_buf()
262 * Read data lines of the nand chip to retrieve data
269 if (dev->card_unstable) { in r852_read_buf()
277 if (len == R852_DMA_LEN && dev->dma_usable) { in r852_read_buf()
290 len -= 4; in r852_read_buf()
294 while (len--) in r852_read_buf()
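
Both buffer paths share the same PIO fallback shape when the length is not exactly R852_DMA_LEN or DMA is unusable: 32-bit accesses while four or more bytes remain, then byte accesses for the tail. A sketch of the read side follows; it reuses r852_read_reg()/r852_read_reg_dword() from the listing, while the data window register name R852_DATALINE is an assumption (it does not appear in these results):

/* Sketch of the PIO fallback visible in r852_read_buf() above;
 * the R852_DATALINE register name is an assumption. */
static void r852_read_buf_pio(struct r852_device *dev, uint8_t *buf, int len)
{
        uint32_t reg;

        /* DWORD reads first - faster than byte-wide accesses */
        while (len >= 4) {
                reg = r852_read_reg_dword(dev, R852_DATALINE);
                memcpy(buf, &reg, 4);
                buf += 4;
                len -= 4;
        }

        /* then whatever is left, one byte at a time */
        while (len--)
                *buf++ = r852_read_reg(dev, R852_DATALINE);
}
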
299 * Read one byte from nand chip
306 if (dev->card_unstable) in r852_read_byte()
319 if (dev->card_unstable) in r852_cmdctl()
324 dev->ctlreg &= ~(R852_CTL_DATA | R852_CTL_COMMAND | in r852_cmdctl()
328 dev->ctlreg |= R852_CTL_DATA; in r852_cmdctl()
331 dev->ctlreg |= R852_CTL_COMMAND; in r852_cmdctl()
334 dev->ctlreg |= (R852_CTL_CARDENABLE | R852_CTL_ON); in r852_cmdctl()
336 dev->ctlreg &= ~R852_CTL_WRITE; in r852_cmdctl()
340 dev->ctlreg |= R852_CTL_WRITE; in r852_cmdctl()
342 r852_write_reg(dev, R852_CTL, dev->ctlreg); in r852_cmdctl()
347 if (dat == NAND_CMD_SEQIN && (dev->ctlreg & R852_CTL_COMMAND)) { in r852_cmdctl()
348 dev->ctlreg |= R852_CTL_WRITE; in r852_cmdctl()
349 r852_write_reg(dev, R852_CTL, dev->ctlreg); in r852_cmdctl()
370 if (chip->legacy.dev_ready(chip)) in r852_wait()
375 /* Unfortunately, no way to send detailed error status... */ in r852_wait()
376 if (dev->dma_error) { in r852_wait()
378 dev->dma_error = 0; in r852_wait()
395 * Set ECC engine mode
402 if (dev->card_unstable) in r852_ecc_hwctl()
408 /* enable ecc generation/check */ in r852_ecc_hwctl()
409 dev->ctlreg |= R852_CTL_ECC_ENABLE; in r852_ecc_hwctl()
411 /* flush ecc buffer */ in r852_ecc_hwctl()
413 dev->ctlreg | R852_CTL_ECC_ACCESS); in r852_ecc_hwctl()
416 r852_write_reg(dev, R852_CTL, dev->ctlreg); in r852_ecc_hwctl()
420 /* disable ecc generation */ in r852_ecc_hwctl()
421 dev->ctlreg &= ~R852_CTL_ECC_ENABLE; in r852_ecc_hwctl()
422 r852_write_reg(dev, R852_CTL, dev->ctlreg); in r852_ecc_hwctl()
427 * Calculate ECC, only used for writes
437 if (dev->card_unstable) in r852_ecc_calculate()
440 dev->ctlreg &= ~R852_CTL_ECC_ENABLE; in r852_ecc_calculate()
441 r852_write_reg(dev, R852_CTL, dev->ctlreg | R852_CTL_ECC_ACCESS); in r852_ecc_calculate()
446 oob->ecc1[0] = (ecc1) & 0xFF; in r852_ecc_calculate()
447 oob->ecc1[1] = (ecc1 >> 8) & 0xFF; in r852_ecc_calculate()
448 oob->ecc1[2] = (ecc1 >> 16) & 0xFF; in r852_ecc_calculate()
450 oob->ecc2[0] = (ecc2) & 0xFF; in r852_ecc_calculate()
451 oob->ecc2[1] = (ecc2 >> 8) & 0xFF; in r852_ecc_calculate()
452 oob->ecc2[2] = (ecc2 >> 16) & 0xFF; in r852_ecc_calculate()
454 r852_write_reg(dev, R852_CTL, dev->ctlreg); in r852_ecc_calculate()
459 * Correct the data using ECC; the hardware did almost everything for us
471 if (dev->card_unstable) in r852_ecc_correct()
474 if (dev->dma_error) { in r852_ecc_correct()
475 dev->dma_error = 0; in r852_ecc_correct()
476 return -EIO; in r852_ecc_correct()
479 r852_write_reg(dev, R852_CTL, dev->ctlreg | R852_CTL_ECC_ACCESS); in r852_ecc_correct()
481 r852_write_reg(dev, R852_CTL, dev->ctlreg); in r852_ecc_correct()
487 /* ecc uncorrectable error */ in r852_ecc_correct()
489 dbg("ecc: unrecoverable error, in half %d", i); in r852_ecc_correct()
490 error = -EBADMSG; in r852_ecc_correct()
498 dbg("ecc: recoverable error, " in r852_ecc_correct()
516 * nand_read_oob_syndrome assumes we can send a column address - we can't
522 return nand_read_oob_op(chip, page, 0, chip->oob_poi, mtd->oobsize); in r852_read_oob()
526 * Start the nand engine
544 * Stop the nand engine
562 spin_lock_irqsave(&dev->irqlock, flags); in r852_card_update_present()
564 dev->card_detected = !!(reg & R852_CARD_STA_PRESENT); in r852_card_update_present()
565 spin_unlock_irqrestore(&dev->irqlock, flags); in r852_card_update_present()
575 dev->card_unstable = 0; in r852_update_card_detect()
580 card_detect_reg |= dev->card_detected ? in r852_update_card_detect()
591 char *data = dev->sm ? "smartmedia" : "xd"; in r852_media_type_show()
607 spin_lock_irqsave(&dev->irqlock, flags); in r852_update_media_status()
608 if (!dev->card_detected) { in r852_update_media_status()
610 spin_unlock_irqrestore(&dev->irqlock, flags); in r852_update_media_status()
616 dev->sm = (reg & (R852_DMA1 | R852_DMA2)) && (reg & R852_SMBIT); in r852_update_media_status()
619 dev->sm ? "SmartMedia" : "xD", in r852_update_media_status()
622 dev->readonly = readonly; in r852_update_media_status()
623 spin_unlock_irqrestore(&dev->irqlock, flags); in r852_update_media_status()
627 * Register the nand device
632 struct mtd_info *mtd = nand_to_mtd(dev->chip); in r852_register_nand_device()
634 WARN_ON(dev->card_registered); in r852_register_nand_device()
636 mtd->dev.parent = &dev->pci_dev->dev; in r852_register_nand_device()
638 if (dev->readonly) in r852_register_nand_device()
639 dev->chip->options |= NAND_ROM; in r852_register_nand_device()
643 if (sm_register_device(mtd, dev->sm)) in r852_register_nand_device()
646 if (device_create_file(&mtd->dev, &dev_attr_media_type)) { in r852_register_nand_device()
651 dev->card_registered = 1; in r852_register_nand_device()
654 WARN_ON(mtd_device_unregister(nand_to_mtd(dev->chip))); in r852_register_nand_device()
655 nand_cleanup(dev->chip); in r852_register_nand_device()
658 dev->card_detected = 0; in r852_register_nand_device()
659 return -1; in r852_register_nand_device()
668 struct mtd_info *mtd = nand_to_mtd(dev->chip); in r852_unregister_nand_device()
670 if (!dev->card_registered) in r852_unregister_nand_device()
673 device_remove_file(&mtd->dev, &dev_attr_media_type); in r852_unregister_nand_device()
675 nand_cleanup(dev->chip); in r852_unregister_nand_device()
677 dev->card_registered = 0; in r852_unregister_nand_device()
688 dev->card_unstable = 0; in r852_card_detect_work()
691 if (dev->card_detected == dev->card_registered) in r852_card_detect_work()
698 if (dev->card_detected) in r852_card_detect_work()
730 spin_lock_irqsave(&dev->irqlock, flags); in r852_irq()
739 dev->card_detected = !!(card_status & R852_CARD_IRQ_INSERT); in r852_irq()
743 WARN_ON(dev->card_unstable); in r852_irq()
749 if (dev->card_unstable) in r852_irq()
753 dev->card_unstable = 1; in r852_irq()
754 queue_delayed_work(dev->card_workqueue, in r852_irq()
755 &dev->card_detect_work, msecs_to_jiffies(100)); in r852_irq()
770 r852_dma_done(dev, -EIO); in r852_irq()
771 complete(&dev->dma_done); in r852_irq()
776 WARN_ON_ONCE(dev->dma_stage == 0); in r852_irq()
778 if (dev->dma_stage == 0) in r852_irq()
782 if (dev->dma_state == DMA_INTERNAL && in r852_irq()
785 dev->dma_state = DMA_MEMORY; in r852_irq()
786 dev->dma_stage++; in r852_irq()
790 if (dev->dma_state == DMA_MEMORY && in r852_irq()
792 dev->dma_state = DMA_INTERNAL; in r852_irq()
793 dev->dma_stage++; in r852_irq()
797 if (dev->dma_stage == 2) in r852_irq()
801 if (dev->dma_stage == 3) { in r852_irq()
803 complete(&dev->dma_done); in r852_irq()
816 spin_unlock_irqrestore(&dev->irqlock, flags); in r852_irq()
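
The dma_stage/dma_state bookkeeping in the interrupt handler reflects the controller's two-phase DMA: each r852_do_dma() call consists of one transfer between the card and the controller's internal buffer and one between that buffer and host memory, in direction-dependent order. Below is a condensed paraphrase of what the stage counter does; the real handler above also validates the per-phase DMA status bits and handles the error path separately:

/* Condensed paraphrase of the stage handling in r852_irq(); the real
 * code checks the DMA status bits before taking each step. */
static void r852_dma_phase_done(struct r852_device *dev)
{
        dev->dma_stage++;

        if (dev->dma_stage == 2) {
                /* first phase finished: flip between card<->internal
                 * and internal<->memory, then start the second phase */
                dev->dma_state = (dev->dma_state == DMA_INTERNAL) ?
                                 DMA_MEMORY : DMA_INTERNAL;
                r852_dma_enable(dev);
                return;
        }

        if (dev->dma_stage == 3) {
                /* both phases finished: release the waiter
                 * sleeping in r852_dma_wait() */
                r852_dma_done(dev, 0);
                complete(&dev->dma_done);
        }
}
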
822 if (chip->ecc.engine_type != NAND_ECC_ENGINE_TYPE_ON_HOST) in r852_attach_chip()
825 chip->ecc.placement = NAND_ECC_PLACEMENT_INTERLEAVED; in r852_attach_chip()
826 chip->ecc.size = R852_DMA_LEN; in r852_attach_chip()
827 chip->ecc.bytes = SM_OOB_SIZE; in r852_attach_chip()
828 chip->ecc.strength = 2; in r852_attach_chip()
829 chip->ecc.hwctl = r852_ecc_hwctl; in r852_attach_chip()
830 chip->ecc.calculate = r852_ecc_calculate; in r852_attach_chip()
831 chip->ecc.correct = r852_ecc_correct; in r852_attach_chip()
834 chip->ecc.read_oob = r852_read_oob; in r852_attach_chip()
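
r852_attach_chip() itself is invoked by the core from nand_scan(), once the flash has been identified, through the controller's ops table; the probe listing below installs it with dev->controller.ops = &r852_ops. A sketch of that glue, matching the ops-table name used in the probe lines:

/* The attach_chip hook is wired up through nand_controller_ops; this
 * mirrors how &r852_ops is referenced in r852_probe() below. */
static const struct nand_controller_ops r852_ops = {
        .attach_chip = r852_attach_chip,
};

Because .attach_chip runs only after device identification, the ECC configuration above is applied per detected chip rather than unconditionally in probe.
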
857 error = dma_set_mask(&pci_dev->dev, DMA_BIT_MASK(32)); in r852_probe()
866 error = -ENOMEM; in r852_probe()
868 /* init nand chip, but register it only on card insert */ in r852_probe()
875 chip->legacy.cmd_ctrl = r852_cmdctl; in r852_probe()
876 chip->legacy.waitfunc = r852_wait; in r852_probe()
877 chip->legacy.dev_ready = r852_ready; in r852_probe()
880 chip->legacy.read_byte = r852_read_byte; in r852_probe()
881 chip->legacy.read_buf = r852_read_buf; in r852_probe()
882 chip->legacy.write_buf = r852_write_buf; in r852_probe()
891 dev->chip = chip; in r852_probe()
892 dev->pci_dev = pci_dev; in r852_probe()
895 nand_controller_init(&dev->controller); in r852_probe()
896 dev->controller.ops = &r852_ops; in r852_probe()
897 chip->controller = &dev->controller; in r852_probe()
899 dev->bounce_buffer = dma_alloc_coherent(&pci_dev->dev, R852_DMA_LEN, in r852_probe()
900 &dev->phys_bounce_buffer, GFP_KERNEL); in r852_probe()
902 if (!dev->bounce_buffer) in r852_probe()
906 error = -ENODEV; in r852_probe()
907 dev->mmio = pci_ioremap_bar(pci_dev, 0); in r852_probe()
909 if (!dev->mmio) in r852_probe()
912 error = -ENOMEM; in r852_probe()
913 dev->tmp_buffer = kzalloc(SM_SECTOR_SIZE, GFP_KERNEL); in r852_probe()
915 if (!dev->tmp_buffer) in r852_probe()
918 init_completion(&dev->dma_done); in r852_probe()
920 dev->card_workqueue = create_freezable_workqueue(DRV_NAME); in r852_probe()
922 if (!dev->card_workqueue) in r852_probe()
925 INIT_DELAYED_WORK(&dev->card_detect_work, r852_card_detect_work); in r852_probe()
927 /* shut down everything - precaution */ in r852_probe()
933 dev->irq = pci_dev->irq; in r852_probe()
934 spin_lock_init(&dev->irqlock); in r852_probe()
936 dev->card_detected = 0; in r852_probe()
940 error = -ENODEV; in r852_probe()
941 if (request_irq(pci_dev->irq, &r852_irq, IRQF_SHARED, in r852_probe()
946 queue_delayed_work(dev->card_workqueue, in r852_probe()
947 &dev->card_detect_work, 0); in r852_probe()
954 destroy_workqueue(dev->card_workqueue); in r852_probe()
956 kfree(dev->tmp_buffer); in r852_probe()
958 pci_iounmap(pci_dev, dev->mmio); in r852_probe()
960 dma_free_coherent(&pci_dev->dev, R852_DMA_LEN, dev->bounce_buffer, in r852_probe()
961 dev->phys_bounce_buffer); in r852_probe()
979 /* Stop detect workqueue - in r852_remove()
981 cancel_delayed_work_sync(&dev->card_detect_work); in r852_remove()
982 destroy_workqueue(dev->card_workqueue); in r852_remove()
989 free_irq(dev->irq, dev); in r852_remove()
992 kfree(dev->tmp_buffer); in r852_remove()
993 pci_iounmap(pci_dev, dev->mmio); in r852_remove()
994 dma_free_coherent(&pci_dev->dev, R852_DMA_LEN, dev->bounce_buffer, in r852_remove()
995 dev->phys_bounce_buffer); in r852_remove()
997 kfree(dev->chip); in r852_remove()
1009 cancel_delayed_work_sync(&dev->card_detect_work); in r852_shutdown()
1011 synchronize_irq(dev->irq); in r852_shutdown()
1020 if (dev->ctlreg & R852_CTL_CARDENABLE) in r852_suspend()
1021 return -EBUSY; in r852_suspend()
1024 cancel_delayed_work_sync(&dev->card_detect_work); in r852_suspend()
1033 dev->card_unstable = 0; in r852_suspend()
1047 if (dev->card_detected != dev->card_registered) { in r852_resume()
1049 dev->card_detected ? "added" : "removed"); in r852_resume()
1051 queue_delayed_work(dev->card_workqueue, in r852_resume()
1052 &dev->card_detect_work, msecs_to_jiffies(1000)); in r852_resume()
1057 if (dev->card_registered) { in r852_resume()
1059 nand_select_target(dev->chip, 0); in r852_resume()
1060 nand_reset_op(dev->chip); in r852_resume()
1061 nand_deselect_target(dev->chip); in r852_resume()