Lines matching "mt8173-nor" (Mediatek SPI NOR controller driver, mtk-spi-nor)

1 // SPDX-License-Identifier: GPL-2.0
3 // Mediatek SPI NOR controller driver
10 #include <linux/dma-mapping.h>
19 #include <linux/spi/spi-mem.h>
22 #define DRIVER_NAME "mtk-spi-nor"
87 // DMA read src/dst addresses have to be 16-byte aligned
89 #define MTK_NOR_DMA_ALIGN_MASK (MTK_NOR_DMA_ALIGN - 1)
93 // Buffered page program can do one 128-byte transfer
96 #define CLK_TO_US(sp, clkcnt) DIV_ROUND_UP(clkcnt, sp->spi_freq / 1000000)
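
A quick worked example of CLK_TO_US(): it converts a clock-cycle count into a
microsecond bound for the register polls used throughout the driver. The
26 MHz figure below is assumed purely for the arithmetic, not taken from the
driver.

/*
 * Example with an assumed sp->spi_freq of 26 MHz:
 *   sp->spi_freq / 1000000 = 26            clock cycles per microsecond
 *   CLK_TO_US(sp, 400)     = DIV_ROUND_UP(400, 26) = 16
 * i.e. a 400-cycle sequence is bounded by roughly 16 us, which callers can
 * scale up before passing it to readl_poll_timeout() as a timeout.
 */
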
103 * and IDs of NOR flash, they need an extra_dummy_bit which can add
129 u32 val = readl(sp->base + reg); in mtk_nor_rmw()
133 writel(val, sp->base + reg); in mtk_nor_rmw()
142 writel(cmd, sp->base + MTK_NOR_REG_CMD); in mtk_nor_cmd_exec()
143 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CMD, reg, !(reg & cmd), in mtk_nor_cmd_exec()
146 dev_err(sp->dev, "command %u timeout.\n", cmd); in mtk_nor_cmd_exec()
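
The fragments above use a common "kick and wait for self-clearing bits"
pattern: write the command bits, then poll the same register until the
hardware clears them again. A minimal sketch of that pattern follows;
kick_and_wait() and its timeout argument are illustrative, not the driver's
own helper (it needs <linux/io.h> and <linux/iopoll.h>).

static int kick_and_wait(void __iomem *reg_cmd, u32 cmd_bits, ulong timeout_us)
{
	u32 reg;

	/* start the operation */
	writel(cmd_bits, reg_cmd);

	/* poll the same register until the hardware clears the bits, or time out */
	return readl_poll_timeout(reg_cmd, reg, !(reg & cmd_bits), 0, timeout_us);
}
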
152 u32 addr = op->addr.val; in mtk_nor_set_addr()
156 writeb(addr & 0xff, sp->base + MTK_NOR_REG_RADR(i)); in mtk_nor_set_addr()
159 if (op->addr.nbytes == 4) { in mtk_nor_set_addr()
160 writeb(addr & 0xff, sp->base + MTK_NOR_REG_RADR3); in mtk_nor_set_addr()
169 return ((uintptr_t)op->data.buf.in & MTK_NOR_DMA_ALIGN_MASK); in need_bounce()
176 if (op->dummy.nbytes) in mtk_nor_match_read()
177 dummy = op->dummy.nbytes * BITS_PER_BYTE / op->dummy.buswidth; in mtk_nor_match_read()
179 if ((op->data.buswidth == 2) || (op->data.buswidth == 4)) { in mtk_nor_match_read()
180 if (op->addr.buswidth == 1) in mtk_nor_match_read()
182 else if (op->addr.buswidth == 2) in mtk_nor_match_read()
184 else if (op->addr.buswidth == 4) in mtk_nor_match_read()
186 } else if ((op->addr.buswidth == 1) && (op->data.buswidth == 1)) { in mtk_nor_match_read()
187 if (op->cmd.opcode == 0x03) in mtk_nor_match_read()
189 else if (op->cmd.opcode == 0x0b) in mtk_nor_match_read()
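
The dummy-cycle count above is simply the number of dummy bits divided by the
dummy-phase bus width. Two worked cases of that formula (the 0x03/0x0b opcodes
are the ones already matched above):

/*
 * dummy cycles = dummy.nbytes * BITS_PER_BYTE / dummy.buswidth
 *   nbytes = 1, buswidth = 1  ->  8 dummy cycles (a typical 0x0b fast read)
 *   nbytes = 3, buswidth = 4  ->  6 dummy cycles
 * A plain 0x03 read carries no dummy phase at all.
 */
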
199 // prg mode is spi-only. in mtk_nor_match_prg()
200 if ((op->cmd.buswidth > 1) || (op->addr.buswidth > 1) || in mtk_nor_match_prg()
201 (op->dummy.buswidth > 1) || (op->data.buswidth > 1)) in mtk_nor_match_prg()
204 tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_match_prg()
206 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_match_prg()
208 tx_len += op->dummy.nbytes; in mtk_nor_match_prg()
216 if ((!op->addr.nbytes) && in mtk_nor_match_prg()
217 (tx_len + op->data.nbytes > MTK_NOR_REG_PRGDATA_MAX + 1)) in mtk_nor_match_prg()
219 } else if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_nor_match_prg()
223 rx_len = op->data.nbytes; in mtk_nor_match_prg()
224 prg_left = MTK_NOR_PRG_CNT_MAX / 8 - tx_len - op->dummy.nbytes; in mtk_nor_match_prg()
228 if (!op->addr.nbytes) in mtk_nor_match_prg()
233 prg_len = tx_len + op->dummy.nbytes + rx_len; in mtk_nor_match_prg()
237 prg_len = tx_len + op->dummy.nbytes; in mtk_nor_match_prg()
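
Read together, the checks above amount to a byte/bit budget for programmable
I/O ("prg") mode. A rough summary of what the matched fragments imply:

/*
 * prg-mode budget implied by the checks above:
 *   - each transmitted byte (cmd + addr + dummy + data-out) occupies one
 *     PRGDATA register, so tx is bounded by MTK_NOR_REG_PRGDATA_MAX + 1 bytes;
 *   - the whole shifted sequence (tx + dummy + rx) must fit into
 *     MTK_NOR_PRG_CNT_MAX bits, which is why prg_left is computed as
 *     MTK_NOR_PRG_CNT_MAX / 8 minus the bytes already spoken for.
 */
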
248 tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_adj_prg_size()
249 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_adj_prg_size()
250 tx_len += op->dummy.nbytes; in mtk_nor_adj_prg_size()
251 tx_left = MTK_NOR_REG_PRGDATA_MAX + 1 - tx_len; in mtk_nor_adj_prg_size()
252 if (op->data.nbytes > tx_left) in mtk_nor_adj_prg_size()
253 op->data.nbytes = tx_left; in mtk_nor_adj_prg_size()
254 } else if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_nor_adj_prg_size()
255 prg_left = MTK_NOR_PRG_CNT_MAX / 8 - tx_len - op->dummy.nbytes; in mtk_nor_adj_prg_size()
258 if (op->data.nbytes > prg_left) in mtk_nor_adj_prg_size()
259 op->data.nbytes = prg_left; in mtk_nor_adj_prg_size()
265 struct mtk_nor *sp = spi_controller_get_devdata(mem->spi->master); in mtk_nor_adjust_op_size()
267 if (!op->data.nbytes) in mtk_nor_adjust_op_size()
270 if ((op->addr.nbytes == 3) || (op->addr.nbytes == 4)) { in mtk_nor_adjust_op_size()
271 if ((op->data.dir == SPI_MEM_DATA_IN) && in mtk_nor_adjust_op_size()
274 if (op->data.nbytes > 0x400000) in mtk_nor_adjust_op_size()
275 op->data.nbytes = 0x400000; in mtk_nor_adjust_op_size()
277 if ((op->addr.val & MTK_NOR_DMA_ALIGN_MASK) || in mtk_nor_adjust_op_size()
278 (op->data.nbytes < MTK_NOR_DMA_ALIGN)) in mtk_nor_adjust_op_size()
279 op->data.nbytes = 1; in mtk_nor_adjust_op_size()
281 op->data.nbytes &= ~MTK_NOR_DMA_ALIGN_MASK; in mtk_nor_adjust_op_size()
282 else if (op->data.nbytes > MTK_NOR_BOUNCE_BUF_SIZE) in mtk_nor_adjust_op_size()
283 op->data.nbytes = MTK_NOR_BOUNCE_BUF_SIZE; in mtk_nor_adjust_op_size()
285 } else if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_adjust_op_size()
286 if (op->data.nbytes >= MTK_NOR_PP_SIZE) in mtk_nor_adjust_op_size()
287 op->data.nbytes = MTK_NOR_PP_SIZE; in mtk_nor_adjust_op_size()
289 op->data.nbytes = 1; in mtk_nor_adjust_op_size()
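
Condensed, the clamping above reads as follows; the guard conditions selecting
between the last two read arms fall outside the matched lines, so they are
only paraphrased here.

/*
 * Clamping summary (paraphrase of the visible branches):
 *   - addressed read:  cap at 0x400000 bytes; an unaligned flash address or a
 *     length below MTK_NOR_DMA_ALIGN degrades to a 1-byte read; otherwise the
 *     length is either rounded down to the DMA alignment or capped at
 *     MTK_NOR_BOUNCE_BUF_SIZE for the bounce-buffer path;
 *   - addressed write: cap at MTK_NOR_PP_SIZE (buffered page program), else a
 *     single byte.
 */
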
304 if (op->cmd.buswidth != 1) in mtk_nor_supports_op()
307 if ((op->addr.nbytes == 3) || (op->addr.nbytes == 4)) { in mtk_nor_supports_op()
308 switch (op->data.dir) { in mtk_nor_supports_op()
314 if ((op->addr.buswidth == 1) && in mtk_nor_supports_op()
315 (op->dummy.nbytes == 0) && in mtk_nor_supports_op()
316 (op->data.buswidth == 1)) in mtk_nor_supports_op()
331 if (op->addr.nbytes == 4) in mtk_nor_setup_bus()
334 if (op->data.buswidth == 4) { in mtk_nor_setup_bus()
336 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA(4)); in mtk_nor_setup_bus()
337 if (op->addr.buswidth == 4) in mtk_nor_setup_bus()
339 } else if (op->data.buswidth == 2) { in mtk_nor_setup_bus()
341 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA(3)); in mtk_nor_setup_bus()
342 if (op->addr.buswidth == 2) in mtk_nor_setup_bus()
345 if (op->cmd.opcode == 0x0b) in mtk_nor_setup_bus()
360 writel(from, sp->base + MTK_NOR_REG_DMA_FADR); in mtk_nor_dma_exec()
361 writel(dma_addr, sp->base + MTK_NOR_REG_DMA_DADR); in mtk_nor_dma_exec()
362 writel(dma_addr + length, sp->base + MTK_NOR_REG_DMA_END_DADR); in mtk_nor_dma_exec()
364 if (sp->high_dma) { in mtk_nor_dma_exec()
366 sp->base + MTK_NOR_REG_DMA_DADR_HB); in mtk_nor_dma_exec()
368 sp->base + MTK_NOR_REG_DMA_END_DADR_HB); in mtk_nor_dma_exec()
371 if (sp->has_irq) { in mtk_nor_dma_exec()
372 reinit_completion(&sp->op_done); in mtk_nor_dma_exec()
380 if (sp->has_irq) { in mtk_nor_dma_exec()
381 if (!wait_for_completion_timeout(&sp->op_done, in mtk_nor_dma_exec()
383 ret = -ETIMEDOUT; in mtk_nor_dma_exec()
385 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_DMA_CTL, reg, in mtk_nor_dma_exec()
391 dev_err(sp->dev, "dma read timeout.\n"); in mtk_nor_dma_exec()
401 if (op->data.nbytes & MTK_NOR_DMA_ALIGN_MASK) in mtk_nor_read_bounce()
402 rdlen = (op->data.nbytes + MTK_NOR_DMA_ALIGN) & ~MTK_NOR_DMA_ALIGN_MASK; in mtk_nor_read_bounce()
404 rdlen = op->data.nbytes; in mtk_nor_read_bounce()
406 ret = mtk_nor_dma_exec(sp, op->addr.val, rdlen, sp->buffer_dma); in mtk_nor_read_bounce()
409 memcpy(op->data.buf.in, sp->buffer, op->data.nbytes); in mtk_nor_read_bounce()
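
The round-up above only fires for lengths that are not already multiples of
the 16-byte DMA alignment. Worked cases:

/*
 * rdlen round-up (MTK_NOR_DMA_ALIGN = 16, mask = 15):
 *   nbytes = 5   ->  (5 + 16) & ~15  = 16
 *   nbytes = 20  ->  (20 + 16) & ~15 = 32
 *   nbytes = 32  ->  already aligned, rdlen stays 32
 */
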
422 dma_addr = dma_map_single(sp->dev, op->data.buf.in, in mtk_nor_read_dma()
423 op->data.nbytes, DMA_FROM_DEVICE); in mtk_nor_read_dma()
425 if (dma_mapping_error(sp->dev, dma_addr)) in mtk_nor_read_dma()
426 return -EINVAL; in mtk_nor_read_dma()
428 ret = mtk_nor_dma_exec(sp, op->addr.val, op->data.nbytes, dma_addr); in mtk_nor_read_dma()
430 dma_unmap_single(sp->dev, dma_addr, op->data.nbytes, DMA_FROM_DEVICE); in mtk_nor_read_dma()
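
Putting the two read paths together: an unaligned destination goes through the
bounce buffer, an aligned one is mapped directly with the streaming DMA API.
The sketch below is illustrative only; nor_dma_read() stands in for the
controller-specific "start the DMA engine" helper and is not a real kernel
API, and the exact dispatch in the driver is not part of the matched lines.

static int dma_read_dispatch_example(struct device *dev, void *buf, size_t len,
				     u32 flash_addr, void *bounce_cpu,
				     dma_addr_t bounce_dma,
				     int (*nor_dma_read)(u32 addr, size_t len,
							 dma_addr_t dst))
{
	dma_addr_t dma_addr;
	int ret;

	if ((uintptr_t)buf & MTK_NOR_DMA_ALIGN_MASK) {
		/* unaligned destination: DMA a rounded-up length into the
		 * pre-allocated aligned buffer, then copy out what was asked for */
		ret = nor_dma_read(flash_addr, ALIGN(len, MTK_NOR_DMA_ALIGN),
				   bounce_dma);
		if (!ret)
			memcpy(buf, bounce_cpu, len);
		return ret;
	}

	/* aligned destination: map the caller's buffer for the device to write */
	dma_addr = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, dma_addr))
		return -EINVAL;

	ret = nor_dma_read(flash_addr, len, dma_addr);

	dma_unmap_single(dev, dma_addr, len, DMA_FROM_DEVICE);
	return ret;
}
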
437 u8 *buf = op->data.buf.in; in mtk_nor_read_pio()
442 buf[0] = readb(sp->base + MTK_NOR_REG_RDATA); in mtk_nor_read_pio()
451 if (sp->wbuf_en) in mtk_nor_write_buffer_enable()
454 val = readl(sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_enable()
455 writel(val | MTK_NOR_WR_BUF_EN, sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_enable()
456 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CFG2, val, in mtk_nor_write_buffer_enable()
459 sp->wbuf_en = true; in mtk_nor_write_buffer_enable()
468 if (!sp->wbuf_en) in mtk_nor_write_buffer_disable()
470 val = readl(sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_disable()
471 writel(val & ~MTK_NOR_WR_BUF_EN, sp->base + MTK_NOR_REG_CFG2); in mtk_nor_write_buffer_disable()
472 ret = readl_poll_timeout(sp->base + MTK_NOR_REG_CFG2, val, in mtk_nor_write_buffer_disable()
475 sp->wbuf_en = false; in mtk_nor_write_buffer_disable()
481 const u8 *buf = op->data.buf.out; in mtk_nor_pp_buffered()
489 for (i = 0; i < op->data.nbytes; i += 4) { in mtk_nor_pp_buffered()
492 writel(val, sp->base + MTK_NOR_REG_PP_DATA); in mtk_nor_pp_buffered()
495 (op->data.nbytes + 5) * BITS_PER_BYTE); in mtk_nor_pp_buffered()
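
The buffered page-program loop above pushes the payload four bytes at a time
into MTK_NOR_REG_PP_DATA; the line that assembles each 32-bit word is not
among the matched lines. A sketch of the usual little-endian packing, with the
byte order explicitly an assumption rather than a statement about this
controller:

/* Assumed little-endian packing; get_unaligned_le32(buf + i) is the usual
 * kernel helper for the same thing. */
static u32 pack_le32_example(const u8 *buf)
{
	return buf[0] | (buf[1] << 8) | (buf[2] << 16) | ((u32)buf[3] << 24);
}
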
501 const u8 *buf = op->data.buf.out; in mtk_nor_pp_unbuffered()
507 writeb(buf[0], sp->base + MTK_NOR_REG_WDATA); in mtk_nor_pp_unbuffered()
520 tx_len = op->cmd.nbytes + op->addr.nbytes; in mtk_nor_spi_mem_prg()
523 if (op->data.dir == SPI_MEM_DATA_OUT) in mtk_nor_spi_mem_prg()
524 tx_len += op->dummy.nbytes + op->data.nbytes; in mtk_nor_spi_mem_prg()
525 else if (op->data.dir == SPI_MEM_DATA_IN) in mtk_nor_spi_mem_prg()
526 rx_len = op->data.nbytes; in mtk_nor_spi_mem_prg()
528 prg_len = op->cmd.nbytes + op->addr.nbytes + op->dummy.nbytes + in mtk_nor_spi_mem_prg()
529 op->data.nbytes; in mtk_nor_spi_mem_prg()
532 // adjust_op_size. return -EINVAL instead of -ENOTSUPP so that in mtk_nor_spi_mem_prg()
533 // spi-mem won't try this op again with generic spi transfers. in mtk_nor_spi_mem_prg()
537 return -EINVAL; in mtk_nor_spi_mem_prg()
540 for (i = op->cmd.nbytes; i > 0; i--, reg_offset--) { in mtk_nor_spi_mem_prg()
541 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
542 bufbyte = (op->cmd.opcode >> ((i - 1) * BITS_PER_BYTE)) & 0xff; in mtk_nor_spi_mem_prg()
546 for (i = op->addr.nbytes; i > 0; i--, reg_offset--) { in mtk_nor_spi_mem_prg()
547 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
548 bufbyte = (op->addr.val >> ((i - 1) * BITS_PER_BYTE)) & 0xff; in mtk_nor_spi_mem_prg()
552 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_spi_mem_prg()
553 for (i = 0; i < op->dummy.nbytes; i++, reg_offset--) { in mtk_nor_spi_mem_prg()
554 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
558 for (i = 0; i < op->data.nbytes; i++, reg_offset--) { in mtk_nor_spi_mem_prg()
559 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
560 writeb(((const u8 *)(op->data.buf.out))[i], reg); in mtk_nor_spi_mem_prg()
564 for (; reg_offset >= 0; reg_offset--) { in mtk_nor_spi_mem_prg()
565 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_spi_mem_prg()
571 writel(prg_len * BITS_PER_BYTE + sp->caps->extra_dummy_bit, in mtk_nor_spi_mem_prg()
572 sp->base + MTK_NOR_REG_PRG_CNT); in mtk_nor_spi_mem_prg()
574 writel(prg_len * BITS_PER_BYTE, sp->base + MTK_NOR_REG_PRG_CNT); in mtk_nor_spi_mem_prg()
583 if (op->data.dir == SPI_MEM_DATA_IN) { in mtk_nor_spi_mem_prg()
584 for (i = op->data.nbytes - 1; i >= 0; i--, reg_offset++) { in mtk_nor_spi_mem_prg()
585 reg = sp->base + MTK_NOR_REG_SHIFT(reg_offset); in mtk_nor_spi_mem_prg()
586 ((u8 *)(op->data.buf.in))[i] = readb(reg); in mtk_nor_spi_mem_prg()
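
The write-side loops above serialize cmd and addr most-significant byte first
into descending PRGDATA offsets, and the read-back loop pairs the tail of the
data buffer with ascending SHIFT offsets. A worked case of the byte
extraction, using an arbitrary 3-byte address chosen for illustration:

/*
 * addr.val = 0x123456, addr.nbytes = 3:
 *   i = 3: (0x123456 >> 16) & 0xff = 0x12   written first, highest offset
 *   i = 2: (0x123456 >>  8) & 0xff = 0x34
 *   i = 1: (0x123456 >>  0) & 0xff = 0x56   written last, lowest offset
 * so the most significant byte lands in the highest PRGDATA register still
 * free, consistent with the MSB-first order SPI NOR commands use on the wire.
 */
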
595 struct mtk_nor *sp = spi_controller_get_devdata(mem->spi->master); in mtk_nor_exec_op()
598 if ((op->data.nbytes == 0) || in mtk_nor_exec_op()
599 ((op->addr.nbytes != 3) && (op->addr.nbytes != 4))) in mtk_nor_exec_op()
602 if (op->data.dir == SPI_MEM_DATA_OUT) { in mtk_nor_exec_op()
604 writeb(op->cmd.opcode, sp->base + MTK_NOR_REG_PRGDATA0); in mtk_nor_exec_op()
605 if (op->data.nbytes == MTK_NOR_PP_SIZE) in mtk_nor_exec_op()
610 if ((op->data.dir == SPI_MEM_DATA_IN) && mtk_nor_match_read(op)) { in mtk_nor_exec_op()
615 if (op->data.nbytes == 1) { in mtk_nor_exec_op()
628 struct mtk_nor *sp = spi_controller_get_devdata(spi->master); in mtk_nor_setup()
630 if (spi->max_speed_hz && (spi->max_speed_hz < sp->spi_freq)) { in mtk_nor_setup()
631 dev_err(&spi->dev, "spi clock should be %u Hz.\n", in mtk_nor_setup()
632 sp->spi_freq); in mtk_nor_setup()
633 return -EINVAL; in mtk_nor_setup()
635 spi->max_speed_hz = sp->spi_freq; in mtk_nor_setup()
653 list_for_each_entry(t, &m->transfers, transfer_list) { in mtk_nor_transfer_one_message()
654 txbuf = t->tx_buf; in mtk_nor_transfer_one_message()
655 for (i = 0; i < t->len; i++, reg_offset--) { in mtk_nor_transfer_one_message()
656 reg = sp->base + MTK_NOR_REG_PRGDATA(reg_offset); in mtk_nor_transfer_one_message()
662 trx_len += t->len; in mtk_nor_transfer_one_message()
665 writel(trx_len * BITS_PER_BYTE, sp->base + MTK_NOR_REG_PRG_CNT); in mtk_nor_transfer_one_message()
672 reg_offset = trx_len - 1; in mtk_nor_transfer_one_message()
673 list_for_each_entry(t, &m->transfers, transfer_list) { in mtk_nor_transfer_one_message()
674 rxbuf = t->rx_buf; in mtk_nor_transfer_one_message()
675 for (i = 0; i < t->len; i++, reg_offset--) { in mtk_nor_transfer_one_message()
676 reg = sp->base + MTK_NOR_REG_SHIFT(reg_offset); in mtk_nor_transfer_one_message()
682 m->actual_length = trx_len; in mtk_nor_transfer_one_message()
684 m->status = stat; in mtk_nor_transfer_one_message()
692 clk_disable_unprepare(sp->spi_clk); in mtk_nor_disable_clk()
693 clk_disable_unprepare(sp->ctlr_clk); in mtk_nor_disable_clk()
694 clk_disable_unprepare(sp->axi_clk); in mtk_nor_disable_clk()
695 clk_disable_unprepare(sp->axi_s_clk); in mtk_nor_disable_clk()
702 ret = clk_prepare_enable(sp->spi_clk); in mtk_nor_enable_clk()
706 ret = clk_prepare_enable(sp->ctlr_clk); in mtk_nor_enable_clk()
708 clk_disable_unprepare(sp->spi_clk); in mtk_nor_enable_clk()
712 ret = clk_prepare_enable(sp->axi_clk); in mtk_nor_enable_clk()
714 clk_disable_unprepare(sp->spi_clk); in mtk_nor_enable_clk()
715 clk_disable_unprepare(sp->ctlr_clk); in mtk_nor_enable_clk()
719 ret = clk_prepare_enable(sp->axi_s_clk); in mtk_nor_enable_clk()
721 clk_disable_unprepare(sp->spi_clk); in mtk_nor_enable_clk()
722 clk_disable_unprepare(sp->ctlr_clk); in mtk_nor_enable_clk()
723 clk_disable_unprepare(sp->axi_clk); in mtk_nor_enable_clk()
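
mtk_nor_enable_clk() unwinds each previously enabled clock by hand on failure.
For comparison only, the same shape can be expressed with the clk_bulk
helpers; this is a sketch of the alternative, not something the driver does,
and note that it treats every clock as optional whereas the probe below only
does so for "axi" and "axi_s".

/* Sketch using the clk_bulk API; clock names follow the probe() below. */
static int enable_clks_bulk_example(struct device *dev)
{
	struct clk_bulk_data clks[] = {
		{ .id = "spi" }, { .id = "sf" }, { .id = "axi" }, { .id = "axi_s" },
	};
	int ret;

	ret = devm_clk_bulk_get_optional(dev, ARRAY_SIZE(clks), clks);
	if (ret)
		return ret;

	/* enables all of them, rolling back the already-enabled ones on error */
	return clk_bulk_prepare_enable(ARRAY_SIZE(clks), clks);
}
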
732 writel(0, sp->base + MTK_NOR_REG_IRQ_EN); in mtk_nor_init()
733 writel(MTK_NOR_IRQ_MASK, sp->base + MTK_NOR_REG_IRQ_STAT); in mtk_nor_init()
735 writel(MTK_NOR_ENABLE_SF_CMD, sp->base + MTK_NOR_REG_WP); in mtk_nor_init()
746 irq_status = readl(sp->base + MTK_NOR_REG_IRQ_STAT); in mtk_nor_irq_handler()
747 irq_enabled = readl(sp->base + MTK_NOR_REG_IRQ_EN); in mtk_nor_irq_handler()
749 writel(irq_status, sp->base + MTK_NOR_REG_IRQ_STAT); in mtk_nor_irq_handler()
755 complete(&sp->op_done); in mtk_nor_irq_handler()
756 writel(0, sp->base + MTK_NOR_REG_IRQ_EN); in mtk_nor_irq_handler()
789 { .compatible = "mediatek,mt8173-nor", .data = &mtk_nor_caps_mt8173 },
790 { .compatible = "mediatek,mt8186-nor", .data = &mtk_nor_caps_mt8186 },
791 { .compatible = "mediatek,mt8192-nor", .data = &mtk_nor_caps_mt8192 },
809 spi_clk = devm_clk_get(&pdev->dev, "spi"); in mtk_nor_probe()
813 ctlr_clk = devm_clk_get(&pdev->dev, "sf"); in mtk_nor_probe()
817 axi_clk = devm_clk_get_optional(&pdev->dev, "axi"); in mtk_nor_probe()
821 axi_s_clk = devm_clk_get_optional(&pdev->dev, "axi_s"); in mtk_nor_probe()
825 caps = (struct mtk_nor_caps *)of_device_get_match_data(&pdev->dev); in mtk_nor_probe()
827 ret = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(caps->dma_bits)); in mtk_nor_probe()
829 dev_err(&pdev->dev, "failed to set dma mask(%u)\n", caps->dma_bits); in mtk_nor_probe()
833 ctlr = devm_spi_alloc_master(&pdev->dev, sizeof(*sp)); in mtk_nor_probe()
835 dev_err(&pdev->dev, "failed to allocate spi controller\n"); in mtk_nor_probe()
836 return -ENOMEM; in mtk_nor_probe()
839 ctlr->bits_per_word_mask = SPI_BPW_MASK(8); in mtk_nor_probe()
840 ctlr->dev.of_node = pdev->dev.of_node; in mtk_nor_probe()
841 ctlr->max_message_size = mtk_max_msg_size; in mtk_nor_probe()
842 ctlr->mem_ops = &mtk_nor_mem_ops; in mtk_nor_probe()
843 ctlr->mode_bits = SPI_RX_DUAL | SPI_RX_QUAD | SPI_TX_DUAL | SPI_TX_QUAD; in mtk_nor_probe()
844 ctlr->num_chipselect = 1; in mtk_nor_probe()
845 ctlr->setup = mtk_nor_setup; in mtk_nor_probe()
846 ctlr->transfer_one_message = mtk_nor_transfer_one_message; in mtk_nor_probe()
847 ctlr->auto_runtime_pm = true; in mtk_nor_probe()
849 dev_set_drvdata(&pdev->dev, ctlr); in mtk_nor_probe()
852 sp->base = base; in mtk_nor_probe()
853 sp->has_irq = false; in mtk_nor_probe()
854 sp->wbuf_en = false; in mtk_nor_probe()
855 sp->ctlr = ctlr; in mtk_nor_probe()
856 sp->dev = &pdev->dev; in mtk_nor_probe()
857 sp->spi_clk = spi_clk; in mtk_nor_probe()
858 sp->ctlr_clk = ctlr_clk; in mtk_nor_probe()
859 sp->axi_clk = axi_clk; in mtk_nor_probe()
860 sp->axi_s_clk = axi_s_clk; in mtk_nor_probe()
861 sp->caps = caps; in mtk_nor_probe()
862 sp->high_dma = caps->dma_bits > 32; in mtk_nor_probe()
863 sp->buffer = dmam_alloc_coherent(&pdev->dev, in mtk_nor_probe()
865 &sp->buffer_dma, GFP_KERNEL); in mtk_nor_probe()
866 if (!sp->buffer) in mtk_nor_probe()
867 return -ENOMEM; in mtk_nor_probe()
869 if ((uintptr_t)sp->buffer & MTK_NOR_DMA_ALIGN_MASK) { in mtk_nor_probe()
870 dev_err(sp->dev, "misaligned allocation of internal buffer.\n"); in mtk_nor_probe()
871 return -ENOMEM; in mtk_nor_probe()
878 sp->spi_freq = clk_get_rate(sp->spi_clk); in mtk_nor_probe()
885 dev_warn(sp->dev, "IRQ not available.\n"); in mtk_nor_probe()
887 ret = devm_request_irq(sp->dev, irq, mtk_nor_irq_handler, 0, in mtk_nor_probe()
888 pdev->name, sp); in mtk_nor_probe()
890 dev_warn(sp->dev, "failed to request IRQ.\n"); in mtk_nor_probe()
892 init_completion(&sp->op_done); in mtk_nor_probe()
893 sp->has_irq = true; in mtk_nor_probe()
897 pm_runtime_set_autosuspend_delay(&pdev->dev, -1); in mtk_nor_probe()
898 pm_runtime_use_autosuspend(&pdev->dev); in mtk_nor_probe()
899 pm_runtime_set_active(&pdev->dev); in mtk_nor_probe()
900 pm_runtime_enable(&pdev->dev); in mtk_nor_probe()
901 pm_runtime_get_noresume(&pdev->dev); in mtk_nor_probe()
903 ret = devm_spi_register_controller(&pdev->dev, ctlr); in mtk_nor_probe()
907 pm_runtime_mark_last_busy(&pdev->dev); in mtk_nor_probe()
908 pm_runtime_put_autosuspend(&pdev->dev); in mtk_nor_probe()
910 dev_info(&pdev->dev, "spi frequency: %u Hz\n", sp->spi_freq); in mtk_nor_probe()
915 pm_runtime_disable(&pdev->dev); in mtk_nor_probe()
916 pm_runtime_set_suspended(&pdev->dev); in mtk_nor_probe()
917 pm_runtime_dont_use_autosuspend(&pdev->dev); in mtk_nor_probe()
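
The probe tail above follows the usual runtime-PM bring-up and teardown. A
condensed restatement of the sequence already visible in the fragments, not
new behaviour:

/*
 * pm_runtime_set_autosuspend_delay(-1) + pm_runtime_use_autosuspend()
 * pm_runtime_set_active()        the device is already powered (clocks on)
 * pm_runtime_enable()            hand control to the PM core
 * pm_runtime_get_noresume()      hold a reference across registration
 * devm_spi_register_controller()
 * pm_runtime_mark_last_busy() + pm_runtime_put_autosuspend()
 * and on the error path: pm_runtime_disable(), pm_runtime_set_suspended(),
 * pm_runtime_dont_use_autosuspend().
 */
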
926 struct spi_controller *ctlr = dev_get_drvdata(&pdev->dev); in mtk_nor_remove()
929 pm_runtime_disable(&pdev->dev); in mtk_nor_remove()
930 pm_runtime_set_suspended(&pdev->dev); in mtk_nor_remove()
931 pm_runtime_dont_use_autosuspend(&pdev->dev); in mtk_nor_remove()
994 MODULE_DESCRIPTION("Mediatek SPI NOR controller driver");