Lines Matching +full:jz4780 +full:- +full:dma

1 // SPDX-License-Identifier: GPL-2.0-only
6 * Pierre-Yves Mordret <pierre-yves.mordret@st.com>
10 * Inspired by stm32-dma.c and dma-jz4780.c
16 #include <linux/dma-mapping.h>
33 #include "virt-dma.h"
36 #define STM32_MDMA_SHIFT(n) (ffs(n) - 1)
286 return container_of(chan->vchan.chan.device, struct stm32_mdma_device, in stm32_mdma_get_dev()
302 return &chan->vchan.chan.dev->device; in chan2dev()
307 return mdma_dev->ddev.dev; in mdma2dev()
312 return readl_relaxed(dmadev->base + reg); in stm32_mdma_read()
317 writel_relaxed(val, dmadev->base + reg); in stm32_mdma_write()
323 void __iomem *addr = dmadev->base + reg; in stm32_mdma_set_bits()
331 void __iomem *addr = dmadev->base + reg; in stm32_mdma_clr_bits()
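
The two helpers above implement the usual MMIO read-modify-write pattern. A minimal sketch of the elided bodies, assuming the standard readl_relaxed()/writel_relaxed() accessors (the matched lines show only the address computation):

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	/* OR the requested bits into the current register value */
	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	/* clear the requested bits from the current register value */
	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}
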
347 desc->node[i].hwdesc = in stm32_mdma_alloc_desc()
348 dma_pool_alloc(chan->desc_pool, GFP_NOWAIT, in stm32_mdma_alloc_desc()
349 &desc->node[i].hwdesc_phys); in stm32_mdma_alloc_desc()
350 if (!desc->node[i].hwdesc) in stm32_mdma_alloc_desc()
354 desc->count = count; in stm32_mdma_alloc_desc()
360 while (--i >= 0) in stm32_mdma_alloc_desc()
361 dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, in stm32_mdma_alloc_desc()
362 desc->node[i].hwdesc_phys); in stm32_mdma_alloc_desc()
370 struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan); in stm32_mdma_desc_free()
373 for (i = 0; i < desc->count; i++) in stm32_mdma_desc_free()
374 dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, in stm32_mdma_desc_free()
375 desc->node[i].hwdesc_phys); in stm32_mdma_desc_free()
387 return ffs(width) - 1; in stm32_mdma_get_width()
389 dev_err(chan2dev(chan), "Dma bus width %i not supported\n", in stm32_mdma_get_width()
391 return -EINVAL; in stm32_mdma_get_width()
407 if ((((buf_len | addr) & (max_width - 1)) == 0) && in stm32_mdma_get_max_width()
432 id = chan->id; in stm32_mdma_disable_chan()
444 dmadev->base + STM32_MDMA_CISR(id), cisr, in stm32_mdma_disable_chan()
448 return -EBUSY; in stm32_mdma_disable_chan()
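
The -EBUSY return above follows a register poll; lines 444 and 448 are fragments of that construct. A sketch of the poll-with-timeout idiom, assuming readl_relaxed_poll_timeout() from <linux/iopoll.h>; the polled flag (here CTCIF, channel transfer complete) and the 10 us poll / 1000 us timeout bounds are illustrative, not taken from the matched lines:

	u32 cisr;
	int ret;

	/* re-read CISR until the flag is set or the timeout expires;
	 * the macro returns 0 on success and -ETIMEDOUT otherwise */
	ret = readl_relaxed_poll_timeout(dmadev->base + STM32_MDMA_CISR(id),
					 cisr, cisr & STM32_MDMA_CISR_CTCIF,
					 10, 1000);
	if (ret)
		return -EBUSY;
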
461 /* Disable DMA */ in stm32_mdma_stop()
467 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id)); in stm32_mdma_stop()
471 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status); in stm32_mdma_stop()
474 chan->busy = false; in stm32_mdma_stop()
486 for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) { in stm32_mdma_set_bus()
487 if (mask == dmadev->ahb_addr_masks[i]) { in stm32_mdma_set_bus()
501 struct stm32_mdma_chan_config *chan_config = &chan->chan_config; in stm32_mdma_set_xfer_param()
508 src_addr_width = chan->dma_config.src_addr_width; in stm32_mdma_set_xfer_param()
509 dst_addr_width = chan->dma_config.dst_addr_width; in stm32_mdma_set_xfer_param()
510 src_maxburst = chan->dma_config.src_maxburst; in stm32_mdma_set_xfer_param()
511 dst_maxburst = chan->dma_config.dst_maxburst; in stm32_mdma_set_xfer_param()
513 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)); in stm32_mdma_set_xfer_param()
514 ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)); in stm32_mdma_set_xfer_param()
515 ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)); in stm32_mdma_set_xfer_param()
522 ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK; in stm32_mdma_set_xfer_param()
526 * the number of bytes - 1 in CTCR register in stm32_mdma_set_xfer_param()
530 ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1)); in stm32_mdma_set_xfer_param()
541 return -EINVAL; in stm32_mdma_set_xfer_param()
547 return -EINVAL; in stm32_mdma_set_xfer_param()
552 * - Clear SW request as in this case this is a HW one in stm32_mdma_set_xfer_param()
553 * - Clear WEX, HEX and BEX bits in stm32_mdma_set_xfer_param()
554 * - Set priority level in stm32_mdma_set_xfer_param()
558 ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level); in stm32_mdma_set_xfer_param()
562 ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request); in stm32_mdma_set_xfer_param()
566 dst_addr = chan->dma_config.dst_addr; in stm32_mdma_set_xfer_param()
579 chan->mem_burst = dst_best_burst; in stm32_mdma_set_xfer_param()
585 chan->mem_width = src_addr_width; in stm32_mdma_set_xfer_param()
599 chan->mem_burst = src_best_burst; in stm32_mdma_set_xfer_param()
611 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr); in stm32_mdma_set_xfer_param()
615 src_addr = chan->dma_config.src_addr; in stm32_mdma_set_xfer_param()
633 chan->mem_width = dst_addr_width; in stm32_mdma_set_xfer_param()
658 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr); in stm32_mdma_set_xfer_param()
662 dev_err(chan2dev(chan), "Dma direction is not supported\n"); in stm32_mdma_set_xfer_param()
663 return -EINVAL; in stm32_mdma_set_xfer_param()
676 dev_dbg(chan2dev(chan), "hwdesc: %pad\n", &node->hwdesc_phys); in stm32_mdma_dump_hwdesc()
677 dev_dbg(chan2dev(chan), "CTCR: 0x%08x\n", node->hwdesc->ctcr); in stm32_mdma_dump_hwdesc()
678 dev_dbg(chan2dev(chan), "CBNDTR: 0x%08x\n", node->hwdesc->cbndtr); in stm32_mdma_dump_hwdesc()
679 dev_dbg(chan2dev(chan), "CSAR: 0x%08x\n", node->hwdesc->csar); in stm32_mdma_dump_hwdesc()
680 dev_dbg(chan2dev(chan), "CDAR: 0x%08x\n", node->hwdesc->cdar); in stm32_mdma_dump_hwdesc()
681 dev_dbg(chan2dev(chan), "CBRUR: 0x%08x\n", node->hwdesc->cbrur); in stm32_mdma_dump_hwdesc()
682 dev_dbg(chan2dev(chan), "CLAR: 0x%08x\n", node->hwdesc->clar); in stm32_mdma_dump_hwdesc()
683 dev_dbg(chan2dev(chan), "CTBR: 0x%08x\n", node->hwdesc->ctbr); in stm32_mdma_dump_hwdesc()
684 dev_dbg(chan2dev(chan), "CMAR: 0x%08x\n", node->hwdesc->cmar); in stm32_mdma_dump_hwdesc()
685 dev_dbg(chan2dev(chan), "CMDR: 0x%08x\n\n", node->hwdesc->cmdr); in stm32_mdma_dump_hwdesc()
695 struct stm32_mdma_chan_config *config = &chan->chan_config; in stm32_mdma_setup_hwdesc()
699 hwdesc = desc->node[count].hwdesc; in stm32_mdma_setup_hwdesc()
700 hwdesc->ctcr = ctcr; in stm32_mdma_setup_hwdesc()
701 hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | in stm32_mdma_setup_hwdesc()
705 hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len); in stm32_mdma_setup_hwdesc()
706 hwdesc->csar = src_addr; in stm32_mdma_setup_hwdesc()
707 hwdesc->cdar = dst_addr; in stm32_mdma_setup_hwdesc()
708 hwdesc->cbrur = 0; in stm32_mdma_setup_hwdesc()
709 hwdesc->ctbr = ctbr; in stm32_mdma_setup_hwdesc()
710 hwdesc->cmar = config->mask_addr; in stm32_mdma_setup_hwdesc()
711 hwdesc->cmdr = config->mask_data; in stm32_mdma_setup_hwdesc()
715 hwdesc->clar = desc->node[0].hwdesc_phys; in stm32_mdma_setup_hwdesc()
717 hwdesc->clar = 0; in stm32_mdma_setup_hwdesc()
719 hwdesc->clar = desc->node[next].hwdesc_phys; in stm32_mdma_setup_hwdesc()
722 stm32_mdma_dump_hwdesc(chan, &desc->node[count]); in stm32_mdma_setup_hwdesc()
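
For reference, a sketch of the linked-list hardware descriptor that stm32_mdma_setup_hwdesc() populates. The field names mirror the registers dumped above, and the order follows the channel register file so the controller can reload CTCR..CMDR from memory through CLAR; the reserved slot and the alignment are assumptions here, not taken from the matched lines:

struct stm32_mdma_hwdesc {
	u32 ctcr;	/* transfer configuration */
	u32 cbndtr;	/* block number of data to transfer */
	u32 csar;	/* source address */
	u32 cdar;	/* destination address */
	u32 cbrur;	/* block repeat address update */
	u32 clar;	/* link address: next descriptor, 0 ends the list */
	u32 ctbr;	/* trigger and bus selection */
	u32 reserved;	/* hole in the register map */
	u32 cmar;	/* mask address */
	u32 cmdr;	/* mask data */
} __aligned(64);

As lines 715-719 show, each node's clar points at the next node's hwdesc_phys; the last node links back to node 0 for cyclic transfers and holds 0 (end of list) otherwise.
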
731 struct dma_slave_config *dma_config = &chan->dma_config; in stm32_mdma_setup_xfer()
740 return -EINVAL; in stm32_mdma_setup_xfer()
745 dst_addr = dma_config->dst_addr; in stm32_mdma_setup_xfer()
752 src_addr = dma_config->src_addr; in stm32_mdma_setup_xfer()
766 i == sg_len - 1, i == 0, false); in stm32_mdma_setup_xfer()
774 desc->ccr = ccr; in stm32_mdma_setup_xfer()
789 * Once the DMA channel is set up in cyclic mode, we cannot assign this in stm32_mdma_prep_slave_sg()
790 * channel anymore. The DMA channel needs to be aborted or terminated in stm32_mdma_prep_slave_sg()
793 if (chan->desc && chan->desc->cyclic) { in stm32_mdma_prep_slave_sg()
795 "Request not allowed when dma in cyclic mode\n"); in stm32_mdma_prep_slave_sg()
807 desc->cyclic = false; in stm32_mdma_prep_slave_sg()
809 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_slave_sg()
812 for (i = 0; i < desc->count; i++) in stm32_mdma_prep_slave_sg()
813 dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, in stm32_mdma_prep_slave_sg()
814 desc->node[i].hwdesc_phys); in stm32_mdma_prep_slave_sg()
827 struct dma_slave_config *dma_config = &chan->dma_config; in stm32_mdma_prep_dma_cyclic()
834 * Once the DMA channel is set up in cyclic mode, we cannot assign this in stm32_mdma_prep_dma_cyclic()
835 * channel anymore. The DMA channel needs to be aborted or terminated in stm32_mdma_prep_dma_cyclic()
838 if (chan->desc && chan->desc->cyclic) { in stm32_mdma_prep_dma_cyclic()
840 "Request not allowed when dma in cyclic mode\n"); in stm32_mdma_prep_dma_cyclic()
881 desc->ccr = ccr; in stm32_mdma_prep_dma_cyclic()
887 dst_addr = dma_config->dst_addr; in stm32_mdma_prep_dma_cyclic()
889 src_addr = dma_config->src_addr; in stm32_mdma_prep_dma_cyclic()
895 i == count - 1, i == 0, true); in stm32_mdma_prep_dma_cyclic()
898 desc->cyclic = true; in stm32_mdma_prep_dma_cyclic()
900 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_cyclic()
903 for (i = 0; i < desc->count; i++) in stm32_mdma_prep_dma_cyclic()
904 dma_pool_free(chan->desc_pool, desc->node[i].hwdesc, in stm32_mdma_prep_dma_cyclic()
905 desc->node[i].hwdesc_phys); in stm32_mdma_prep_dma_cyclic()
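
On the consumer side, a cyclic transfer against this driver goes through the generic dmaengine API. A hypothetical client sketch (the direction, buffer, and callback names are placeholders, and buf_dma would come from dma_alloc_coherent() or dma_map_single()):

	struct dma_async_tx_descriptor *desc;

	/* split buf_len into buf_len / period_len periods; the driver
	 * builds one linked hwdesc per period and wraps the last one */
	desc = dmaengine_prep_dma_cyclic(chan, buf_dma, buf_len, period_len,
					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
	if (!desc)
		return -EIO;

	desc->callback = my_period_done;	/* called once per period */
	desc->callback_param = my_ctx;
	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
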
926 * Once the DMA channel is set up in cyclic mode, we cannot assign this in stm32_mdma_prep_dma_memcpy()
927 * channel anymore. The DMA channel needs to be aborted or terminated in stm32_mdma_prep_dma_memcpy()
930 if (chan->desc && chan->desc->cyclic) { in stm32_mdma_prep_dma_memcpy()
932 "Request not allowed when dma in cyclic mode\n"); in stm32_mdma_prep_dma_memcpy()
941 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)); in stm32_mdma_prep_dma_memcpy()
942 ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)); in stm32_mdma_prep_dma_memcpy()
943 ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)); in stm32_mdma_prep_dma_memcpy()
944 cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)); in stm32_mdma_prep_dma_memcpy()
987 ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1)); in stm32_mdma_prep_dma_memcpy()
1019 hwdesc = desc->node[0].hwdesc; in stm32_mdma_prep_dma_memcpy()
1020 hwdesc->ctcr = ctcr; in stm32_mdma_prep_dma_memcpy()
1021 hwdesc->cbndtr = cbndtr; in stm32_mdma_prep_dma_memcpy()
1022 hwdesc->csar = src; in stm32_mdma_prep_dma_memcpy()
1023 hwdesc->cdar = dest; in stm32_mdma_prep_dma_memcpy()
1024 hwdesc->cbrur = 0; in stm32_mdma_prep_dma_memcpy()
1025 hwdesc->clar = 0; in stm32_mdma_prep_dma_memcpy()
1026 hwdesc->ctbr = ctbr; in stm32_mdma_prep_dma_memcpy()
1027 hwdesc->cmar = 0; in stm32_mdma_prep_dma_memcpy()
1028 hwdesc->cmdr = 0; in stm32_mdma_prep_dma_memcpy()
1030 stm32_mdma_dump_hwdesc(chan, &desc->node[0]); in stm32_mdma_prep_dma_memcpy()
1034 STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1)); in stm32_mdma_prep_dma_memcpy()
1040 xfer_count = min_t(size_t, len - offset, in stm32_mdma_prep_dma_memcpy()
1078 i == count - 1, i == 0, false); in stm32_mdma_prep_dma_memcpy()
1082 desc->ccr = ccr; in stm32_mdma_prep_dma_memcpy()
1084 desc->cyclic = false; in stm32_mdma_prep_dma_memcpy()
1086 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_memcpy()
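
A memcpy transfer needs no slave configuration on the client side; a minimal sketch, assuming dst and src were mapped by the caller (e.g. with dma_map_single()):

	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
	if (!tx)
		return -EIO;

	cookie = dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	/* completion can later be checked with dma_async_is_tx_complete() */
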
1094 stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id))); in stm32_mdma_dump_reg()
1096 stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id))); in stm32_mdma_dump_reg()
1098 stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id))); in stm32_mdma_dump_reg()
1100 stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id))); in stm32_mdma_dump_reg()
1102 stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id))); in stm32_mdma_dump_reg()
1104 stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id))); in stm32_mdma_dump_reg()
1106 stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id))); in stm32_mdma_dump_reg()
1108 stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id))); in stm32_mdma_dump_reg()
1110 stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id))); in stm32_mdma_dump_reg()
1112 stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id))); in stm32_mdma_dump_reg()
1120 u32 id = chan->id; in stm32_mdma_start_transfer()
1123 vdesc = vchan_next_desc(&chan->vchan); in stm32_mdma_start_transfer()
1125 chan->desc = NULL; in stm32_mdma_start_transfer()
1129 list_del(&vdesc->node); in stm32_mdma_start_transfer()
1131 chan->desc = to_stm32_mdma_desc(vdesc); in stm32_mdma_start_transfer()
1132 hwdesc = chan->desc->node[0].hwdesc; in stm32_mdma_start_transfer()
1133 chan->curr_hwdesc = 0; in stm32_mdma_start_transfer()
1135 stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr); in stm32_mdma_start_transfer()
1136 stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr); in stm32_mdma_start_transfer()
1137 stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr); in stm32_mdma_start_transfer()
1138 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar); in stm32_mdma_start_transfer()
1139 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar); in stm32_mdma_start_transfer()
1140 stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur); in stm32_mdma_start_transfer()
1141 stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar); in stm32_mdma_start_transfer()
1142 stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr); in stm32_mdma_start_transfer()
1143 stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar); in stm32_mdma_start_transfer()
1144 stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr); in stm32_mdma_start_transfer()
1153 /* Start DMA */ in stm32_mdma_start_transfer()
1157 if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) { in stm32_mdma_start_transfer()
1162 chan->busy = true; in stm32_mdma_start_transfer()
1164 dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan); in stm32_mdma_start_transfer()
1172 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_issue_pending()
1174 if (!vchan_issue_pending(&chan->vchan)) in stm32_mdma_issue_pending()
1177 dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan); in stm32_mdma_issue_pending()
1179 if (!chan->desc && !chan->busy) in stm32_mdma_issue_pending()
1183 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_mdma_issue_pending()
1192 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_pause()
1194 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_mdma_pause()
1197 dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan); in stm32_mdma_pause()
1210 hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc; in stm32_mdma_resume()
1212 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_resume()
1214 /* Re-configure control register */ in stm32_mdma_resume()
1215 stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr); in stm32_mdma_resume()
1218 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id)); in stm32_mdma_resume()
1220 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status); in stm32_mdma_resume()
1224 /* Re-start DMA */ in stm32_mdma_resume()
1225 reg = STM32_MDMA_CCR(chan->id); in stm32_mdma_resume()
1229 if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) in stm32_mdma_resume()
1232 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_mdma_resume()
1234 dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan); in stm32_mdma_resume()
1245 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_terminate_all()
1246 if (chan->desc) { in stm32_mdma_terminate_all()
1247 vchan_terminate_vdesc(&chan->desc->vdesc); in stm32_mdma_terminate_all()
1248 if (chan->busy) in stm32_mdma_terminate_all()
1250 chan->desc = NULL; in stm32_mdma_terminate_all()
1252 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_mdma_terminate_all()
1253 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_mdma_terminate_all()
1255 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_mdma_terminate_all()
1264 vchan_synchronize(&chan->vchan); in stm32_mdma_synchronize()
1272 memcpy(&chan->dma_config, config, sizeof(*config)); in stm32_mdma_slave_config()
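
stm32_mdma_slave_config() only caches the configuration; it is applied when the next transfer is prepared (see stm32_mdma_set_xfer_param() above). On the client side the structure is filled and handed over with dmaengine_slave_config(); a hypothetical MEM_TO_DEV setup where fifo_phys stands in for a real device FIFO address:

	struct dma_slave_config cfg = {
		.dst_addr	= fifo_phys,
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= 4,	/* capped by the driver's best-burst logic */
	};
	int ret;

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret)
		return ret;
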
1282 struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc; in stm32_mdma_desc_residue()
1287 for (i = curr_hwdesc + 1; i < desc->count; i++) { in stm32_mdma_desc_residue()
1288 hwdesc = desc->node[i].hwdesc; in stm32_mdma_desc_residue()
1289 residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr); in stm32_mdma_desc_residue()
1291 cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)); in stm32_mdma_desc_residue()
1294 if (!chan->mem_burst) in stm32_mdma_desc_residue()
1297 burst_size = chan->mem_burst * chan->mem_width; in stm32_mdma_desc_residue()
1300 residue = residue - modulo + burst_size; in stm32_mdma_desc_residue()
1319 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_tx_status()
1321 vdesc = vchan_find_desc(&chan->vchan, cookie); in stm32_mdma_tx_status()
1322 if (chan->desc && cookie == chan->desc->vdesc.tx.cookie) in stm32_mdma_tx_status()
1323 residue = stm32_mdma_desc_residue(chan, chan->desc, in stm32_mdma_tx_status()
1324 chan->curr_hwdesc); in stm32_mdma_tx_status()
1330 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_mdma_tx_status()
1337 vchan_cookie_complete(&chan->desc->vdesc); in stm32_mdma_xfer_end()
1338 chan->desc = NULL; in stm32_mdma_xfer_end()
1339 chan->busy = false; in stm32_mdma_xfer_end()
1352 status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0); in stm32_mdma_irq_handler()
1356 status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1); in stm32_mdma_irq_handler()
1369 chan = &dmadev->chan[id]; in stm32_mdma_irq_handler()
1376 spin_lock(&chan->vchan.lock); in stm32_mdma_irq_handler()
1377 status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id)); in stm32_mdma_irq_handler()
1378 ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)); in stm32_mdma_irq_handler()
1383 spin_unlock(&chan->vchan.lock); in stm32_mdma_irq_handler()
1391 reg = STM32_MDMA_CIFCR(chan->id); in stm32_mdma_irq_handler()
1395 id = chan->id; in stm32_mdma_irq_handler()
1396 status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)); in stm32_mdma_irq_handler()
1412 chan->curr_hwdesc++; in stm32_mdma_irq_handler()
1413 if (chan->desc && chan->desc->cyclic) { in stm32_mdma_irq_handler()
1414 if (chan->curr_hwdesc == chan->desc->count) in stm32_mdma_irq_handler()
1415 chan->curr_hwdesc = 0; in stm32_mdma_irq_handler()
1416 vchan_cyclic_callback(&chan->desc->vdesc); in stm32_mdma_irq_handler()
1429 spin_unlock(&chan->vchan.lock); in stm32_mdma_irq_handler()
1441 chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device), in stm32_mdma_alloc_chan_resources()
1442 c->device->dev, in stm32_mdma_alloc_chan_resources()
1446 if (!chan->desc_pool) { in stm32_mdma_alloc_chan_resources()
1448 return -ENOMEM; in stm32_mdma_alloc_chan_resources()
1451 ret = pm_runtime_get_sync(dmadev->ddev.dev); in stm32_mdma_alloc_chan_resources()
1457 pm_runtime_put(dmadev->ddev.dev); in stm32_mdma_alloc_chan_resources()
1468 dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id); in stm32_mdma_free_chan_resources()
1470 if (chan->busy) { in stm32_mdma_free_chan_resources()
1471 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_free_chan_resources()
1473 chan->desc = NULL; in stm32_mdma_free_chan_resources()
1474 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_mdma_free_chan_resources()
1477 pm_runtime_put(dmadev->ddev.dev); in stm32_mdma_free_chan_resources()
1479 dmam_pool_destroy(chan->desc_pool); in stm32_mdma_free_chan_resources()
1480 chan->desc_pool = NULL; in stm32_mdma_free_chan_resources()
1486 struct stm32_mdma_device *dmadev = ofdma->of_dma_data; in stm32_mdma_of_xlate()
1491 if (dma_spec->args_count < 5) { in stm32_mdma_of_xlate()
1496 config.request = dma_spec->args[0]; in stm32_mdma_of_xlate()
1497 config.priority_level = dma_spec->args[1]; in stm32_mdma_of_xlate()
1498 config.transfer_config = dma_spec->args[2]; in stm32_mdma_of_xlate()
1499 config.mask_addr = dma_spec->args[3]; in stm32_mdma_of_xlate()
1500 config.mask_data = dma_spec->args[4]; in stm32_mdma_of_xlate()
1502 if (config.request >= dmadev->nr_requests) { in stm32_mdma_of_xlate()
1512 c = dma_get_any_slave_channel(&dmadev->ddev); in stm32_mdma_of_xlate()
1519 chan->chan_config = config; in stm32_mdma_of_xlate()
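
The five cells parsed above form the channel specifier that clients reference from their "dmas" devicetree property: request line, priority level, a CTCR transfer-configuration word, mask address, and mask data. A consumer reaches this xlate callback through the usual channel request; a hypothetical example:

	struct dma_chan *chan;

	/* "tx" must match an entry in the client node's dma-names */
	chan = dma_request_chan(dev, "tx");
	if (IS_ERR(chan))
		return PTR_ERR(chan);
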
1525 { .compatible = "st,stm32h7-mdma", },
1541 of_node = pdev->dev.of_node; in stm32_mdma_probe()
1543 return -ENODEV; in stm32_mdma_probe()
1545 ret = device_property_read_u32(&pdev->dev, "dma-channels", in stm32_mdma_probe()
1549 dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n", in stm32_mdma_probe()
1553 ret = device_property_read_u32(&pdev->dev, "dma-requests", in stm32_mdma_probe()
1557 dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n", in stm32_mdma_probe()
1561 count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks"); in stm32_mdma_probe()
1565 dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count, in stm32_mdma_probe()
1568 return -ENOMEM; in stm32_mdma_probe()
1570 dmadev->nr_channels = nr_channels; in stm32_mdma_probe()
1571 dmadev->nr_requests = nr_requests; in stm32_mdma_probe()
1572 device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks", in stm32_mdma_probe()
1573 dmadev->ahb_addr_masks, in stm32_mdma_probe()
1575 dmadev->nr_ahb_addr_masks = count; in stm32_mdma_probe()
1578 dmadev->base = devm_ioremap_resource(&pdev->dev, res); in stm32_mdma_probe()
1579 if (IS_ERR(dmadev->base)) in stm32_mdma_probe()
1580 return PTR_ERR(dmadev->base); in stm32_mdma_probe()
1582 dmadev->clk = devm_clk_get(&pdev->dev, NULL); in stm32_mdma_probe()
1583 if (IS_ERR(dmadev->clk)) in stm32_mdma_probe()
1584 return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk), in stm32_mdma_probe()
1587 ret = clk_prepare_enable(dmadev->clk); in stm32_mdma_probe()
1589 dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret); in stm32_mdma_probe()
1593 rst = devm_reset_control_get(&pdev->dev, NULL); in stm32_mdma_probe()
1596 if (ret == -EPROBE_DEFER) in stm32_mdma_probe()
1604 dd = &dmadev->ddev; in stm32_mdma_probe()
1605 dma_cap_set(DMA_SLAVE, dd->cap_mask); in stm32_mdma_probe()
1606 dma_cap_set(DMA_PRIVATE, dd->cap_mask); in stm32_mdma_probe()
1607 dma_cap_set(DMA_CYCLIC, dd->cap_mask); in stm32_mdma_probe()
1608 dma_cap_set(DMA_MEMCPY, dd->cap_mask); in stm32_mdma_probe()
1609 dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources; in stm32_mdma_probe()
1610 dd->device_free_chan_resources = stm32_mdma_free_chan_resources; in stm32_mdma_probe()
1611 dd->device_tx_status = stm32_mdma_tx_status; in stm32_mdma_probe()
1612 dd->device_issue_pending = stm32_mdma_issue_pending; in stm32_mdma_probe()
1613 dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg; in stm32_mdma_probe()
1614 dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic; in stm32_mdma_probe()
1615 dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy; in stm32_mdma_probe()
1616 dd->device_config = stm32_mdma_slave_config; in stm32_mdma_probe()
1617 dd->device_pause = stm32_mdma_pause; in stm32_mdma_probe()
1618 dd->device_resume = stm32_mdma_resume; in stm32_mdma_probe()
1619 dd->device_terminate_all = stm32_mdma_terminate_all; in stm32_mdma_probe()
1620 dd->device_synchronize = stm32_mdma_synchronize; in stm32_mdma_probe()
1621 dd->descriptor_reuse = true; in stm32_mdma_probe()
1623 dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in stm32_mdma_probe()
1627 dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | in stm32_mdma_probe()
1631 dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) | in stm32_mdma_probe()
1633 dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in stm32_mdma_probe()
1634 dd->max_burst = STM32_MDMA_MAX_BURST; in stm32_mdma_probe()
1635 dd->dev = &pdev->dev; in stm32_mdma_probe()
1636 INIT_LIST_HEAD(&dd->channels); in stm32_mdma_probe()
1638 for (i = 0; i < dmadev->nr_channels; i++) { in stm32_mdma_probe()
1639 chan = &dmadev->chan[i]; in stm32_mdma_probe()
1640 chan->id = i; in stm32_mdma_probe()
1641 chan->vchan.desc_free = stm32_mdma_desc_free; in stm32_mdma_probe()
1642 vchan_init(&chan->vchan, dd); in stm32_mdma_probe()
1645 dmadev->irq = platform_get_irq(pdev, 0); in stm32_mdma_probe()
1646 if (dmadev->irq < 0) { in stm32_mdma_probe()
1647 ret = dmadev->irq; in stm32_mdma_probe()
1651 ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler, in stm32_mdma_probe()
1652 0, dev_name(&pdev->dev), dmadev); in stm32_mdma_probe()
1654 dev_err(&pdev->dev, "failed to request IRQ\n"); in stm32_mdma_probe()
1664 dev_err(&pdev->dev, in stm32_mdma_probe()
1665 "STM32 MDMA DMA OF registration failed %d\n", ret); in stm32_mdma_probe()
1670 pm_runtime_set_active(&pdev->dev); in stm32_mdma_probe()
1671 pm_runtime_enable(&pdev->dev); in stm32_mdma_probe()
1672 pm_runtime_get_noresume(&pdev->dev); in stm32_mdma_probe()
1673 pm_runtime_put(&pdev->dev); in stm32_mdma_probe()
1675 dev_info(&pdev->dev, "STM32 MDMA driver registered\n"); in stm32_mdma_probe()
1680 clk_disable_unprepare(dmadev->clk); in stm32_mdma_probe()
1690 clk_disable_unprepare(dmadev->clk); in stm32_mdma_runtime_suspend()
1700 ret = clk_prepare_enable(dmadev->clk); in stm32_mdma_runtime_resume()
1721 for (id = 0; id < dmadev->nr_channels; id++) { in stm32_mdma_pm_suspend()
1725 return -EBUSY; in stm32_mdma_pm_suspend()
1751 .name = "stm32-mdma",
1766 MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");