// SPDX-License-Identifier: GPL-2.0
// (C) 2017-2018 Synopsys, Inc. (www.synopsys.com)

/*
 * Synopsys DesignWare AXI DMA Controller driver.
 */

#include <linux/dma-mapping.h>
/* ... */
#include <linux/io-64-nonatomic-lo-hi.h>
/* ... */
#include "dw-axi-dmac.h"
#include "../virt-dma.h"
/*
 * The set of bus widths supported by the DMA controller. DW AXI DMAC supports
 * master data bus width up to 512 bits (for both AXI master interfaces), but
 * it depends on IP block configuration.
 */
/* Register access helpers: chip-global vs. per-channel register files. */
iowrite32(val, chip->regs + reg);                       /* axi_dma_iowrite32() */
return ioread32(chip->regs + reg);                      /* axi_dma_ioread32() */
iowrite32(val, chan->chan_regs + reg);                  /* axi_chan_iowrite32() */
return ioread32(chan->chan_regs + reg);                 /* axi_chan_ioread32() */

/* axi_chan_iowrite64(): a 64-bit register is written as two 32-bit halves. */
iowrite32(lower_32_bits(val), chan->chan_regs + reg);
iowrite32(upper_32_bits(val), chan->chan_regs + reg + 4);
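/*
 * Illustrative sketch (hypothetical helper, not part of the driver): the
 * matching 64-bit read, low word first, mirroring the lo-hi ordering that
 * <linux/io-64-nonatomic-lo-hi.h> gives readq()/writeq() on 32-bit builds.
 */
static inline u64 example_chan_ioread64(struct axi_dma_chan *chan, u32 reg)
{
        u32 lo = ioread32(chan->chan_regs + reg);       /* low half first */
        u32 hi = ioread32(chan->chan_regs + reg + 4);   /* then high half */

        return ((u64)hi << 32) | lo;
}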
static void axi_chan_config_write(struct axi_dma_chan *chan,
                                  struct axi_dma_chan_config *config)
{
        u32 cfg_lo, cfg_hi;

        cfg_lo = (config->dst_multblk_type << CH_CFG_L_DST_MULTBLK_TYPE_POS |
                  config->src_multblk_type << CH_CFG_L_SRC_MULTBLK_TYPE_POS);
        if (chan->chip->dw->hdata->reg_map_8_channels) {
                cfg_hi = config->tt_fc << CH_CFG_H_TT_FC_POS |
                         config->hs_sel_src << CH_CFG_H_HS_SEL_SRC_POS |
                         config->hs_sel_dst << CH_CFG_H_HS_SEL_DST_POS |
                         config->src_per << CH_CFG_H_SRC_PER_POS |
                         config->dst_per << CH_CFG_H_DST_PER_POS |
                         config->prior << CH_CFG_H_PRIORITY_POS;
        } else {
                cfg_lo |= config->src_per << CH_CFG2_L_SRC_PER_POS |
                          config->dst_per << CH_CFG2_L_DST_PER_POS;
                cfg_hi = config->tt_fc << CH_CFG2_H_TT_FC_POS |
                         config->hs_sel_src << CH_CFG2_H_HS_SEL_SRC_POS |
                         config->hs_sel_dst << CH_CFG2_H_HS_SEL_DST_POS |
                         config->prior << CH_CFG2_H_PRIORITY_POS;
        }
        axi_chan_iowrite32(chan, CH_CFG_L, cfg_lo);
        axi_chan_iowrite32(chan, CH_CFG_H, cfg_hi);
}
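/*
 * Minimal usage sketch (hypothetical values): program a linked-list,
 * memory-to-memory configuration. The field and enum names are the
 * driver's own, taken from the code above.
 */
struct axi_dma_chan_config cfg = {
        .dst_multblk_type = DWAXIDMAC_MBLK_TYPE_LL,
        .src_multblk_type = DWAXIDMAC_MBLK_TYPE_LL,
        .tt_fc = DWAXIDMAC_TT_FC_MEM_TO_MEM_DMAC,
        .prior = 0,
};

axi_chan_config_write(chan, &cfg);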
/* axi_chan_disable() */
val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
val &= ~(BIT(chan->id) << DMAC_CHAN_EN_SHIFT);
if (chan->chip->dw->hdata->reg_map_8_channels)
        val |= BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
else
        val |= BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
/* axi_chan_enable() */
val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
if (chan->chip->dw->hdata->reg_map_8_channels)
        val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
               BIT(chan->id) << DMAC_CHAN_EN_WE_SHIFT;
else
        val |= BIT(chan->id) << DMAC_CHAN_EN_SHIFT |
               BIT(chan->id) << DMAC_CHAN_EN2_WE_SHIFT;
axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
/* axi_chan_is_hw_enable() */
val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
return !!(val & (BIT(chan->id) << DMAC_CHAN_EN_SHIFT));
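/*
 * Illustrative sketch (not part of the driver, 8-channel register map
 * assumed): the hardware only latches an enable bit whose matching
 * write-enable bit is set in the same write, so one channel can be toggled
 * without a racy read-modify-write disturbing its siblings.
 */
static void example_set_chan_enable(struct axi_dma_chip *chip, u32 id, bool on)
{
        u32 val = on ? BIT(id) << DMAC_CHAN_EN_SHIFT : 0;

        val |= BIT(id) << DMAC_CHAN_EN_WE_SHIFT;        /* latch only this channel */
        axi_dma_iowrite32(chip, DMAC_CHEN, val);
}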
/* axi_dma_hw_init() */
for (i = 0; i < chip->dw->hdata->nr_channels; i++) {
        axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
        axi_chan_disable(&chip->dw->chan[i]);
}
ret = dma_set_mask_and_coherent(chip->dev, DMA_BIT_MASK(64));
if (ret)
        dev_warn(chip->dev, "Unable to set coherent mask\n");
u32 max_width = chan->chip->dw->hdata->m_data_width;    /* axi_chan_get_xfer_width() */

return dma_chan_name(&chan->vc.chan);                   /* axi_chan_name() */
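/*
 * Illustrative sketch (hypothetical helper): the transfer width is the
 * widest power-of-two beat that keeps source, destination and length
 * aligned, capped at the bus width; OR-ing in BIT(max_width) bounds the
 * __ffs() result at max_width.
 */
static u32 example_xfer_width(unsigned long src, unsigned long dst,
                              size_t len, u32 max_width)
{
        return __ffs(src | dst | len | BIT(max_width));
}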
/* axi_desc_alloc() */
desc->hw_desc = kcalloc(num, sizeof(*desc->hw_desc), GFP_NOWAIT);
if (!desc->hw_desc) {
        kfree(desc);
        return NULL;
}

/* axi_desc_get() */
lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &phys);
/* ... */
atomic_inc(&chan->descs_allocated);
/* axi_desc_put() */
struct axi_dma_chan *chan = desc->chan;
int count = atomic_read(&chan->descs_allocated);
/* ... */
hw_desc = &desc->hw_desc[descs_put];
dma_pool_free(chan->desc_pool, hw_desc->lli, hw_desc->llp);
/* ... */
kfree(desc->hw_desc);
atomic_sub(descs_put, &chan->descs_allocated);
dev_vdbg(chan2dev(chan), /* ... */
         atomic_read(&chan->descs_allocated));
/* dma_chan_tx_status() */
spin_lock_irqsave(&chan->vc.lock, flags);

vdesc = vchan_find_desc(&chan->vc, cookie);
if (vdesc) {
        length = vd_to_axi_desc(vdesc)->length;
        completed_blocks = vd_to_axi_desc(vdesc)->completed_blocks;
        len = vd_to_axi_desc(vdesc)->hw_desc[0].len;
        completed_length = completed_blocks * len;
        bytes = length - completed_length;
} else {
        bytes = vd_to_axi_desc(vdesc)->length;
}

spin_unlock_irqrestore(&chan->vc.lock, flags);
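/*
 * Client-side sketch (chan and cookie are hypothetical): the residue
 * computed above is what a consumer sees through dmaengine_tx_status().
 */
static u32 example_bytes_pending(struct dma_chan *chan, dma_cookie_t cookie)
{
        struct dma_tx_state state;

        dmaengine_tx_status(chan, cookie, &state);
        return state.residue;           /* bytes not yet transferred */
}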
desc->lli->llp = cpu_to_le64(adr);                      /* write_desc_llp() */
/* dw_axi_dma_set_byte_halfword() */
if (!chan->chip->apb_regs) {
        dev_dbg(chan->chip->dev, "apb_regs not initialized\n");
        return;
}

reg_width = __ffs(chan->config.dst_addr_width);
/* ... */
val = ioread32(chan->chip->apb_regs + offset);
if (set)
        val |= BIT(chan->id);
else
        val &= ~BIT(chan->id);
iowrite32(val, chan->chip->apb_regs + offset);
/* axi_chan_block_xfer_start() */
u32 priority = chan->chip->dw->hdata->priority[chan->id];
struct axi_dma_chan_config config = {};
/* ... */
if (unlikely(axi_chan_is_hw_enable(chan))) {
        dev_err(chan2dev(chan), "%s is non-idle!\n",
                axi_chan_name(chan));
        return;
}

axi_dma_enable(chan->chip);

config.dst_multblk_type = DWAXIDMAC_MBLK_TYPE_LL;
config.src_multblk_type = DWAXIDMAC_MBLK_TYPE_LL;
config.tt_fc = DWAXIDMAC_TT_FC_MEM_TO_MEM_DMAC;
config.prior = priority;
config.hs_sel_dst = DWAXIDMAC_HS_SEL_HW;
config.hs_sel_src = DWAXIDMAC_HS_SEL_HW;
switch (chan->direction) {
case DMA_MEM_TO_DEV:
        dw_axi_dma_set_byte_halfword(chan, true);
        config.tt_fc = chan->config.device_fc ?
                        DWAXIDMAC_TT_FC_MEM_TO_PER_DST :
                        DWAXIDMAC_TT_FC_MEM_TO_PER_DMAC;
        if (chan->chip->apb_regs)
                config.dst_per = chan->id;
        else
                config.dst_per = chan->hw_handshake_num;
        break;
case DMA_DEV_TO_MEM:
        config.tt_fc = chan->config.device_fc ?
                        DWAXIDMAC_TT_FC_PER_TO_MEM_SRC :
                        DWAXIDMAC_TT_FC_PER_TO_MEM_DMAC;
        if (chan->chip->apb_regs)
                config.src_per = chan->id;
        else
                config.src_per = chan->hw_handshake_num;
        break;
}
axi_chan_config_write(chan, &config);
/* ... */
write_chan_llp(chan, first->hw_desc[0].llp | lms);
/* axi_chan_start_first_queued() */
vd = vchan_next_desc(&chan->vc);
/* ... */
dev_vdbg(chan2dev(chan), /* ... */ vd->tx.cookie);
/* dma_chan_issue_pending() */
spin_lock_irqsave(&chan->vc.lock, flags);
if (vchan_issue_pending(&chan->vc))
        axi_chan_start_first_queued(chan);
spin_unlock_irqrestore(&chan->vc.lock, flags);

/* dw_axi_dma_synchronize() */
vchan_synchronize(&chan->vc);
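/*
 * Client-side sketch (dev, buf_dma, len and the "tx" channel name are all
 * hypothetical): work submitted through the generic dmaengine API is what
 * eventually reaches the issue_pending and start paths above.
 */
static int example_start_tx(struct device *dev, dma_addr_t buf_dma, size_t len)
{
        struct dma_chan *chan = dma_request_chan(dev, "tx");
        struct dma_async_tx_descriptor *txd;

        if (IS_ERR(chan))
                return PTR_ERR(chan);

        txd = dmaengine_prep_slave_single(chan, buf_dma, len,
                                          DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
        if (!txd) {
                dma_release_channel(chan);
                return -ENOMEM;
        }

        dmaengine_submit(txd);
        dma_async_issue_pending(chan);  /* reaches dma_chan_issue_pending() */
        return 0;
}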
/* dma_chan_alloc_chan_resources() */
if (axi_chan_is_hw_enable(chan)) {
        dev_err(chan2dev(chan), "%s is non-idle!\n",
                axi_chan_name(chan));
        return -EBUSY;
}

/* LLI address must be aligned to a 64-byte boundary */
chan->desc_pool = dma_pool_create(dev_name(chan2dev(chan)),
                                  chan->chip->dev,
                                  sizeof(struct axi_dma_lli), 64, 0);
if (!chan->desc_pool) {
        /* ... */
        return -ENOMEM;
}
pm_runtime_get(chan->chip->dev);
/* dma_chan_free_chan_resources() */
if (axi_chan_is_hw_enable(chan))
        dev_err(dchan2dev(dchan), "%s is non-idle!\n",
                axi_chan_name(chan));
/* ... */
vchan_free_chan_resources(&chan->vc);

dma_pool_destroy(chan->desc_pool);
chan->desc_pool = NULL;
dev_vdbg(dchan2dev(dchan), /* ... */
         axi_chan_name(chan), atomic_read(&chan->descs_allocated));
pm_runtime_put(chan->chip->dev);
/* dw_axi_dma_set_hw_channel() */
struct axi_dma_chip *chip = chan->chip;

if (!chip->apb_regs) {
        dev_err(chip->dev, "apb_regs not initialized\n");
        return;
}
/* ... */
val = chan->hw_handshake_num;
/* ... */
reg_value = lo_hi_readq(chip->apb_regs + DMAC_APB_HW_HS_SEL_0);
/* Clear this channel's handshake field, then program the new value. */
reg_value &= ~(DMA_APB_HS_SEL_MASK_BASE <<
               (chan->id * DMA_APB_HS_SEL_BIT_SIZE));
reg_value |= (val << (chan->id * DMA_APB_HS_SEL_BIT_SIZE));
lo_hi_writeq(reg_value, chip->apb_regs + DMAC_APB_HW_HS_SEL_0);
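/*
 * Illustrative sketch (hypothetical helper): updating one channel's field
 * inside the packed 64-bit handshake-select register, as done above.
 */
static u64 example_set_hs_field(u64 reg, u32 chan_id, u32 field_bits, u64 hs_num)
{
        u64 mask = (BIT_ULL(field_bits) - 1) << (chan_id * field_bits);

        reg &= ~mask;                                   /* clear the channel's field */
        return reg | (hs_num << (chan_id * field_bits)); /* program the new id */
}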
/* set_desc_last() */
val = le32_to_cpu(desc->lli->ctl_hi);
val |= CH_CTL_H_LLI_LAST;
desc->lli->ctl_hi = cpu_to_le32(val);

desc->lli->sar = cpu_to_le64(adr);                      /* write_desc_sar() */
desc->lli->dar = cpu_to_le64(adr);                      /* write_desc_dar() */

/* set_desc_src_master() */
val = le32_to_cpu(desc->lli->ctl_lo);
val &= ~CH_CTL_L_SRC_MAST;
desc->lli->ctl_lo = cpu_to_le32(val);

/* set_desc_dest_master() */
val = le32_to_cpu(hw_desc->lli->ctl_lo);
if (desc->chan->chip->dw->hdata->nr_masters > 1)
        val |= CH_CTL_L_DST_MAST;
else
        val &= ~CH_CTL_L_DST_MAST;
hw_desc->lli->ctl_lo = cpu_to_le32(val);
/* dw_axi_dma_set_hw_desc() */
unsigned int data_width = BIT(chan->chip->dw->hdata->m_data_width);
/* ... */
axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];
/* ... */
if (!IS_ALIGNED(mem_addr, 4)) {
        dev_err(chan->chip->dev, "invalid buffer alignment\n");
        return -EINVAL;
}
switch (chan->direction) {
case DMA_MEM_TO_DEV:
        reg_width = __ffs(chan->config.dst_addr_width);
        device_addr = chan->config.dst_addr;
        break;
case DMA_DEV_TO_MEM:
        reg_width = __ffs(chan->config.src_addr_width);
        device_addr = chan->config.src_addr;
        break;
default:
        return -EINVAL;
}
/* ... */
if (block_ts > axi_block_ts)
        return -EINVAL;
hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
if (unlikely(!hw_desc->lli))
        return -ENOMEM;

ctlhi = CH_CTL_H_LLI_VALID;
if (chan->chip->dw->hdata->restrict_axi_burst_len) {
        burst_len = chan->chip->dw->hdata->axi_rw_burst_len;
        ctlhi |= CH_CTL_H_ARLEN_EN | burst_len << CH_CTL_H_ARLEN_POS |
                 CH_CTL_H_AWLEN_EN | burst_len << CH_CTL_H_AWLEN_POS;
}
hw_desc->lli->ctl_hi = cpu_to_le32(ctlhi);

if (chan->direction == DMA_MEM_TO_DEV) {
        /* ... SAR = mem_addr, DAR = device_addr; swapped for DEV_TO_MEM */
}
hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);
/* ... */
hw_desc->lli->ctl_lo = cpu_to_le32(ctllo);
/* ... */
hw_desc->len = len;
/* calculate_block_len() */
axi_block_ts = chan->chip->dw->hdata->block_size[chan->id];
/* ... */
data_width = BIT(chan->chip->dw->hdata->m_data_width);
/* ... */
reg_width = __ffs(chan->config.src_addr_width);
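/*
 * Sketch (hypothetical helper): once the per-channel block limit (in beats)
 * and the beat width are known, the number of hardware blocks a buffer
 * needs follows directly.
 */
static u32 example_num_blocks(size_t len, u32 axi_block_ts, u32 mem_width)
{
        size_t block_len = (size_t)axi_block_ts << mem_width; /* bytes per block */

        return DIV_ROUND_UP(len, block_len);
}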
/* dw_axi_dma_chan_prep_cyclic() */
chan->direction = direction;
desc->chan = chan;
chan->cyclic = true;
desc->length = 0;
desc->period_len = period_len;

for (i = 0; i < total_segments; i++) {
        hw_desc = &desc->hw_desc[i];
        /* ... */
        desc->length += hw_desc->len;
        /* Set end-of-link to the linked descriptor, so that cyclic
         * callback function can be triggered during interrupt. */
        set_desc_last(hw_desc);
}

llp = desc->hw_desc[0].llp;
do {
        hw_desc = &desc->hw_desc[--total_segments];
        write_desc_llp(hw_desc, llp | lms);
        llp = hw_desc->llp;
} while (total_segments);

return vchan_tx_prep(&chan->vc, &desc->vd, flags);
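/*
 * Client-side sketch (hypothetical ring buffer and callback): cyclic work
 * submitted through the generic API lands in the prep routine above; the
 * callback fires once per period.
 */
static int example_start_ring(struct dma_chan *chan, dma_addr_t ring_dma,
                              size_t ring_len, size_t period_len,
                              dma_async_tx_callback period_done, void *ctx)
{
        struct dma_async_tx_descriptor *txd;

        txd = dmaengine_prep_dma_cyclic(chan, ring_dma, ring_len, period_len,
                                        DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
        if (!txd)
                return -ENOMEM;

        txd->callback = period_done;            /* fires once per period */
        txd->callback_param = ctx;
        dmaengine_submit(txd);
        dma_async_issue_pending(chan);
        return 0;
}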
/* dw_axi_dma_chan_prep_slave_sg() */
desc->chan = chan;
desc->length = 0;
chan->direction = direction;

for_each_sg(sgl, sg, sg_len, i) {
        /* ... */
        hw_desc = &desc->hw_desc[loop++];
        /* dw_axi_dma_set_hw_desc() fills this segment */
        desc->length += hw_desc->len;
        len -= segment_len;
}

/* Set end-of-link to the last link descriptor of list */
set_desc_last(&desc->hw_desc[num_sgs - 1]);

do {
        hw_desc = &desc->hw_desc[--num_sgs];
        write_desc_llp(hw_desc, llp | lms);
        llp = hw_desc->llp;
} while (num_sgs);

return vchan_tx_prep(&chan->vc, &desc->vd, flags);
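/*
 * Client-side sketch (hypothetical scatterlist): multi-segment slave
 * transfers enter through dmaengine_prep_slave_sg() and are chained into
 * one linked list by the code above.
 */
static int example_start_rx_sg(struct dma_chan *chan, struct scatterlist *sgl,
                               unsigned int nents)
{
        struct dma_async_tx_descriptor *txd;

        txd = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_DEV_TO_MEM,
                                      DMA_PREP_INTERRUPT);
        if (!txd)
                return -ENOMEM;

        dmaengine_submit(txd);
        dma_async_issue_pending(chan);
        return 0;
}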
/* dma_chan_prep_dma_memcpy() */
max_block_ts = chan->chip->dw->hdata->block_size[chan->id];
/* ... */
desc->chan = chan;
desc->length = 0;
while (len) {
        hw_desc = &desc->hw_desc[num];
        /* ... BLOCK_TS register should be set to block_ts - 1 */
        hw_desc->lli = axi_desc_get(chan, &hw_desc->llp);
        if (unlikely(!hw_desc->lli))
                goto err_desc_get;
        hw_desc->lli->block_ts_lo = cpu_to_le32(block_ts - 1);

        reg = CH_CTL_H_LLI_VALID;
        if (chan->chip->dw->hdata->restrict_axi_burst_len) {
                u32 burst_len = chan->chip->dw->hdata->axi_rw_burst_len;

                reg |= CH_CTL_H_ARLEN_EN | burst_len << CH_CTL_H_ARLEN_POS |
                       CH_CTL_H_AWLEN_EN | burst_len << CH_CTL_H_AWLEN_POS;
        }
        hw_desc->lli->ctl_hi = cpu_to_le32(reg);
        /* ... */
        hw_desc->lli->ctl_lo = cpu_to_le32(reg);

        hw_desc->len = xfer_len;
        desc->length += hw_desc->len;
        len -= xfer_len;
        num++;
}

/* Set end-of-link to the last link descriptor of list */
set_desc_last(&desc->hw_desc[num - 1]);
do {
        hw_desc = &desc->hw_desc[--num];
        write_desc_llp(hw_desc, llp | lms);
        llp = hw_desc->llp;
} while (num);

return vchan_tx_prep(&chan->vc, &desc->vd, flags);
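/*
 * Client-side sketch (hypothetical DMA addresses): a memcpy offload request
 * that exercises the prep routine above.
 */
static int example_memcpy_offload(struct dma_chan *chan, dma_addr_t dst,
                                  dma_addr_t src, size_t len)
{
        struct dma_async_tx_descriptor *txd;

        txd = dmaengine_prep_dma_memcpy(chan, dst, src, len,
                                        DMA_PREP_INTERRUPT);
        if (!txd)
                return -ENOMEM;

        dmaengine_submit(txd);
        dma_async_issue_pending(chan);
        return 0;
}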
/* dw_axi_dma_chan_slave_config() simply caches the client's parameters: */
memcpy(&chan->config, config, sizeof(*config));
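/*
 * Client-side sketch (hypothetical FIFO address): the parameters cached
 * above are consumed later when a slave transfer is prepped.
 */
static int example_config_tx(struct dma_chan *chan, dma_addr_t fifo_phys)
{
        struct dma_slave_config cfg = {
                .dst_addr = fifo_phys,
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst = 8,
        };

        return dmaengine_slave_config(chan, &cfg); /* cached by the driver */
}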
/* axi_chan_dump_lli() */
if (!desc->lli) {
        dev_err(dchan2dev(&chan->vc.chan), "NULL LLI\n");
        return;
}

dev_err(dchan2dev(&chan->vc.chan),
        "SAR: 0x%llx DAR: 0x%llx LLP: 0x%llx BTS 0x%x CTL: 0x%x:%08x",
        le64_to_cpu(desc->lli->sar),
        le64_to_cpu(desc->lli->dar),
        le64_to_cpu(desc->lli->llp),
        le32_to_cpu(desc->lli->block_ts_lo),
        le32_to_cpu(desc->lli->ctl_hi),
        le32_to_cpu(desc->lli->ctl_lo));

/* axi_chan_list_dump_lli() */
int count = atomic_read(&chan->descs_allocated);
/* ... */
for (i = 0; i < count; i++)
        axi_chan_dump_lli(chan, &desc_head->hw_desc[i]);
/* axi_chan_handle_err() */
spin_lock_irqsave(&chan->vc.lock, flags);
/* ... */
/* The bad descriptor currently is in the head of vc list */
vd = vchan_next_desc(&chan->vc);
list_del(&vd->node);
/* ... */
dev_err(chan2dev(chan), /* ... */
        axi_chan_name(chan), vd->tx.cookie, status);
spin_unlock_irqrestore(&chan->vc.lock, flags);
/* axi_chan_block_xfer_complete() */
int count = atomic_read(&chan->descs_allocated);
/* ... */
spin_lock_irqsave(&chan->vc.lock, flags);
/* ... */
vd = vchan_next_desc(&chan->vc);
if (chan->cyclic) {
        desc = vd_to_axi_desc(vd);
        llp = lo_hi_readq(chan->chan_regs + CH_LLP);
        for (i = 0; i < count; i++) {
                hw_desc = &desc->hw_desc[i];
                if (hw_desc->llp == llp) {
                        axi_chan_irq_clear(chan, hw_desc->lli->status_lo);
                        hw_desc->lli->ctl_hi |= CH_CTL_H_LLI_VALID;
                        desc->completed_blocks = i;

                        if (((hw_desc->len * (i + 1)) % desc->period_len) == 0)
                                vchan_cyclic_callback(vd);
                        break;
                }
        }
} else {
        list_del(&vd->node);
        vchan_cookie_complete(vd);
}
spin_unlock_irqrestore(&chan->vc.lock, flags);
/* dw_axi_dma_interrupt() */
struct dw_axi_dma *dw = chip->dw;
/* ... poll, clear and process every channel's interrupt status ... */
for (i = 0; i < dw->hdata->nr_channels; i++) {
        chan = &dw->chan[i];
        /* ... */
        dev_vdbg(chip->dev, "%s %u IRQ status: 0x%08x\n",
                 axi_chan_name(chan), i, status);
        /* ... */
}
axi_dma_irq_enable(chip);                               /* Re-enable interrupts */
/* dma_chan_terminate_all() */
u32 chan_active = BIT(chan->id) << DMAC_CHAN_EN_SHIFT;
/* ... */
axi_chan_disable(chan);

ret = readl_poll_timeout_atomic(chan->chip->regs + DMAC_CHEN, val,
                                !(val & chan_active), 1000, 50000);
if (ret == -ETIMEDOUT)
        dev_warn(dchan2dev(dchan), /* ... */);

if (chan->direction != DMA_MEM_TO_MEM)
        dw_axi_dma_set_hw_channel(chan, false);
if (chan->direction == DMA_MEM_TO_DEV)
        dw_axi_dma_set_byte_halfword(chan, false);

spin_lock_irqsave(&chan->vc.lock, flags);
vchan_get_all_descriptors(&chan->vc, &head);
chan->cyclic = false;
spin_unlock_irqrestore(&chan->vc.lock, flags);

vchan_dma_desc_free_list(&chan->vc, &head);
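/*
 * Client-side sketch: a clean shutdown. dmaengine_terminate_sync() invokes
 * dma_chan_terminate_all() above and then waits out any running completion
 * callbacks (via dw_axi_dma_synchronize()).
 */
dmaengine_terminate_sync(chan);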
/* dma_chan_pause() */
spin_lock_irqsave(&chan->vc.lock, flags);

if (chan->chip->dw->hdata->reg_map_8_channels) {
        val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
        val |= BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT |
               BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT;
        axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
} else {
        val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
        val |= BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT |
               BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT;
        axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, val);
}

do {
        if (axi_chan_irq_read(chan) & DWAXIDMAC_IRQ_SUSPENDED)
                break;

        udelay(2);
} while (--timeout);

axi_chan_irq_clear(chan, DWAXIDMAC_IRQ_SUSPENDED);
chan->is_paused = true;

spin_unlock_irqrestore(&chan->vc.lock, flags);

return timeout ? 0 : -EAGAIN;
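/*
 * Client-side sketch: pausing maps onto the SUSP/SUSP2 bits programmed
 * above; the driver polls DWAXIDMAC_IRQ_SUSPENDED before reporting success.
 */
if (!dmaengine_pause(chan))             /* 0 on success */
        dmaengine_resume(chan);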
/* axi_chan_resume() */
if (chan->chip->dw->hdata->reg_map_8_channels) {
        val = axi_dma_ioread32(chan->chip, DMAC_CHEN);
        val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP_SHIFT);
        val |= (BIT(chan->id) << DMAC_CHAN_SUSP_WE_SHIFT);
        axi_dma_iowrite32(chan->chip, DMAC_CHEN, val);
} else {
        val = axi_dma_ioread32(chan->chip, DMAC_CHSUSPREG);
        val &= ~(BIT(chan->id) << DMAC_CHAN_SUSP2_SHIFT);
        val |= (BIT(chan->id) << DMAC_CHAN_SUSP2_WE_SHIFT);
        axi_dma_iowrite32(chan->chip, DMAC_CHSUSPREG, val);
}

chan->is_paused = false;
/* dma_chan_resume() */
spin_lock_irqsave(&chan->vc.lock, flags);
if (chan->is_paused)
        axi_chan_resume(chan);
spin_unlock_irqrestore(&chan->vc.lock, flags);
/* axi_dma_suspend() */
clk_disable_unprepare(chip->core_clk);
clk_disable_unprepare(chip->cfgr_clk);

/* axi_dma_resume(): re-enable the clocks in the opposite order */
ret = clk_prepare_enable(chip->cfgr_clk);
/* ... */
ret = clk_prepare_enable(chip->core_clk);
/* dw_axi_dma_of_xlate(): args[0] of the DT dma spec selects the handshake */
struct dw_axi_dma *dw = ofdma->of_dma_data;
/* ... */
dchan = dma_get_any_slave_channel(&dw->dma);
/* ... */
chan->hw_handshake_num = dma_spec->args[0];
/* parse_device_properties() */
struct device *dev = chip->dev;

ret = device_property_read_u32(dev, "dma-channels", &tmp);
if (ret)
        return ret;
if (tmp == 0 || tmp > DMAC_MAX_CHANNELS)
        return -EINVAL;

chip->dw->hdata->nr_channels = tmp;
if (tmp <= DMA_REG_MAP_CH_REF)
        chip->dw->hdata->reg_map_8_channels = true;

ret = device_property_read_u32(dev, "snps,dma-masters", &tmp);
if (ret)
        return ret;
if (tmp == 0 || tmp > DMAC_MAX_MASTERS)
        return -EINVAL;
chip->dw->hdata->nr_masters = tmp;

ret = device_property_read_u32(dev, "snps,data-width", &tmp);
if (ret)
        return ret;
if (tmp > DWAXIDMAC_TRANS_WIDTH_MAX)
        return -EINVAL;
chip->dw->hdata->m_data_width = tmp;

ret = device_property_read_u32_array(dev, "snps,block-size", carr,
                                     chip->dw->hdata->nr_channels);
if (ret)
        return ret;
for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
        if (carr[tmp] == 0 || carr[tmp] > DMAC_MAX_BLK_SIZE)
                return -EINVAL;
        chip->dw->hdata->block_size[tmp] = carr[tmp];
}

ret = device_property_read_u32_array(dev, "snps,priority", carr,
                                     chip->dw->hdata->nr_channels);
if (ret)
        return ret;
/* Priority value must be programmed within [0:nr_channels-1] range */
for (tmp = 0; tmp < chip->dw->hdata->nr_channels; tmp++) {
        if (carr[tmp] >= chip->dw->hdata->nr_channels)
                return -EINVAL;
        chip->dw->hdata->priority[tmp] = carr[tmp];
}

/* axi-max-burst-len is an optional property */
ret = device_property_read_u32(dev, "snps,axi-max-burst-len", &tmp);
if (!ret) {
        if (tmp > DWAXIDMAC_ARWLEN_MAX + 1 || tmp < DWAXIDMAC_ARWLEN_MIN + 1)
                return -EINVAL;
        chip->dw->hdata->restrict_axi_burst_len = true;
        chip->dw->hdata->axi_rw_burst_len = tmp;
}

return 0;
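/*
 * Illustrative device-tree node (hypothetical values) matching the
 * properties parsed above:
 *
 *      dma-controller@80000000 {
 *              compatible = "snps,axi-dma-1.01a";
 *              reg = <0x80000000 0x1000>;
 *              clocks = <&clk 1>, <&clk 2>;
 *              clock-names = "core-clk", "cfgr-clk";
 *              interrupts = <27>;
 *              #dma-cells = <1>;
 *              dma-channels = <4>;
 *              snps,dma-masters = <2>;
 *              snps,data-width = <3>;
 *              snps,block-size = <4096 4096 4096 4096>;
 *              snps,priority = <0 1 2 3>;
 *              snps,axi-max-burst-len = <16>;
 *      };
 */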
/* dw_probe() */
struct device_node *node = pdev->dev.of_node;
/* ... */
chip = devm_kzalloc(&pdev->dev, sizeof(*chip), GFP_KERNEL);
if (!chip)
        return -ENOMEM;

dw = devm_kzalloc(&pdev->dev, sizeof(*dw), GFP_KERNEL);
if (!dw)
        return -ENOMEM;

hdata = devm_kzalloc(&pdev->dev, sizeof(*hdata), GFP_KERNEL);
if (!hdata)
        return -ENOMEM;

chip->dw = dw;
chip->dev = &pdev->dev;
chip->dw->hdata = hdata;

chip->irq = platform_get_irq(pdev, 0);
if (chip->irq < 0)
        return chip->irq;

mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
chip->regs = devm_ioremap_resource(chip->dev, mem);
if (IS_ERR(chip->regs))
        return PTR_ERR(chip->regs);

if (of_device_is_compatible(node, "intel,kmb-axi-dma")) {
        chip->apb_regs = devm_platform_ioremap_resource(pdev, 1);
        if (IS_ERR(chip->apb_regs))
                return PTR_ERR(chip->apb_regs);
}

chip->core_clk = devm_clk_get(chip->dev, "core-clk");
if (IS_ERR(chip->core_clk))
        return PTR_ERR(chip->core_clk);

chip->cfgr_clk = devm_clk_get(chip->dev, "cfgr-clk");
if (IS_ERR(chip->cfgr_clk))
        return PTR_ERR(chip->cfgr_clk);

ret = parse_device_properties(chip);
if (ret)
        return ret;

dw->chan = devm_kcalloc(chip->dev, hdata->nr_channels,
                        sizeof(*dw->chan), GFP_KERNEL);
if (!dw->chan)
        return -ENOMEM;

ret = devm_request_irq(chip->dev, chip->irq, dw_axi_dma_interrupt,
                       IRQF_SHARED, KBUILD_MODNAME, chip);
if (ret)
        return ret;
INIT_LIST_HEAD(&dw->dma.channels);
for (i = 0; i < hdata->nr_channels; i++) {
        struct axi_dma_chan *chan = &dw->chan[i];

        chan->chip = chip;
        chan->id = i;
        chan->chan_regs = chip->regs + COMMON_REG_LEN + i * CHAN_REG_LEN;
        atomic_set(&chan->descs_allocated, 0);

        chan->vc.desc_free = vchan_desc_put;
        vchan_init(&chan->vc, &dw->dma);
}

/* Set capabilities */
dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask);
dma_cap_set(DMA_SLAVE, dw->dma.cap_mask);
dma_cap_set(DMA_CYCLIC, dw->dma.cap_mask);
/* DMA capabilities */
dw->dma.chancnt = hdata->nr_channels;
dw->dma.max_burst = hdata->axi_rw_burst_len;
dw->dma.src_addr_widths = AXI_DMA_BUSWIDTHS;
dw->dma.dst_addr_widths = AXI_DMA_BUSWIDTHS;
dw->dma.directions = BIT(DMA_MEM_TO_MEM);
dw->dma.directions |= BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM);
dw->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;

dw->dma.dev = chip->dev;
dw->dma.device_tx_status = dma_chan_tx_status;
dw->dma.device_issue_pending = dma_chan_issue_pending;
dw->dma.device_terminate_all = dma_chan_terminate_all;
dw->dma.device_pause = dma_chan_pause;
dw->dma.device_resume = dma_chan_resume;

dw->dma.device_alloc_chan_resources = dma_chan_alloc_chan_resources;
dw->dma.device_free_chan_resources = dma_chan_free_chan_resources;

dw->dma.device_prep_dma_memcpy = dma_chan_prep_dma_memcpy;
dw->dma.device_synchronize = dw_axi_dma_synchronize;
dw->dma.device_config = dw_axi_dma_chan_slave_config;
dw->dma.device_prep_slave_sg = dw_axi_dma_chan_prep_slave_sg;
dw->dma.device_prep_dma_cyclic = dw_axi_dma_chan_prep_cyclic;
dw->dma.dev->dma_parms = &dw->dma_parms;
dma_set_max_seg_size(&pdev->dev, MAX_BLOCK_SIZE);
/* ... */
pm_runtime_enable(chip->dev);
/* ... */
pm_runtime_get_noresume(chip->dev);
/* ... */
pm_runtime_put(chip->dev);

ret = dmaenginem_async_device_register(&dw->dma);
/* ... */
ret = of_dma_controller_register(pdev->dev.of_node,
                                 dw_axi_dma_of_xlate, dw);
if (ret < 0)
        dev_warn(&pdev->dev, /* ... */);

dev_info(chip->dev, "DesignWare AXI DMA Controller, %d channels\n",
         dw->hdata->nr_channels);
/* ... */
err_pm_disable:
        pm_runtime_disable(chip->dev);
/* dw_remove() */
struct dw_axi_dma *dw = chip->dw;
/* ... */
/* Enable clk before accessing to registers */
clk_prepare_enable(chip->cfgr_clk);
clk_prepare_enable(chip->core_clk);
for (i = 0; i < dw->hdata->nr_channels; i++) {
        axi_chan_disable(&chip->dw->chan[i]);
        axi_chan_irq_disable(&chip->dw->chan[i], DWAXIDMAC_IRQ_ALL);
}
axi_dma_disable(chip);

pm_runtime_disable(chip->dev);
axi_dma_suspend(chip);

devm_free_irq(chip->dev, chip->irq, chip);

of_dma_controller_free(chip->dev->of_node);

list_for_each_entry_safe(chan, _chan, &dw->dma.channels,
                         vc.chan.device_node) {
        list_del(&chan->vc.chan.device_node);
        tasklet_kill(&chan->vc.task);
}
static const struct of_device_id dw_dma_of_id_table[] = {
        { .compatible = "snps,axi-dma-1.01a" },
        { .compatible = "intel,kmb-axi-dma" },
        {}
};
/* ... */
MODULE_DESCRIPTION("Synopsys DesignWare AXI DMA Controller platform driver");