omap-dma.c: OMAP sDMA DMAengine driver, lines matching +full:omap +full:-sdma
(excerpts only; elided code is marked /* ... */ and each fragment is headed by
a comment naming its enclosing function)

// SPDX-License-Identifier: GPL-2.0-only
/*
 * OMAP DMAengine support
 */
#include <linux/dma-mapping.h>
#include <linux/omap-dma.h>
#include "../virt-dma.h"

/* per-segment element and frame counts (struct omap_sg) */
	uint32_t en;		/* number of elements (24-bit) */
	uint32_t fn;		/* number of frames (16-bit) */

/* in omap_dma_desc_free() */
	struct omap_desc *d = to_omap_dma_desc(&vd->tx);

	if (d->using_ll) {
		struct omap_dmadev *od = to_omap_dma_dev(vd->tx.chan->device);

		for (i = 0; i < d->sglen; i++) {
			if (d->sg[i].t2_desc)
				dma_pool_free(od->desc_pool, d->sg[i].t2_desc,
					      d->sg[i].t2_desc_paddr);
		}
	}

/* in omap_dma_fill_type2_desc() */
	struct omap_sg *sg = &d->sg[idx];
	struct omap_type2_desc *t2_desc = sg->t2_desc;

	if (idx)
		d->sg[idx - 1].t2_desc->next_desc = sg->t2_desc_paddr;
	if (last)
		t2_desc->next_desc = 0xfffffffc;

	t2_desc->en = sg->en;
	t2_desc->addr = sg->addr;
	t2_desc->fn = sg->fn & 0xffff;
	t2_desc->cicr = d->cicr;
	if (!last)
		t2_desc->cicr &= ~CICR_BLOCK_IE;

	if (dir == DMA_DEV_TO_MEM) {
		t2_desc->cdei = sg->ei;
		t2_desc->csei = d->ei;
		t2_desc->cdfi = sg->fi;
		t2_desc->csfi = d->fi;

		t2_desc->en |= DESC_NXT_DV_REFRESH;
		t2_desc->en |= DESC_NXT_SV_REUSE;
	} else {	/* DMA_MEM_TO_DEV */
		t2_desc->cdei = d->ei;
		t2_desc->csei = sg->ei;
		t2_desc->cdfi = d->fi;
		t2_desc->csfi = sg->fi;

		t2_desc->en |= DESC_NXT_SV_REFRESH;
		t2_desc->en |= DESC_NXT_DV_REUSE;
	}

	t2_desc->en |= DESC_NTYPE_TYPE2;
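
/*
 * The accesses above pin down the layout of the hardware type 2
 * linked-list descriptor.  A sketch reconstructed from those field
 * names; the widths and ordering are assumptions, not taken from the
 * driver's definitions.
 */
struct omap_type2_desc {
	uint32_t next_desc;	/* paddr of next descriptor; 0xfffffffc ends the list */
	uint32_t en;		/* element count plus DESC_* control bits */
	uint32_t addr;		/* source or destination address */
	uint16_t fn;		/* frame count (hence the & 0xffff above) */
	uint16_t cicr;		/* per-descriptor interrupt control */
	int16_t cdei;		/* destination element index */
	int16_t csei;		/* source element index */
	int32_t cdfi;		/* destination frame index */
	int32_t csfi;		/* source frame index */
} __packed;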

/* in omap_dma_glbl_write() */
	const struct omap_dma_reg *r = od->reg_map + reg;

	WARN_ON(r->stride);

	omap_dma_write(val, r->type, od->base + r->offset);

/* in omap_dma_glbl_read() */
	const struct omap_dma_reg *r = od->reg_map + reg;

	WARN_ON(r->stride);

	return omap_dma_read(r->type, od->base + r->offset);

/* in omap_dma_chan_write() */
	const struct omap_dma_reg *r = c->reg_map + reg;

	omap_dma_write(val, r->type, c->channel_base + r->offset);

/* in omap_dma_chan_read() */
	const struct omap_dma_reg *r = c->reg_map + reg;

	return omap_dma_read(r->type, c->channel_base + r->offset);
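
/*
 * omap_dma_read()/omap_dma_write() themselves are not among the matched
 * lines.  A minimal sketch of the writer, assuming OMAP_DMA_REG_* width
 * codes stored in the reg_map: 2X16BIT models one logical 32-bit value
 * split across two adjacent 16-bit registers.
 */
static void omap_dma_write(uint32_t val, unsigned type, void __iomem *addr)
{
	switch (type) {
	case OMAP_DMA_REG_16BIT:
		writew_relaxed(val, addr);
		break;
	case OMAP_DMA_REG_2X16BIT:
		writew_relaxed(val, addr);
		writew_relaxed(val >> 16, addr + 2);
		break;
	case OMAP_DMA_REG_32BIT:
		writel_relaxed(val, addr);
		break;
	default:
		WARN_ON(1);
	}
}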

/* in omap_dma_clear_lch() */
	c = od->lch_map[lch];
	/* ... */
	for (i = CSDP; i <= od->cfg->lch_end; i++)
		/* ... */

/* in omap_dma_assign() */
	c->channel_base = od->base + od->plat->channel_stride * lch;

	od->lch_map[lch] = c;

/* in omap_dma_start() */
	struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
	uint16_t cicr = d->cicr;

	if (__dma_omap15xx(od->plat->dma_attr))
		/* ... */
	if (d->using_ll) {
		/* ... */
		if (d->dir == DMA_DEV_TO_MEM)
			/* ... */
		omap_dma_chan_write(c, CNDP, d->sg[0].t2_desc_paddr);
		/* ... */
	} else if (od->ll123_supported) {
		/* ... */
	}
	omap_dma_chan_write(c, CCR, d->ccr | CCR_ENABLE);

	c->running = true;

/* in omap_dma_drain_chan() */
	/* Wait for sDMA FIFO to drain */
	/* ... */
		dev_err(c->vc.chan.device->dev,
			/* ... */
			c->dma_ch);

/* in omap_dma_stop() */
	struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
	/* ... */
	if (od->plat->errata & DMA_ERRATA_i541 && val & CCR_TRIGGER_SRC) {
	/* ... */
	if (!(c->ccr & CCR_BUFFERING_DISABLE))
	/* ... */
		return -EINVAL;
	/* ... */
	if (!(c->ccr & CCR_BUFFERING_DISABLE))
	/* ... */
	if (!__dma_omap15xx(od->plat->dma_attr) && c->cyclic) {
	/* ... */
	c->running = false;

/* in omap_dma_start_sg() */
	struct omap_sg *sg = d->sg + c->sgidx;
	/* ... */
	if (d->dir == DMA_DEV_TO_MEM || d->dir == DMA_MEM_TO_MEM) {
		/* ... */
	}
	omap_dma_chan_write(c, cxsa, sg->addr);
	omap_dma_chan_write(c, cxei, sg->ei);
	omap_dma_chan_write(c, cxfi, sg->fi);
	omap_dma_chan_write(c, CEN, sg->en);
	omap_dma_chan_write(c, CFN, sg->fn);
	/* ... */
	c->sgidx++;
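
/*
 * The elided lines ahead of the cxsa/cxei/cxfi writes choose the source
 * or destination register set by transfer direction; the same pattern
 * recurs in omap_dma_start_desc() below.  A sketch, where the CSSA/CDSA
 * style register names are assumptions:
 */
	unsigned cxsa, cxei, cxfi;

	if (d->dir == DMA_DEV_TO_MEM || d->dir == DMA_MEM_TO_MEM) {
		/* the sg entries describe the memory side: the destination */
		cxsa = CDSA;
		cxei = CDEI;
		cxfi = CDFI;
	} else {
		/* DMA_MEM_TO_DEV: the sg entries describe the source */
		cxsa = CSSA;
		cxei = CSEI;
		cxfi = CSFI;
	}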

/* in omap_dma_start_desc() */
	struct virt_dma_desc *vd = vchan_next_desc(&c->vc);

	if (!vd) {
		c->desc = NULL;
		return;
	}
	list_del(&vd->node);

	c->desc = d = to_omap_dma_desc(&vd->tx);
	c->sgidx = 0;
	/* ... */
	omap_dma_chan_write(c, CCR, d->ccr);
	if (dma_omap1())
		omap_dma_chan_write(c, CCR2, d->ccr >> 16);

	if (d->dir == DMA_DEV_TO_MEM || d->dir == DMA_MEM_TO_MEM) {
		/* ... */
	}
	omap_dma_chan_write(c, cxsa, d->dev_addr);
	omap_dma_chan_write(c, cxei, d->ei);
	omap_dma_chan_write(c, cxfi, d->fi);
	omap_dma_chan_write(c, CSDP, d->csdp);
	omap_dma_chan_write(c, CLNK_CTRL, d->clnk_ctrl);

/* in omap_dma_callback() */
	spin_lock_irqsave(&c->vc.lock, flags);
	d = c->desc;
	/* ... */
	if (c->cyclic) {
		vchan_cyclic_callback(&d->vd);
	} else if (d->using_ll || c->sgidx == d->sglen) {
		/* ... */
		vchan_cookie_complete(&d->vd);
	}
	/* ... */
	spin_unlock_irqrestore(&c->vc.lock, flags);

/* in omap_dma_irq() */
	spin_lock(&od->irq_lock);
	/* ... */
	status &= od->irq_enable_mask;
	/* ... */
		spin_unlock(&od->irq_lock);
	/* ... */
		channel -= 1;
	/* ... */
		c = od->lch_map[channel];
	/* ... */
			dev_err(od->ddev.dev, "invalid channel %u\n", channel);
	/* ... */
	spin_unlock(&od->irq_lock);

/* in omap_dma_get_lch() */
	mutex_lock(&od->lch_lock);
	channel = find_first_zero_bit(od->lch_bitmap, od->lch_count);
	if (channel >= od->lch_count)
		/* ... */
	set_bit(channel, od->lch_bitmap);
	mutex_unlock(&od->lch_lock);
	/* ... error path: */
	mutex_unlock(&od->lch_lock);
	*lch = -EINVAL;

	return -EBUSY;

/* in omap_dma_put_lch() */
	mutex_lock(&od->lch_lock);
	clear_bit(lch, od->lch_bitmap);
	mutex_unlock(&od->lch_lock);
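
/*
 * Pieced together, the omap_dma_get_lch() fragments above form a
 * mutex-protected bitmap allocator.  A plausible reconstruction (the
 * label name is an assumption):
 */
static int omap_dma_get_lch(struct omap_dmadev *od, int *lch)
{
	int channel;

	mutex_lock(&od->lch_lock);
	channel = find_first_zero_bit(od->lch_bitmap, od->lch_count);
	if (channel >= od->lch_count)
		goto out_busy;		/* every logical channel is taken */
	set_bit(channel, od->lch_bitmap);
	mutex_unlock(&od->lch_lock);

	*lch = channel;
	return 0;

out_busy:
	mutex_unlock(&od->lch_lock);
	*lch = -EINVAL;
	return -EBUSY;
}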

/* in omap_dma_alloc_chan_resources() */
	struct omap_dmadev *od = to_omap_dma_dev(chan->device);
	/* ... */
	struct device *dev = od->ddev.dev;

	if (od->legacy) {
		ret = omap_request_dma(c->dma_sig, "DMA engine",
				       omap_dma_callback, c, &c->dma_ch);
	} else {
		ret = omap_dma_get_lch(od, &c->dma_ch);
	}
	/* ... */
	dev_dbg(dev, "allocating channel %u for %u\n", c->dma_ch, c->dma_sig);
	/* ... */
	omap_dma_assign(od, c, c->dma_ch);

	if (!od->legacy) {
		/* ... */
		spin_lock_irq(&od->irq_lock);
		val = BIT(c->dma_ch);
		/* ... */
		od->irq_enable_mask |= val;
		omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);
		/* ... */
		val &= ~BIT(c->dma_ch);
		/* ... */
		spin_unlock_irq(&od->irq_lock);
	}
	/* ... */
	if (__dma_omap16xx(od->plat->dma_attr)) {
		c->ccr = CCR_OMAP31_DISABLE;
		/* Duplicate what plat-omap/dma.c does */
		c->ccr |= c->dma_ch + 1;
	/* ... */
		c->ccr = c->dma_sig & 0x1f;
	/* ... */
		c->ccr = c->dma_sig & 0x1f;
		c->ccr |= (c->dma_sig & ~0x1f) << 14;
	}

	if (od->plat->errata & DMA_ERRATA_IFRAME_BUFFERING)
		c->ccr |= CCR_BUFFERING_DISABLE;

/* in omap_dma_free_chan_resources() */
	struct omap_dmadev *od = to_omap_dma_dev(chan->device);
	/* ... */
	if (!od->legacy) {
		spin_lock_irq(&od->irq_lock);
		od->irq_enable_mask &= ~BIT(c->dma_ch);
		omap_dma_glbl_write(od, IRQENABLE_L1, od->irq_enable_mask);
		spin_unlock_irq(&od->irq_lock);
	}
	/* ... */
	c->channel_base = NULL;
	od->lch_map[c->dma_ch] = NULL;
	vchan_free_chan_resources(&c->vc);

	if (od->legacy)
		omap_free_dma(c->dma_ch);
	else
		omap_dma_put_lch(od, c->dma_ch);

	dev_dbg(od->ddev.dev, "freeing channel %u used for %u\n", c->dma_ch,
		c->dma_sig);
	c->dma_sig = 0;

/* in omap_dma_sg_size() */
	return sg->en * sg->fn;

/* in omap_dma_desc_size() */
	for (size = i = 0; i < d->sglen; i++)
		size += omap_dma_sg_size(&d->sg[i]);

	return size * es_bytes[d->es];

/* in omap_dma_desc_size_pos() */
	size_t size, es_size = es_bytes[d->es];

	for (size = i = 0; i < d->sglen; i++) {
		size_t this_size = omap_dma_sg_size(&d->sg[i]) * es_size;
		/* ... */
		else if (addr >= d->sg[i].addr &&
			 addr < d->sg[i].addr + this_size)
			size += d->sg[i].addr + this_size - addr;
	}
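
/*
 * omap_dma_sg_size() counts elements (en elements per frame times fn
 * frames); bytes appear only once the element-size code is applied via
 * es_bytes[].  A worked example with an assumed es_bytes[] mapping:
 */
static const unsigned example_es_bytes[] = { 1, 2, 4 };	/* assumed codes */

static size_t example_desc_bytes(void)
{
	uint32_t en = 32, fn = 512;	/* 32 elements per frame, 512 frames */
	unsigned es = 2;		/* 32-bit elements under the assumed map */

	return (size_t)en * fn * example_es_bytes[es];	/* 65536 bytes */
}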

/*
 * OMAP 3.2/3.3 erratum: sometimes 0 is returned if CSAC/CDAC is
 * read too early; the register is then read again.
 */

/* in omap_dma_chan_read_3_3() */
	struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
	/* ... */
	if (val == 0 && od->plat->errata & DMA_ERRATA_3_3)
		/* ... */

/* in omap_dma_get_src_pos() and omap_dma_get_dst_pos() */
	struct omap_dmadev *od = to_omap_dma_dev(c->vc.chan.device);
	/* ... */
	if (__dma_omap15xx(od->plat->dma_attr)) {
		/* ... */
	}

/* in omap_dma_tx_status() */
	spin_lock_irqsave(&c->vc.lock, flags);
	if (c->desc && c->desc->vd.tx.cookie == cookie)
		d = c->desc;
	/* ... */
		if (d->dir == DMA_MEM_TO_DEV)
			/* ... */
		else if (d->dir == DMA_DEV_TO_MEM || d->dir == DMA_MEM_TO_MEM)
			/* ... */
		txstate->residue = omap_dma_desc_size_pos(d, pos);
	/* ... */
		struct virt_dma_desc *vd = vchan_find_desc(&c->vc, cookie);

		if (vd)
			txstate->residue = omap_dma_desc_size(
				to_omap_dma_desc(&vd->tx));
		else
			txstate->residue = 0;
	/* ... */
	if (ret == DMA_IN_PROGRESS && c->paused) {
		/* ... */
	} else if (d && d->polled && c->running) {
		/* ... */
			vchan_cookie_complete(&d->vd);
	}
	/* ... */
	spin_unlock_irqrestore(&c->vc.lock, flags);
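
/*
 * From the client side, the residue computed above is reached through
 * the standard dmaengine helper.  A hypothetical caller:
 */
static void example_report_progress(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(chan, cookie, &state);
	if (status == DMA_IN_PROGRESS || status == DMA_PAUSED)
		pr_debug("residue: %u bytes\n", state.residue);
	else if (status == DMA_COMPLETE)
		pr_debug("transfer complete\n");
}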

/* in omap_dma_issue_pending() */
	spin_lock_irqsave(&c->vc.lock, flags);
	if (vchan_issue_pending(&c->vc) && !c->desc)
		/* ... */
	spin_unlock_irqrestore(&c->vc.lock, flags);

/* in omap_dma_prep_slave_sg() */
	struct omap_dmadev *od = to_omap_dma_dev(chan->device);
	/* ... */
	if (dir == DMA_DEV_TO_MEM) {
		dev_addr = c->cfg.src_addr;
		dev_width = c->cfg.src_addr_width;
		burst = c->cfg.src_maxburst;
		port_window = c->cfg.src_port_window_size;
	} else if (dir == DMA_MEM_TO_DEV) {
		dev_addr = c->cfg.dst_addr;
		dev_width = c->cfg.dst_addr_width;
		burst = c->cfg.dst_maxburst;
		port_window = c->cfg.dst_port_window_size;
	} else {
		dev_err(chan->device->dev, "%s: bad direction?\n", __func__);
		/* ... */
	}
	/* ... */
	d->dir = dir;
	d->dev_addr = dev_addr;
	d->es = es;
	/* ... */
		d->ei = 1;
		/*
		 * By setting the source frame index to -1 * (port_window - 1)
		 * we instruct the sDMA that after a frame is processed it
		 * should move back to the start of the window.
		 */
		d->fi = -(port_window_bytes - 1);
	/* ... */
	d->ccr = c->ccr | CCR_SYNC_FRAME;
	if (dir == DMA_DEV_TO_MEM) {
		d->csdp = CSDP_DST_BURST_64 | CSDP_DST_PACKED;

		d->ccr |= CCR_DST_AMODE_POSTINC;
		if (port_window) {
			d->ccr |= CCR_SRC_AMODE_DBLIDX;
			/* ... */
				d->csdp |= CSDP_SRC_BURST_64;
			/* ... */
				d->csdp |= CSDP_SRC_BURST_32;
			/* ... */
				d->csdp |= CSDP_SRC_BURST_16;
		} else {
			d->ccr |= CCR_SRC_AMODE_CONSTANT;
		}
	} else {
		d->csdp = CSDP_SRC_BURST_64 | CSDP_SRC_PACKED;

		d->ccr |= CCR_SRC_AMODE_POSTINC;
		if (port_window) {
			d->ccr |= CCR_DST_AMODE_DBLIDX;
			/* ... */
				d->csdp |= CSDP_DST_BURST_64;
			/* ... */
				d->csdp |= CSDP_DST_BURST_32;
			/* ... */
				d->csdp |= CSDP_DST_BURST_16;
		} else {
			d->ccr |= CCR_DST_AMODE_CONSTANT;
		}
	}

	d->cicr = CICR_DROP_IE | CICR_BLOCK_IE;
	d->csdp |= es;

	if (dma_omap1()) {
		d->cicr |= CICR_TOUT_IE;

		if (dir == DMA_DEV_TO_MEM)
			d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_TIPB;
		else
			d->csdp |= CSDP_DST_PORT_TIPB | CSDP_SRC_PORT_EMIFF;
	} else {
		if (dir == DMA_DEV_TO_MEM)
			d->ccr |= CCR_TRIGGER_SRC;

		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;
	}
	/* ... */
		d->csdp |= CSDP_WRITE_LAST_NON_POSTED;

	if (od->plat->errata & DMA_ERRATA_PARALLEL_CHANNELS)
		d->clnk_ctrl = c->dma_ch;

	/*
	 * Note: DMA engine defines burst to be the number of dev-width
	 * transfers.
	 */
	/* ... */
	d->using_ll = od->ll123_supported;

	for_each_sg(sgl, sgent, sglen, i) {
		struct omap_sg *osg = &d->sg[i];

		osg->addr = sg_dma_address(sgent);
		osg->en = en;
		osg->fn = sg_dma_len(sgent) / frame_bytes;

		if (d->using_ll) {
			osg->t2_desc = dma_pool_alloc(od->desc_pool, GFP_ATOMIC,
						      &osg->t2_desc_paddr);
			if (!osg->t2_desc) {
				dev_err(chan->device->dev,
					/* ... */);
				d->using_ll = false;
				/* ... */
			}

			omap_dma_fill_type2_desc(d, i, dir, (i == sglen - 1));
		}
	}

	d->sglen = sglen;

	/* error path: release any type 2 descriptors already allocated */
	for (i = 0; i < d->sglen; i++) {
		struct omap_sg *osg = &d->sg[i];

		if (osg->t2_desc) {
			dma_pool_free(od->desc_pool, osg->t2_desc,
				      osg->t2_desc_paddr);
			osg->t2_desc = NULL;
		}
	}
	/* ... */
	return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
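
/*
 * A peripheral driver reaches the prep callback above through the
 * generic dmaengine API.  A hypothetical device-to-memory client;
 * fifo_phys and example_rx_done are illustrative names:
 */
static int example_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
			    unsigned int sg_count, dma_addr_t fifo_phys,
			    void *priv)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= fifo_phys,
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.src_maxburst	= 16,
	};
	struct dma_async_tx_descriptor *tx;

	dmaengine_slave_config(chan, &cfg);
	tx = dmaengine_prep_slave_sg(chan, sgl, sg_count, DMA_DEV_TO_MEM,
				     DMA_PREP_INTERRUPT);
	if (!tx)
		return -EIO;

	tx->callback = example_rx_done;		/* assumed completion handler */
	tx->callback_param = priv;
	dmaengine_submit(tx);
	dma_async_issue_pending(chan);

	return 0;
}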

/* in omap_dma_prep_dma_cyclic() */
	struct omap_dmadev *od = to_omap_dma_dev(chan->device);
	/* ... */
	if (dir == DMA_DEV_TO_MEM) {
		dev_addr = c->cfg.src_addr;
		dev_width = c->cfg.src_addr_width;
		burst = c->cfg.src_maxburst;
	} else if (dir == DMA_MEM_TO_DEV) {
		dev_addr = c->cfg.dst_addr;
		dev_width = c->cfg.dst_addr_width;
		burst = c->cfg.dst_maxburst;
	} else {
		dev_err(chan->device->dev, "%s: bad direction?\n", __func__);
		/* ... */
	}
	/* ... */
	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
	/* ... */
	d->dir = dir;
	d->dev_addr = dev_addr;
	d->fi = burst;
	d->es = es;
	d->sg[0].addr = buf_addr;
	d->sg[0].en = period_len / es_bytes[es];
	d->sg[0].fn = buf_len / period_len;
	d->sglen = 1;

	d->ccr = c->ccr;
	if (dir == DMA_DEV_TO_MEM)
		d->ccr |= CCR_DST_AMODE_POSTINC | CCR_SRC_AMODE_CONSTANT;
	else
		d->ccr |= CCR_DST_AMODE_CONSTANT | CCR_SRC_AMODE_POSTINC;

	d->cicr = CICR_DROP_IE;
	if (flags & DMA_PREP_INTERRUPT)
		d->cicr |= CICR_FRAME_IE;

	d->csdp = es;

	if (dma_omap1()) {
		d->cicr |= CICR_TOUT_IE;

		if (dir == DMA_DEV_TO_MEM)
			d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_MPUI;
		else
			d->csdp |= CSDP_DST_PORT_MPUI | CSDP_SRC_PORT_EMIFF;
	} else {
		if (burst)
			d->ccr |= CCR_SYNC_PACKET;
		else
			d->ccr |= CCR_SYNC_ELEMENT;

		if (dir == DMA_DEV_TO_MEM) {
			d->ccr |= CCR_TRIGGER_SRC;
			d->csdp |= CSDP_DST_PACKED;
		} else {
			d->csdp |= CSDP_SRC_PACKED;
		}

		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;

		d->csdp |= CSDP_DST_BURST_64 | CSDP_SRC_BURST_64;
	}

	if (__dma_omap15xx(od->plat->dma_attr))
		d->ccr |= CCR_AUTO_INIT | CCR_REPEAT;
	else
		d->clnk_ctrl = c->dma_ch | CLNK_CTRL_ENABLE_LNK;

	c->cyclic = true;

	return vchan_tx_prep(&c->vc, &d->vd, flags);
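
/*
 * Cyclic transfers are the audio-style use of this driver: one buffer
 * split into periods, CICR_FRAME_IE firing once per period.  A
 * hypothetical caller (example_period_elapsed is an assumed handler):
 */
static int example_start_playback(struct dma_chan *chan, dma_addr_t buf_phys,
				  size_t buf_len, size_t period_len, void *pcm)
{
	struct dma_async_tx_descriptor *tx;

	tx = dmaengine_prep_dma_cyclic(chan, buf_phys, buf_len, period_len,
				       DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!tx)
		return -EIO;

	tx->callback = example_period_elapsed;
	tx->callback_param = pcm;
	dmaengine_submit(tx);
	dma_async_issue_pending(chan);

	return 0;
}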

/* in omap_dma_prep_dma_memcpy() */
	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
	if (!d)
		/* ... */

	d->dir = DMA_MEM_TO_MEM;
	d->dev_addr = src;
	d->fi = 0;
	d->es = data_type;
	d->sg[0].en = len / BIT(data_type);
	d->sg[0].fn = 1;
	d->sg[0].addr = dest;
	d->sglen = 1;
	d->ccr = c->ccr;
	d->ccr |= CCR_DST_AMODE_POSTINC | CCR_SRC_AMODE_POSTINC;

	if (tx_flags & DMA_PREP_INTERRUPT)
		d->cicr |= CICR_FRAME_IE;
	else
		d->polled = true;

	d->csdp = data_type;

	if (dma_omap1()) {
		d->cicr |= CICR_TOUT_IE;
		d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_EMIFF;
	} else {
		d->csdp |= CSDP_DST_PACKED | CSDP_SRC_PACKED;
		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;
		d->csdp |= CSDP_DST_BURST_64 | CSDP_SRC_BURST_64;
	}

	return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
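
/*
 * Leaving DMA_PREP_INTERRUPT out of tx_flags makes the driver mark the
 * descriptor polled; completion is then detected in omap_dma_tx_status()
 * rather than by interrupt.  A hypothetical busy-waiting caller:
 */
static int example_polled_copy(struct dma_chan *chan, dma_addr_t dst,
			       dma_addr_t src, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, 0);
	if (!tx)
		return -EIO;

	cookie = dmaengine_submit(tx);
	dma_async_issue_pending(chan);

	while (dmaengine_tx_status(chan, cookie, NULL) == DMA_IN_PROGRESS)
		cpu_relax();

	return 0;
}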

/* in omap_dma_prep_dma_interleaved() */
	if (is_slave_direction(xt->dir))
		/* ... */

	if (xt->frame_size != 1 || xt->numf == 0)
		/* ... */

	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
	/* ... */
	data_type = __ffs((xt->src_start | xt->dst_start | xt->sgl[0].size));
	/* ... */
	sg = &d->sg[0];
	d->dir = DMA_MEM_TO_MEM;
	d->dev_addr = xt->src_start;
	d->es = data_type;
	sg->en = xt->sgl[0].size / BIT(data_type);
	sg->fn = xt->numf;
	sg->addr = xt->dst_start;
	d->sglen = 1;
	d->ccr = c->ccr;

	src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]);
	dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]);
	if (src_icg) {
		d->ccr |= CCR_SRC_AMODE_DBLIDX;
		d->ei = 1;
		d->fi = src_icg + 1;
	} else if (xt->src_inc) {
		d->ccr |= CCR_SRC_AMODE_POSTINC;
		d->fi = 0;
	} else {
		dev_err(chan->device->dev,
			/* ... */);
		/* ... */
	}

	if (dst_icg) {
		d->ccr |= CCR_DST_AMODE_DBLIDX;
		sg->ei = 1;
		sg->fi = dst_icg + 1;
	} else if (xt->dst_inc) {
		d->ccr |= CCR_DST_AMODE_POSTINC;
		sg->fi = 0;
	} else {
		dev_err(chan->device->dev,
			/* ... */);
		/* ... */
	}

	d->cicr = CICR_DROP_IE | CICR_FRAME_IE;
	/* ... */
	d->csdp = data_type;

	if (dma_omap1()) {
		d->cicr |= CICR_TOUT_IE;
		d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_EMIFF;
	} else {
		d->csdp |= CSDP_DST_PACKED | CSDP_SRC_PACKED;
		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;
		d->csdp |= CSDP_DST_BURST_64 | CSDP_SRC_BURST_64;
	}

	return vchan_tx_prep(&c->vc, &d->vd, flags);
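
/*
 * With frame_size == 1 and numf rows, the interleaved template maps to a
 * strided 2D copy.  A hypothetical caller that skips gap_bytes after
 * every source row:
 */
static int example_strided_copy(struct dma_chan *chan, dma_addr_t dst_phys,
				dma_addr_t src_phys, size_t row_bytes,
				size_t gap_bytes, size_t rows)
{
	struct dma_interleaved_template *xt;
	struct dma_async_tx_descriptor *tx;

	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
	if (!xt)
		return -ENOMEM;

	xt->src_start = src_phys;
	xt->dst_start = dst_phys;
	xt->dir = DMA_MEM_TO_MEM;
	xt->src_inc = true;
	xt->dst_inc = true;
	xt->src_sgl = true;		/* apply the gap on the source side */
	xt->dst_sgl = false;
	xt->numf = rows;		/* one frame per row */
	xt->frame_size = 1;
	xt->sgl[0].size = row_bytes;
	xt->sgl[0].icg = gap_bytes;

	tx = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);
	kfree(xt);
	if (!tx)
		return -EIO;

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);

	return 0;
}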

/* in omap_dma_slave_config() */
	if (cfg->src_addr_width == DMA_SLAVE_BUSWIDTH_8_BYTES ||
	    cfg->dst_addr_width == DMA_SLAVE_BUSWIDTH_8_BYTES)
		return -EINVAL;

	if (cfg->src_maxburst > chan->device->max_burst ||
	    cfg->dst_maxburst > chan->device->max_burst)
		return -EINVAL;

	memcpy(&c->cfg, cfg, sizeof(c->cfg));

/* in omap_dma_terminate_all() */
	spin_lock_irqsave(&c->vc.lock, flags);

	/*
	 * Stop DMA activity; even if the completion callback races with
	 * this, it will see that c->desc is NULL and exit.
	 */
	if (c->desc) {
		vchan_terminate_vdesc(&c->desc->vd);
		c->desc = NULL;
		/* ... */
		if (!c->paused)
			/* ... */
	}

	c->cyclic = false;
	c->paused = false;
	/* ... */
	vchan_get_all_descriptors(&c->vc, &head);
	spin_unlock_irqrestore(&c->vc.lock, flags);
	vchan_dma_desc_free_list(&c->vc, &head);

/* in omap_dma_synchronize() */
	vchan_synchronize(&c->vc);

/* in omap_dma_pause() */
	struct omap_dmadev *od = to_omap_dma_dev(chan->device);
	/* ... */
	int ret = -EINVAL;
	/* ... */
	spin_lock_irqsave(&od->irq_lock, flags);

	if (!c->desc)
		/* ... */
	if (c->cyclic)
		/* ... */

	/*
	 * ...
	 * an abort, unless it is hardware-source-synchronized …".
	 * A source-synchronised channel is one where the fetching of data is
	 * under control of the device. In other words, a device-to-memory
	 * transfer. So, a destination-synchronised channel (which would be a
	 * memory-to-device transfer) undergoes an abort if the CCR_ENABLE
	 * bit is cleared.
	 */
	/* ... */
	else if (c->desc->dir == DMA_DEV_TO_MEM)
		/* ... */
	if (can_pause && !c->paused) {
		/* ... */
		c->paused = true;
	}
	/* ... */
	spin_unlock_irqrestore(&od->irq_lock, flags);

/* in omap_dma_resume() */
	struct omap_dmadev *od = to_omap_dma_dev(chan->device);
	/* ... */
	int ret = -EINVAL;
	/* ... */
	spin_lock_irqsave(&od->irq_lock, flags);

	if (c->paused && c->desc) {
		/* ... */
		omap_dma_chan_write(c, CLNK_CTRL, c->desc->clnk_ctrl);
		/* ... */
		omap_dma_start(c, c->desc);
		c->paused = false;
	}
	/* ... */
	spin_unlock_irqrestore(&od->irq_lock, flags);

/* in omap_dma_chan_init() */
	/* ... */
		return -ENOMEM;

	c->reg_map = od->reg_map;
	c->vc.desc_free = omap_dma_desc_free;
	vchan_init(&c->vc, &od->ddev);

/* in omap_dma_free() */
	while (!list_empty(&od->ddev.channels)) {
		struct omap_chan *c = list_first_entry(&od->ddev.channels,
						       /* ... */);

		list_del(&c->vc.chan.device_node);
		tasklet_kill(&c->vc.task);
	}

/* in omap_dma_busy() */
	int lch = -1;
	/* ... */
		lch = find_next_bit(od->lch_bitmap, od->lch_count, lch + 1);
		if (lch >= od->lch_count)
			/* ... */
		c = od->lch_map[lch];

/* in omap_dma_context_save() */
	od->context.irqenable_l0 = omap_dma_glbl_read(od, IRQENABLE_L0);
	od->context.irqenable_l1 = omap_dma_glbl_read(od, IRQENABLE_L1);
	od->context.ocp_sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG);
	od->context.gcr = omap_dma_glbl_read(od, GCR);

/* in omap_dma_context_restore() */
	omap_dma_glbl_write(od, GCR, od->context.gcr);
	omap_dma_glbl_write(od, OCP_SYSCONFIG, od->context.ocp_sysconfig);
	omap_dma_glbl_write(od, IRQENABLE_L0, od->context.irqenable_l0);
	omap_dma_glbl_write(od, IRQENABLE_L1, od->context.irqenable_l1);
	/* ... */
	if (od->plat->errata & DMA_ROMCODE_BUG)
		/* ... */
	for (i = 0; i < od->lch_count; i++)
		/* ... */
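
/*
 * The save/restore pair above is driven from the CPU PM notifier that
 * probe registers below.  A sketch of how such a context notifier might
 * dispatch; the handler body is an assumption, only the
 * CPU_CLUSTER_PM_* events are the standard <linux/cpu_pm.h> ones:
 */
static int example_context_notifier(struct notifier_block *nb,
				    unsigned long cmd, void *v)
{
	struct omap_dmadev *od = container_of(nb, struct omap_dmadev, nb);

	switch (cmd) {
	case CPU_CLUSTER_PM_ENTER:
		if (omap_dma_busy(od))
			return NOTIFY_BAD;	/* refuse to lose live state */
		omap_dma_context_save(od);
		break;
	case CPU_CLUSTER_PM_ENTER_FAILED:
	case CPU_CLUSTER_PM_EXIT:
		omap_dma_context_restore(od);
		break;
	}

	return NOTIFY_OK;
}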

/* in omap_dma_init_gcr() */
	if (!od->cfg->rw_priority)
		/* ... */

/* in omap_dma_probe() */
	od = devm_kzalloc(&pdev->dev, sizeof(*od), GFP_KERNEL);
	if (!od)
		return -ENOMEM;
	/* ... */
	od->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(od->base))
		return PTR_ERR(od->base);

	conf = of_device_get_match_data(&pdev->dev);
	if (conf) {
		od->cfg = conf;
		od->plat = dev_get_platdata(&pdev->dev);
		if (!od->plat) {
			dev_err(&pdev->dev, "omap_system_dma_plat_info is missing");
			return -ENODEV;
		}
	} else {
		od->cfg = &default_cfg;

		od->plat = omap_get_plat_info();
		if (!od->plat)
			return -EPROBE_DEFER;
	}

	od->reg_map = od->plat->reg_map;

	dma_cap_set(DMA_SLAVE, od->ddev.cap_mask);
	dma_cap_set(DMA_CYCLIC, od->ddev.cap_mask);
	dma_cap_set(DMA_MEMCPY, od->ddev.cap_mask);
	dma_cap_set(DMA_INTERLEAVE, od->ddev.cap_mask);
	od->ddev.device_alloc_chan_resources = omap_dma_alloc_chan_resources;
	od->ddev.device_free_chan_resources = omap_dma_free_chan_resources;
	od->ddev.device_tx_status = omap_dma_tx_status;
	od->ddev.device_issue_pending = omap_dma_issue_pending;
	od->ddev.device_prep_slave_sg = omap_dma_prep_slave_sg;
	od->ddev.device_prep_dma_cyclic = omap_dma_prep_dma_cyclic;
	od->ddev.device_prep_dma_memcpy = omap_dma_prep_dma_memcpy;
	od->ddev.device_prep_interleaved_dma = omap_dma_prep_dma_interleaved;
	od->ddev.device_config = omap_dma_slave_config;
	od->ddev.device_pause = omap_dma_pause;
	od->ddev.device_resume = omap_dma_resume;
	od->ddev.device_terminate_all = omap_dma_terminate_all;
	od->ddev.device_synchronize = omap_dma_synchronize;
	od->ddev.src_addr_widths = OMAP_DMA_BUSWIDTHS;
	od->ddev.dst_addr_widths = OMAP_DMA_BUSWIDTHS;
	od->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
	if (__dma_omap15xx(od->plat->dma_attr))
		od->ddev.residue_granularity =
			DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
	else
		od->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	od->ddev.max_burst = SZ_16M - 1; /* CCEN: 24bit unsigned */
	od->ddev.dev = &pdev->dev;
	INIT_LIST_HEAD(&od->ddev.channels);
	mutex_init(&od->lch_lock);
	spin_lock_init(&od->lock);
	spin_lock_init(&od->irq_lock);
	/* ... */
	od->dma_requests = OMAP_SDMA_REQUESTS;
	if (pdev->dev.of_node && of_property_read_u32(pdev->dev.of_node,
						      "dma-requests",
						      &od->dma_requests)) {
		dev_info(&pdev->dev,
			 "Missing dma-requests property, using %u.\n",
			 OMAP_SDMA_REQUESTS);
	}

	/* ... */
	if (!pdev->dev.of_node) {
		od->lch_count = od->plat->dma_attr->lch_count;
		if (unlikely(!od->lch_count))
			od->lch_count = OMAP_SDMA_CHANNELS;
	} else if (of_property_read_u32(pdev->dev.of_node, "dma-channels",
					&od->lch_count)) {
		dev_info(&pdev->dev,
			 "Missing dma-channels property, using %u.\n",
			 OMAP_SDMA_CHANNELS);
		od->lch_count = OMAP_SDMA_CHANNELS;
	}

	/* ... */
	if (pdev->dev.of_node && !of_property_read_u32(pdev->dev.of_node,
						       "dma-channel-mask",
						       &val)) {
		/* ... */
		bitmap_from_arr32(od->lch_bitmap, &val, od->lch_count);
	}

	if (od->plat->dma_attr->dev_caps & HS_CHANNELS_RESERVED)
		bitmap_set(od->lch_bitmap, 0, 2);

	od->lch_map = devm_kcalloc(&pdev->dev, od->lch_count,
				   sizeof(*od->lch_map),
				   GFP_KERNEL);
	if (!od->lch_map)
		return -ENOMEM;

	for (i = 0; i < od->dma_requests; i++) {
		/* ... */
	}

	/* ... */
		dev_info(&pdev->dev, "failed to get L1 IRQ: %d\n", irq);
		od->legacy = true;
	/* ... */
		od->irq_enable_mask = 0;
		/* ... */
		rc = devm_request_irq(&pdev->dev, irq, omap_dma_irq,
				      IRQF_SHARED, "omap-dma-engine", od);
	/* ... */
		od->ll123_supported = true;

	od->ddev.filter.map = od->plat->slave_map;
	od->ddev.filter.mapcnt = od->plat->slavecnt;
	od->ddev.filter.fn = omap_dma_filter_fn;

	if (od->ll123_supported) {
		od->desc_pool = dma_pool_create(dev_name(&pdev->dev),
						&pdev->dev,
						/* ... */);
		if (!od->desc_pool) {
			dev_err(&pdev->dev,
				/* ... */);
			od->ll123_supported = false;
		}
	}

	rc = dma_async_device_register(&od->ddev);
	if (rc) {
		pr_warn("OMAP-DMA: failed to register slave DMA engine device: %d\n",
			rc);
		/* ... */
	}

	if (pdev->dev.of_node) {
		omap_dma_info.dma_cap = od->ddev.cap_mask;

		/* Device-tree DMA controller registration */
		rc = of_dma_controller_register(pdev->dev.of_node,
						/* ... */);
		if (rc) {
			pr_warn("OMAP-DMA: failed to register DMA controller\n");
			dma_async_device_unregister(&od->ddev);
			/* ... */
		}
	}

	if (od->cfg->needs_busy_check) {
		od->nb.notifier_call = omap_dma_busy_notifier;
		cpu_pm_register_notifier(&od->nb);
	} else if (od->cfg->may_lose_context) {
		od->nb.notifier_call = omap_dma_context_notifier;
		cpu_pm_register_notifier(&od->nb);
	}

	dev_info(&pdev->dev, "OMAP DMA engine driver%s\n",
		 od->ll123_supported ? " (LinkedList1/2/3 supported)" : "");

/* in omap_dma_remove() */
	if (od->cfg->may_lose_context)
		cpu_pm_unregister_notifier(&od->nb);

	if (pdev->dev.of_node)
		of_dma_controller_free(pdev->dev.of_node);

	/* ... */
		devm_free_irq(&pdev->dev, irq, od);

	dma_async_device_unregister(&od->ddev);

	if (!od->legacy) {
		/* ... */
	}

	if (od->ll123_supported)
		dma_pool_destroy(od->desc_pool);

/* of_device_id match table */
	{ .compatible = "ti,omap2420-sdma", .data = &omap2420_data, },
	{ .compatible = "ti,omap2430-sdma", .data = &omap2430_data, },
	{ .compatible = "ti,omap3430-sdma", .data = &omap3430_data, },
	{ .compatible = "ti,omap3630-sdma", .data = &omap3630_data, },
	{ .compatible = "ti,omap4430-sdma", .data = &omap4_data, },

/* in struct platform_driver omap_dma_driver */
	.name = "omap-dma-engine",

/* in omap_dma_filter_fn() */
	if (chan->device->dev->driver == &omap_dma_driver.driver) {
		struct omap_dmadev *od = to_omap_dma_dev(chan->device);
		/* ... */
		if (req <= od->dma_requests) {
			c->dma_sig = req;
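
/*
 * On non-DT platforms a client pairs this filter with
 * dma_request_channel(), passing the sDMA request line for the filter
 * to dereference.  A hypothetical caller:
 */
static struct dma_chan *example_request_channel(void)
{
	dma_cap_mask_t mask;
	unsigned int sig = 42;		/* example sDMA request line */

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* the filter runs synchronously, so &sig may live on the stack */
	return dma_request_channel(mask, omap_dma_filter_fn, &sig);
}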