Lines matching refs:imxdmac (all references are in the Linux i.MX DMA engine driver, drivers/dma/imx-dma.c)
222 static inline bool imxdma_chan_is_doing_cyclic(struct imxdma_channel *imxdmac) in imxdma_chan_is_doing_cyclic() argument
226 if (!list_empty(&imxdmac->ld_active)) { in imxdma_chan_is_doing_cyclic()
227 desc = list_first_entry(&imxdmac->ld_active, struct imxdma_desc, in imxdma_chan_is_doing_cyclic()
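These fragments show the channel-state test: the helper peeks at the head of the channel's ld_active list and reports whether that descriptor is a cyclic one. A minimal userspace sketch of the same idea, with an array standing in for the intrusive list and a hypothetical desc_type enum (the driver's own enum name is not visible in this listing):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-in for the driver's descriptor-type enum. */
enum desc_type { DESC_SLAVE_SG, DESC_CYCLIC, DESC_MEMCPY };

struct desc { enum desc_type type; };

/* Mirrors imxdma_chan_is_doing_cyclic(): if the channel has an active
 * descriptor, the answer is that descriptor's type; an idle channel is
 * never "doing cyclic". */
static bool chan_is_doing_cyclic(const struct desc *active, size_t n_active)
{
	if (n_active == 0)                      /* ld_active is empty */
		return false;
	return active[0].type == DESC_CYCLIC;   /* list_first_entry() */
}

int main(void)
{
	struct desc active[] = { { DESC_CYCLIC } };

	printf("cyclic: %d\n", chan_is_doing_cyclic(active, 1)); /* 1 */
	printf("idle:   %d\n", chan_is_doing_cyclic(active, 0)); /* 0 */
	return 0;
}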
248 static int imxdma_hw_chain(struct imxdma_channel *imxdmac) in imxdma_hw_chain() argument
250 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_hw_chain()
253 return imxdmac->hw_chaining; in imxdma_hw_chain()
263 struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan); in imxdma_sg_next() local
264 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_sg_next()
274 DMA_DAR(imxdmac->channel)); in imxdma_sg_next()
277 DMA_SAR(imxdmac->channel)); in imxdma_sg_next()
279 imx_dmav1_writel(imxdma, now, DMA_CNTR(imxdmac->channel)); in imxdma_sg_next()
282 "size 0x%08x\n", __func__, imxdmac->channel, in imxdma_sg_next()
283 imx_dmav1_readl(imxdma, DMA_DAR(imxdmac->channel)), in imxdma_sg_next()
284 imx_dmav1_readl(imxdma, DMA_SAR(imxdmac->channel)), in imxdma_sg_next()
285 imx_dmav1_readl(imxdma, DMA_CNTR(imxdmac->channel))); in imxdma_sg_next()
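imxdma_sg_next() points the channel at the next scatter-gather segment: the segment address is written to DAR (device-to-memory) or SAR (memory-to-device), the length to CNTR, and the registers are read back for the debug print. A self-contained sketch of that register-programming shape, with a plain array emulating the per-channel register file (register names are kept, but the layout and the clamping of the transfer count are simplified):

#include <stdint.h>
#include <stdio.h>

/* Illustrative per-channel "register file"; the real driver does MMIO
 * writes via imx_dmav1_writel(imxdma, val, DMA_xxx(channel)). */
enum { REG_SAR, REG_DAR, REG_CNTR, NUM_REGS };

struct channel_regs { uint32_t r[NUM_REGS]; };

enum dir { DEV_TO_MEM, MEM_TO_DEV };

struct sg_seg { uint32_t dma_address; uint32_t len; };

/* Point the channel at the next segment, as imxdma_sg_next() does. */
static void sg_next(struct channel_regs *regs, const struct sg_seg *sg,
		    enum dir d)
{
	if (d == DEV_TO_MEM)
		regs->r[REG_DAR] = sg->dma_address; /* writing to memory */
	else
		regs->r[REG_SAR] = sg->dma_address; /* reading from memory */
	regs->r[REG_CNTR] = sg->len;                /* bytes this segment */
}

int main(void)
{
	struct channel_regs regs = { { 0 } };
	struct sg_seg seg = { .dma_address = 0x80001000u, .len = 512 };

	sg_next(&regs, &seg, DEV_TO_MEM);
	/* Read back, like the driver's dev_dbg() with imx_dmav1_readl(). */
	printf("dar 0x%08x sar 0x%08x size 0x%08x\n",
	       regs.r[REG_DAR], regs.r[REG_SAR], regs.r[REG_CNTR]);
	return 0;
}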
290 struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan); in imxdma_enable_hw() local
291 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_enable_hw()
292 int channel = imxdmac->channel; in imxdma_enable_hw()
306 d->sg && imxdma_hw_chain(imxdmac)) { in imxdma_enable_hw()
320 static void imxdma_disable_hw(struct imxdma_channel *imxdmac) in imxdma_disable_hw() argument
322 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_disable_hw()
323 int channel = imxdmac->channel; in imxdma_disable_hw()
328 if (imxdma_hw_chain(imxdmac)) in imxdma_disable_hw()
329 del_timer(&imxdmac->watchdog); in imxdma_disable_hw()
342 struct imxdma_channel *imxdmac = from_timer(imxdmac, t, watchdog); in imxdma_watchdog() local
343 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_watchdog()
344 int channel = imxdmac->channel; in imxdma_watchdog()
349 tasklet_schedule(&imxdmac->dma_tasklet); in imxdma_watchdog()
351 imxdmac->channel); in imxdma_watchdog()
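from_timer() is a container_of() wrapper: the timer core passes the callback a pointer to the timer_list embedded in struct imxdma_channel, and the callback recovers the enclosing channel by subtracting the member offset. A standalone illustration of that recovery trick, without kernel headers:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct timer { int pending; };

struct channel {
	int id;
	struct timer watchdog;  /* embedded, like imxdmac->watchdog */
};

/* The callback only receives the embedded timer... */
static void watchdog_cb(struct timer *t)
{
	/* ...and recovers its channel, which is what from_timer() expands to. */
	struct channel *ch = container_of(t, struct channel, watchdog);

	printf("watchdog fired on channel %d\n", ch->id);
}

int main(void)
{
	struct channel ch = { .id = 3 };

	watchdog_cb(&ch.watchdog);
	return 0;
}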
407 static void dma_irq_handle_channel(struct imxdma_channel *imxdmac) in dma_irq_handle_channel() argument
409 struct imxdma_engine *imxdma = imxdmac->imxdma; in dma_irq_handle_channel()
410 int chno = imxdmac->channel; in dma_irq_handle_channel()
415 if (list_empty(&imxdmac->ld_active)) { in dma_irq_handle_channel()
420 desc = list_first_entry(&imxdmac->ld_active, in dma_irq_handle_channel()
434 if (imxdma_hw_chain(imxdmac)) { in dma_irq_handle_channel()
438 mod_timer(&imxdmac->watchdog, in dma_irq_handle_channel()
451 if (imxdma_chan_is_doing_cyclic(imxdmac)) in dma_irq_handle_channel()
453 tasklet_schedule(&imxdmac->dma_tasklet); in dma_irq_handle_channel()
458 if (imxdma_hw_chain(imxdmac)) { in dma_irq_handle_channel()
459 del_timer(&imxdmac->watchdog); in dma_irq_handle_channel()
467 tasklet_schedule(&imxdmac->dma_tasklet); in dma_irq_handle_channel()
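The interrupt path also shows the watchdog discipline around hardware chaining: while more chained segments remain, each completion interrupt re-arms the watchdog with mod_timer(); the last segment cancels it with del_timer() before the completion tasklet runs. A small sketch of that arm/re-arm/cancel pattern with a fake one-shot timer (the timeout value is made up):

#include <stdbool.h>
#include <stdio.h>

struct timer { bool armed; unsigned long expires; };

/* Fake one-shot timer API with mod_timer()/del_timer() semantics:
 * mod_timer() (re)arms whether or not the timer was already pending. */
static void mod_timer(struct timer *t, unsigned long expires)
{
	t->armed = true;
	t->expires = expires;
}

static void del_timer(struct timer *t) { t->armed = false; }

int main(void)
{
	struct timer watchdog = { false, 0 };
	unsigned long now = 0;
	int seg, nsegs = 3;

	for (seg = 0; seg < nsegs; seg++) {
		now += 10;                             /* a segment completes */
		if (seg < nsegs - 1)
			mod_timer(&watchdog, now + 100); /* more chained work */
		else
			del_timer(&watchdog);          /* last one: no timeout */
		printf("seg %d done, watchdog %s\n", seg,
		       watchdog.armed ? "armed" : "off");
	}
	return 0;
}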
493 struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan); in imxdma_xfer_desc() local
494 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_xfer_desc()
519 imxdmac->slot_2d = slot; in imxdma_xfer_desc()
520 imxdmac->enabled_2d = true; in imxdma_xfer_desc()
541 imx_dmav1_writel(imxdma, d->src, DMA_SAR(imxdmac->channel)); in imxdma_xfer_desc()
542 imx_dmav1_writel(imxdma, d->dest, DMA_DAR(imxdmac->channel)); in imxdma_xfer_desc()
544 DMA_CCR(imxdmac->channel)); in imxdma_xfer_desc()
546 imx_dmav1_writel(imxdma, d->len, DMA_CNTR(imxdmac->channel)); in imxdma_xfer_desc()
550 __func__, imxdmac->channel, in imxdma_xfer_desc()
559 imx_dmav1_writel(imxdma, imxdmac->per_address, in imxdma_xfer_desc()
560 DMA_SAR(imxdmac->channel)); in imxdma_xfer_desc()
561 imx_dmav1_writel(imxdma, imxdmac->ccr_from_device, in imxdma_xfer_desc()
562 DMA_CCR(imxdmac->channel)); in imxdma_xfer_desc()
566 __func__, imxdmac->channel, in imxdma_xfer_desc()
568 (unsigned long long)imxdmac->per_address); in imxdma_xfer_desc()
570 imx_dmav1_writel(imxdma, imxdmac->per_address, in imxdma_xfer_desc()
571 DMA_DAR(imxdmac->channel)); in imxdma_xfer_desc()
572 imx_dmav1_writel(imxdma, imxdmac->ccr_to_device, in imxdma_xfer_desc()
573 DMA_CCR(imxdmac->channel)); in imxdma_xfer_desc()
577 __func__, imxdmac->channel, in imxdma_xfer_desc()
579 (unsigned long long)imxdmac->per_address); in imxdma_xfer_desc()
582 __func__, imxdmac->channel); in imxdma_xfer_desc()
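imxdma_xfer_desc() dispatches on descriptor type: a memcpy descriptor supplies both SAR and DAR (plus CNTR) itself, while slave transfers put the fixed peripheral address (per_address) on one side and select a direction-specific CCR value (ccr_from_device or ccr_to_device). A sketch of that dispatch with placeholder register and CCR values; error handling and the 2D slot cases visible above are omitted:

#include <stdint.h>
#include <stdio.h>

enum xfer_type { XFER_MEMCPY, XFER_DEV_TO_MEM, XFER_MEM_TO_DEV };

struct regs { uint32_t sar, dar, ccr, cntr; };

struct desc {
	enum xfer_type type;
	uint32_t src, dest, len;
};

struct chan {
	uint32_t per_address;             /* fixed peripheral FIFO address */
	uint32_t ccr_from_device, ccr_to_device;
};

/* Mirrors the shape of imxdma_xfer_desc(): who supplies each side. */
static void xfer_desc(struct regs *r, const struct chan *c,
		      const struct desc *d)
{
	switch (d->type) {
	case XFER_MEMCPY:                 /* descriptor supplies both ends */
		r->sar = d->src;
		r->dar = d->dest;
		r->cntr = d->len;
		break;
	case XFER_DEV_TO_MEM:             /* peripheral -> memory */
		r->sar = c->per_address;
		r->ccr = c->ccr_from_device;
		break;                    /* CNTR comes per-segment (sg_next) */
	case XFER_MEM_TO_DEV:             /* memory -> peripheral */
		r->dar = c->per_address;
		r->ccr = c->ccr_to_device;
		break;
	}
}

int main(void)
{
	struct regs r = { 0 };
	struct chan c = { .per_address = 0x43f90000u,
			  .ccr_from_device = 0x1, .ccr_to_device = 0x2 };
	struct desc d = { .type = XFER_DEV_TO_MEM };

	xfer_desc(&r, &c, &d);
	printf("sar 0x%08x ccr 0x%x\n", r.sar, r.ccr);
	return 0;
}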
598 struct imxdma_channel *imxdmac = from_tasklet(imxdmac, t, dma_tasklet); in imxdma_tasklet() local
599 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_tasklet()
605 if (list_empty(&imxdmac->ld_active)) { in imxdma_tasklet()
610 desc = list_first_entry(&imxdmac->ld_active, struct imxdma_desc, node); in imxdma_tasklet()
616 if (imxdma_chan_is_doing_cyclic(imxdmac)) in imxdma_tasklet()
622 if (imxdmac->enabled_2d) { in imxdma_tasklet()
623 imxdma->slots_2d[imxdmac->slot_2d].count--; in imxdma_tasklet()
624 imxdmac->enabled_2d = false; in imxdma_tasklet()
627 list_move_tail(imxdmac->ld_active.next, &imxdmac->ld_free); in imxdma_tasklet()
629 if (!list_empty(&imxdmac->ld_queue)) { in imxdma_tasklet()
630 next_desc = list_first_entry(&imxdmac->ld_queue, in imxdma_tasklet()
632 list_move_tail(imxdmac->ld_queue.next, &imxdmac->ld_active); in imxdma_tasklet()
635 __func__, imxdmac->channel); in imxdma_tasklet()
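The tasklet implements the descriptor lifecycle that runs through this whole listing: the completed head of ld_active moves to ld_free, and if ld_queue holds more work its head is promoted to ld_active and restarted. A compact sketch of those list moves, with the kernel's list_move_tail() semantics reimplemented for illustration:

#include <stdio.h>

struct list_head { struct list_head *next, *prev; };

static void list_init(struct list_head *h) { h->next = h->prev = h; }
static int list_empty(const struct list_head *h) { return h->next == h; }

static void list_del(struct list_head *e)
{
	e->prev->next = e->next;
	e->next->prev = e->prev;
}

static void list_add_tail(struct list_head *e, struct list_head *h)
{
	e->prev = h->prev;
	e->next = h;
	h->prev->next = e;
	h->prev = e;
}

/* Kernel's list_move_tail(): unlink and append to another list. */
static void list_move_tail(struct list_head *e, struct list_head *h)
{
	list_del(e);
	list_add_tail(e, h);
}

int main(void)
{
	struct list_head ld_active, ld_queue, ld_free, d0, d1;

	list_init(&ld_active); list_init(&ld_queue); list_init(&ld_free);
	list_add_tail(&d0, &ld_active);  /* d0 is running */
	list_add_tail(&d1, &ld_queue);   /* d1 is waiting */

	/* Completion path, as in imxdma_tasklet(): retire the head of
	 * ld_active, then promote the head of ld_queue if any. */
	list_move_tail(ld_active.next, &ld_free);
	if (!list_empty(&ld_queue))
		list_move_tail(ld_queue.next, &ld_active);

	printf("active head is d1: %d\n", ld_active.next == &d1); /* 1 */
	printf("free head is d0:   %d\n", ld_free.next == &d0);   /* 1 */
	return 0;
}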
645 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_terminate_all() local
646 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_terminate_all()
649 imxdma_disable_hw(imxdmac); in imxdma_terminate_all()
652 list_splice_tail_init(&imxdmac->ld_active, &imxdmac->ld_free); in imxdma_terminate_all()
653 list_splice_tail_init(&imxdmac->ld_queue, &imxdmac->ld_free); in imxdma_terminate_all()
662 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_config_write() local
663 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_config_write()
667 imxdmac->per_address = dmaengine_cfg->src_addr; in imxdma_config_write()
668 imxdmac->watermark_level = dmaengine_cfg->src_maxburst; in imxdma_config_write()
669 imxdmac->word_size = dmaengine_cfg->src_addr_width; in imxdma_config_write()
671 imxdmac->per_address = dmaengine_cfg->dst_addr; in imxdma_config_write()
672 imxdmac->watermark_level = dmaengine_cfg->dst_maxburst; in imxdma_config_write()
673 imxdmac->word_size = dmaengine_cfg->dst_addr_width; in imxdma_config_write()
676 switch (imxdmac->word_size) { in imxdma_config_write()
689 imxdmac->hw_chaining = 0; in imxdma_config_write()
691 imxdmac->ccr_from_device = (mode | IMX_DMA_TYPE_FIFO) | in imxdma_config_write()
694 imxdmac->ccr_to_device = in imxdma_config_write()
697 imx_dmav1_writel(imxdma, imxdmac->dma_request, in imxdma_config_write()
698 DMA_RSSR(imxdmac->channel)); in imxdma_config_write()
701 imx_dmav1_writel(imxdma, imxdmac->watermark_level * in imxdma_config_write()
702 imxdmac->word_size, DMA_BLR(imxdmac->channel)); in imxdma_config_write()
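imxdma_config_write() latches one direction of the slave config (peripheral address, watermark/maxburst, bus width) and programs the burst length register with watermark_level * word_size, i.e. bytes per burst rather than words. A sketch of that direction split and calculation; the struct fields mirror dma_slave_config, but the types are simplified:

#include <stdio.h>

enum dir { DEV_TO_MEM, MEM_TO_DEV };

/* Subset of struct dma_slave_config relevant to these lines. */
struct slave_config {
	unsigned long long src_addr, dst_addr;
	unsigned int src_maxburst, dst_maxburst;
	unsigned int src_addr_width, dst_addr_width; /* bytes */
};

struct chan_state {
	unsigned long long per_address;
	unsigned int watermark_level;
	unsigned int word_size;
};

/* Mirrors the direction split in imxdma_config_write(): one side of
 * the config is latched, and BLR gets watermark * width in bytes. */
static unsigned int config_write(struct chan_state *c,
				 const struct slave_config *cfg, enum dir d)
{
	if (d == DEV_TO_MEM) {
		c->per_address = cfg->src_addr;
		c->watermark_level = cfg->src_maxburst;
		c->word_size = cfg->src_addr_width;
	} else {
		c->per_address = cfg->dst_addr;
		c->watermark_level = cfg->dst_maxburst;
		c->word_size = cfg->dst_addr_width;
	}
	return c->watermark_level * c->word_size; /* DMA_BLR value */
}

int main(void)
{
	struct slave_config cfg = { .src_addr = 0x43f90000ull,
				    .src_maxburst = 16, .src_addr_width = 4 };
	struct chan_state ch = { 0 };

	printf("BLR = %u\n", config_write(&ch, &cfg, DEV_TO_MEM)); /* 64 */
	return 0;
}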
710 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_config() local
712 memcpy(&imxdmac->config, dmaengine_cfg, sizeof(*dmaengine_cfg)); in imxdma_config()
726 struct imxdma_channel *imxdmac = to_imxdma_chan(tx->chan); in imxdma_tx_submit() local
727 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_tx_submit()
732 list_move_tail(imxdmac->ld_free.next, &imxdmac->ld_queue); in imxdma_tx_submit()
741 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_alloc_chan_resources() local
745 imxdmac->dma_request = data->dma_request; in imxdma_alloc_chan_resources()
747 while (imxdmac->descs_allocated < IMXDMA_MAX_CHAN_DESCRIPTORS) { in imxdma_alloc_chan_resources()
759 list_add_tail(&desc->node, &imxdmac->ld_free); in imxdma_alloc_chan_resources()
760 imxdmac->descs_allocated++; in imxdma_alloc_chan_resources()
763 if (!imxdmac->descs_allocated) in imxdma_alloc_chan_resources()
766 return imxdmac->descs_allocated; in imxdma_alloc_chan_resources()
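Channel setup preallocates descriptors onto ld_free up to IMXDMA_MAX_CHAN_DESCRIPTORS and returns how many it managed to allocate, with zero treated as failure. A sketch of that allocate-until-cap loop; the cap value of 16 is an assumption, since the constant's value is not shown in this listing:

#include <stdlib.h>
#include <stdio.h>

#define MAX_CHAN_DESCRIPTORS 16 /* assumed cap, for illustration */

struct desc { struct desc *next; };

/* Mirrors imxdma_alloc_chan_resources(): allocate descriptors onto a
 * free list up to the cap; report how many the channel ended up with. */
static int alloc_chan_resources(struct desc **ld_free)
{
	int allocated = 0;

	while (allocated < MAX_CHAN_DESCRIPTORS) {
		struct desc *d = calloc(1, sizeof(*d));

		if (!d)
			break;          /* keep what we already have */
		d->next = *ld_free;     /* push onto the free list */
		*ld_free = d;
		allocated++;
	}
	return allocated ? allocated : -1; /* -ENOMEM stand-in */
}

int main(void)
{
	struct desc *ld_free = NULL;
	int n = alloc_chan_resources(&ld_free);

	printf("descs allocated: %d\n", n);
	while (ld_free) {               /* free them again */
		struct desc *d = ld_free;

		ld_free = d->next;
		free(d);
	}
	return 0;
}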
771 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_free_chan_resources() local
772 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_free_chan_resources()
778 imxdma_disable_hw(imxdmac); in imxdma_free_chan_resources()
779 list_splice_tail_init(&imxdmac->ld_active, &imxdmac->ld_free); in imxdma_free_chan_resources()
780 list_splice_tail_init(&imxdmac->ld_queue, &imxdmac->ld_free); in imxdma_free_chan_resources()
784 list_for_each_entry_safe(desc, _desc, &imxdmac->ld_free, node) { in imxdma_free_chan_resources()
786 imxdmac->descs_allocated--; in imxdma_free_chan_resources()
788 INIT_LIST_HEAD(&imxdmac->ld_free); in imxdma_free_chan_resources()
790 kfree(imxdmac->sg_list); in imxdma_free_chan_resources()
791 imxdmac->sg_list = NULL; in imxdma_free_chan_resources()
799 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_prep_slave_sg() local
804 if (list_empty(&imxdmac->ld_free) || in imxdma_prep_slave_sg()
805 imxdma_chan_is_doing_cyclic(imxdmac)) in imxdma_prep_slave_sg()
808 desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node); in imxdma_prep_slave_sg()
814 imxdma_config_write(chan, &imxdmac->config, direction); in imxdma_prep_slave_sg()
816 switch (imxdmac->word_size) { in imxdma_prep_slave_sg()
837 desc->src = imxdmac->per_address; in imxdma_prep_slave_sg()
839 desc->dest = imxdmac->per_address; in imxdma_prep_slave_sg()
852 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_prep_dma_cyclic() local
853 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_cyclic()
859 __func__, imxdmac->channel, buf_len, period_len); in imxdma_prep_dma_cyclic()
861 if (list_empty(&imxdmac->ld_free) || in imxdma_prep_dma_cyclic()
862 imxdma_chan_is_doing_cyclic(imxdmac)) in imxdma_prep_dma_cyclic()
865 desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node); in imxdma_prep_dma_cyclic()
867 kfree(imxdmac->sg_list); in imxdma_prep_dma_cyclic()
869 imxdmac->sg_list = kcalloc(periods + 1, in imxdma_prep_dma_cyclic()
871 if (!imxdmac->sg_list) in imxdma_prep_dma_cyclic()
874 sg_init_table(imxdmac->sg_list, periods); in imxdma_prep_dma_cyclic()
877 sg_assign_page(&imxdmac->sg_list[i], NULL); in imxdma_prep_dma_cyclic()
878 imxdmac->sg_list[i].offset = 0; in imxdma_prep_dma_cyclic()
879 imxdmac->sg_list[i].dma_address = dma_addr; in imxdma_prep_dma_cyclic()
880 sg_dma_len(&imxdmac->sg_list[i]) = period_len; in imxdma_prep_dma_cyclic()
885 sg_chain(imxdmac->sg_list, periods + 1, imxdmac->sg_list); in imxdma_prep_dma_cyclic()
888 desc->sg = imxdmac->sg_list; in imxdma_prep_dma_cyclic()
893 desc->src = imxdmac->per_address; in imxdma_prep_dma_cyclic()
895 desc->dest = imxdmac->per_address; in imxdma_prep_dma_cyclic()
900 imxdma_config_write(chan, &imxdmac->config, direction); in imxdma_prep_dma_cyclic()
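The cyclic preparation splits the buffer into buf_len / period_len periods, fills one scatterlist entry per period, and uses sg_chain() to link the extra tail entry back to the first so the hardware loops forever. A userspace sketch of the same ring construction, with a plain array and wrap-around indexing in place of the kernel-only sg_chain():

#include <stdio.h>
#include <stdlib.h>

struct seg { unsigned long long dma_address; unsigned int len; };

/* Build one segment per period, as imxdma_prep_dma_cyclic() does for
 * its sg_list; the +1 chain entry is replaced by index wrap-around. */
static struct seg *build_ring(unsigned long long buf, unsigned int buf_len,
			      unsigned int period_len, unsigned int *periods)
{
	struct seg *ring;
	unsigned int i;

	*periods = buf_len / period_len;
	ring = calloc(*periods, sizeof(*ring));
	if (!ring)
		return NULL;
	for (i = 0; i < *periods; i++) {
		ring[i].dma_address = buf + (unsigned long long)i * period_len;
		ring[i].len = period_len;
	}
	return ring;
}

int main(void)
{
	unsigned int periods, i;
	struct seg *ring = build_ring(0x80000000ull, 4096, 1024, &periods);

	if (!ring)
		return 1;
	/* Walk two full laps to show the cyclic wrap. */
	for (i = 0; i < 2 * periods; i++)
		printf("period %u: addr 0x%llx len %u\n", i % periods,
		       ring[i % periods].dma_address, ring[i % periods].len);
	free(ring);
	return 0;
}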
909 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_prep_dma_memcpy() local
910 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_memcpy()
914 __func__, imxdmac->channel, (unsigned long long)src, in imxdma_prep_dma_memcpy()
917 if (list_empty(&imxdmac->ld_free) || in imxdma_prep_dma_memcpy()
918 imxdma_chan_is_doing_cyclic(imxdmac)) in imxdma_prep_dma_memcpy()
921 desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node); in imxdma_prep_dma_memcpy()
940 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_prep_dma_interleaved() local
941 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_prep_dma_interleaved()
946 imxdmac->channel, (unsigned long long)xt->src_start, in imxdma_prep_dma_interleaved()
951 if (list_empty(&imxdmac->ld_free) || in imxdma_prep_dma_interleaved()
952 imxdma_chan_is_doing_cyclic(imxdmac)) in imxdma_prep_dma_interleaved()
958 desc = list_first_entry(&imxdmac->ld_free, struct imxdma_desc, node); in imxdma_prep_dma_interleaved()
982 struct imxdma_channel *imxdmac = to_imxdma_chan(chan); in imxdma_issue_pending() local
983 struct imxdma_engine *imxdma = imxdmac->imxdma; in imxdma_issue_pending()
988 if (list_empty(&imxdmac->ld_active) && in imxdma_issue_pending()
989 !list_empty(&imxdmac->ld_queue)) { in imxdma_issue_pending()
990 desc = list_first_entry(&imxdmac->ld_queue, in imxdma_issue_pending()
996 __func__, imxdmac->channel); in imxdma_issue_pending()
998 list_move_tail(imxdmac->ld_queue.next, in imxdma_issue_pending()
999 &imxdmac->ld_active); in imxdma_issue_pending()
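imxdma_issue_pending() is the hand-off point: under the channel lock, if ld_active is empty but ld_queue is not, the head of the queue is started and moved to ld_active; if the hardware start fails, the descriptor stays queued and a warning is logged. A minimal sketch of that guard, with a small array standing in for ld_queue and the locking omitted:

#include <stdio.h>

struct chan {
	int active;   /* -1 when ld_active is empty */
	int queue[8]; /* descriptor ids waiting in ld_queue */
	int queued;
};

/* Hypothetical hardware start; the driver's imxdma_xfer_desc() can
 * fail, in which case the descriptor is left on ld_queue. */
static int xfer_desc(int id) { (void)id; return 0; }

/* Mirrors imxdma_issue_pending(): only start the head of the queue
 * when the channel is idle. */
static void issue_pending(struct chan *c)
{
	if (c->active < 0 && c->queued > 0) {
		int head = c->queue[0];

		if (xfer_desc(head) != 0) {
			printf("channel: xfer error\n"); /* dev_warn() */
			return;            /* leave it on ld_queue */
		}
		c->active = head;          /* move to ld_active */
		for (int i = 1; i < c->queued; i++)
			c->queue[i - 1] = c->queue[i];
		c->queued--;
	}
}

int main(void)
{
	struct chan c = { .active = -1, .queue = { 42 }, .queued = 1 };

	issue_pending(&c);
	printf("active desc: %d, queued: %d\n", c.active, c.queued);
	return 0;
}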
1124 struct imxdma_channel *imxdmac = &imxdma->channel[i]; in imxdma_probe() local
1136 imxdmac->irq = irq + i; in imxdma_probe()
1137 timer_setup(&imxdmac->watchdog, imxdma_watchdog, 0); in imxdma_probe()
1140 imxdmac->imxdma = imxdma; in imxdma_probe()
1142 INIT_LIST_HEAD(&imxdmac->ld_queue); in imxdma_probe()
1143 INIT_LIST_HEAD(&imxdmac->ld_free); in imxdma_probe()
1144 INIT_LIST_HEAD(&imxdmac->ld_active); in imxdma_probe()
1146 tasklet_setup(&imxdmac->dma_tasklet, imxdma_tasklet); in imxdma_probe()
1147 imxdmac->chan.device = &imxdma->dma_device; in imxdma_probe()
1148 dma_cookie_init(&imxdmac->chan); in imxdma_probe()
1149 imxdmac->channel = i; in imxdma_probe()
1152 list_add_tail(&imxdmac->chan.device_node, in imxdma_probe()
1210 struct imxdma_channel *imxdmac = &imxdma->channel[i]; in imxdma_free_irq() local
1213 disable_irq(imxdmac->irq); in imxdma_free_irq()
1215 tasklet_kill(&imxdmac->dma_tasklet); in imxdma_free_irq()