Lines Matching refs:atchan
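
Every reference in this listing resolves to a member of struct at_dma_chan (drivers/dma/at_hdmac.h). As a reading aid, the sketch below restates that structure restricted to the fields actually touched here; the field order and exact types are inferred from how the fields are used in the listed lines (list operations, spinlock calls, bit operations, MMIO accesses), not copied from the header, and members that never appear in this listing are omitted. Treat it as an approximation and check it against at_hdmac.h before relying on it.

    /* Sketch only: fields reconstructed from the references in this listing.
     * Types are guessed from usage; verify against drivers/dma/at_hdmac.h.
     */
    struct at_dma_chan {
            struct dma_chan         chan_common;    /* embedded dmaengine channel */
            void __iomem            *ch_regs;       /* per-channel register base (channel_readl/writel) */
            u8                      mask;           /* channel bit for CHER/CHDR/CHSR (1 << chan_id) */
            u8                      per_if;         /* peripheral interface id (ATC_SIF/ATC_DIF) */
            u8                      mem_if;         /* memory interface id */
            unsigned long           status;         /* ATC_IS_ERROR / ATC_IS_PAUSED / ATC_IS_CYCLIC bits */
            struct tasklet_struct   tasklet;        /* bottom half scheduled from at_dma_interrupt() */
            u32                     save_cfg;       /* CFG register saved across suspend/resume */
            u32                     save_dscr;      /* DSCR saved for cyclic suspend/resume */
            struct dma_slave_config dma_sconfig;    /* per-channel slave configuration */
            spinlock_t              lock;           /* protects the three descriptor lists below */
            struct list_head        active_list;    /* descriptors handed to the hardware */
            struct list_head        queue;          /* descriptors waiting for the channel */
            struct list_head        free_list;      /* recycled descriptors (atc_desc_get/put) */
    };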

99 static struct at_desc *atc_first_active(struct at_dma_chan *atchan)  in atc_first_active()  argument
101 return list_first_entry(&atchan->active_list, in atc_first_active()
105 static struct at_desc *atc_first_queued(struct at_dma_chan *atchan) in atc_first_queued() argument
107 return list_first_entry(&atchan->queue, in atc_first_queued()
145 static struct at_desc *atc_desc_get(struct at_dma_chan *atchan) in atc_desc_get() argument
152 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_get()
153 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_desc_get()
160 dev_dbg(chan2dev(&atchan->chan_common), in atc_desc_get()
163 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_get()
164 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_get()
169 ret = atc_alloc_descriptor(&atchan->chan_common, GFP_NOWAIT); in atc_desc_get()
179 static void atc_desc_put(struct at_dma_chan *atchan, struct at_desc *desc) in atc_desc_put() argument
185 spin_lock_irqsave(&atchan->lock, flags); in atc_desc_put()
187 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
190 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_desc_put()
191 dev_vdbg(chan2dev(&atchan->chan_common), in atc_desc_put()
193 list_add(&desc->desc_node, &atchan->free_list); in atc_desc_put()
194 spin_unlock_irqrestore(&atchan->lock, flags); in atc_desc_put()
228 static void atc_dostart(struct at_dma_chan *atchan, struct at_desc *first) in atc_dostart() argument
230 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_dostart()
233 if (atc_chan_is_enabled(atchan)) { in atc_dostart()
234 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
236 dev_err(chan2dev(&atchan->chan_common), in atc_dostart()
238 channel_readl(atchan, SADDR), in atc_dostart()
239 channel_readl(atchan, DADDR), in atc_dostart()
240 channel_readl(atchan, CTRLA), in atc_dostart()
241 channel_readl(atchan, CTRLB), in atc_dostart()
242 channel_readl(atchan, DSCR)); in atc_dostart()
248 vdbg_dump_regs(atchan); in atc_dostart()
250 channel_writel(atchan, SADDR, 0); in atc_dostart()
251 channel_writel(atchan, DADDR, 0); in atc_dostart()
252 channel_writel(atchan, CTRLA, 0); in atc_dostart()
253 channel_writel(atchan, CTRLB, 0); in atc_dostart()
254 channel_writel(atchan, DSCR, first->txd.phys); in atc_dostart()
255 channel_writel(atchan, SPIP, ATC_SPIP_HOLE(first->src_hole) | in atc_dostart()
257 channel_writel(atchan, DPIP, ATC_DPIP_HOLE(first->dst_hole) | in atc_dostart()
259 dma_writel(atdma, CHER, atchan->mask); in atc_dostart()
261 vdbg_dump_regs(atchan); in atc_dostart()
269 static struct at_desc *atc_get_desc_by_cookie(struct at_dma_chan *atchan, in atc_get_desc_by_cookie() argument
274 list_for_each_entry_safe(desc, _desc, &atchan->queue, desc_node) { in atc_get_desc_by_cookie()
279 list_for_each_entry_safe(desc, _desc, &atchan->active_list, desc_node) { in atc_get_desc_by_cookie()
315 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_get_bytes_left() local
316 struct at_desc *desc_first = atc_first_active(atchan); in atc_get_bytes_left()
326 desc = atc_get_desc_by_cookie(atchan, cookie); in atc_get_bytes_left()
386 dscr = channel_readl(atchan, DSCR); in atc_get_bytes_left()
388 ctrla = channel_readl(atchan, CTRLA); in atc_get_bytes_left()
393 new_dscr = channel_readl(atchan, DSCR); in atc_get_bytes_left()
413 ctrla = channel_readl(atchan, CTRLA); in atc_get_bytes_left()
437 ctrla = channel_readl(atchan, CTRLA); in atc_get_bytes_left()
450 atc_chain_complete(struct at_dma_chan *atchan, struct at_desc *desc) in atc_chain_complete() argument
453 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_chain_complete()
456 dev_vdbg(chan2dev(&atchan->chan_common), in atc_chain_complete()
459 spin_lock_irqsave(&atchan->lock, flags); in atc_chain_complete()
462 if (!atc_chan_is_cyclic(atchan)) in atc_chain_complete()
473 list_splice_init(&desc->tx_list, &atchan->free_list); in atc_chain_complete()
475 list_move(&desc->desc_node, &atchan->free_list); in atc_chain_complete()
477 spin_unlock_irqrestore(&atchan->lock, flags); in atc_chain_complete()
482 if (!atc_chan_is_cyclic(atchan)) in atc_chain_complete()
497 static void atc_complete_all(struct at_dma_chan *atchan) in atc_complete_all() argument
503 dev_vdbg(chan2dev(&atchan->chan_common), "complete all\n"); in atc_complete_all()
505 spin_lock_irqsave(&atchan->lock, flags); in atc_complete_all()
511 if (!list_empty(&atchan->queue)) in atc_complete_all()
512 atc_dostart(atchan, atc_first_queued(atchan)); in atc_complete_all()
514 list_splice_init(&atchan->active_list, &list); in atc_complete_all()
516 list_splice_init(&atchan->queue, &atchan->active_list); in atc_complete_all()
518 spin_unlock_irqrestore(&atchan->lock, flags); in atc_complete_all()
521 atc_chain_complete(atchan, desc); in atc_complete_all()
528 static void atc_advance_work(struct at_dma_chan *atchan) in atc_advance_work() argument
533 dev_vdbg(chan2dev(&atchan->chan_common), "advance_work\n"); in atc_advance_work()
535 spin_lock_irqsave(&atchan->lock, flags); in atc_advance_work()
536 ret = atc_chan_is_enabled(atchan); in atc_advance_work()
537 spin_unlock_irqrestore(&atchan->lock, flags); in atc_advance_work()
541 if (list_empty(&atchan->active_list) || in atc_advance_work()
542 list_is_singular(&atchan->active_list)) in atc_advance_work()
543 return atc_complete_all(atchan); in atc_advance_work()
545 atc_chain_complete(atchan, atc_first_active(atchan)); in atc_advance_work()
548 spin_lock_irqsave(&atchan->lock, flags); in atc_advance_work()
549 atc_dostart(atchan, atc_first_active(atchan)); in atc_advance_work()
550 spin_unlock_irqrestore(&atchan->lock, flags); in atc_advance_work()
558 static void atc_handle_error(struct at_dma_chan *atchan) in atc_handle_error() argument
564 spin_lock_irqsave(&atchan->lock, flags); in atc_handle_error()
570 bad_desc = atc_first_active(atchan); in atc_handle_error()
575 list_splice_init(&atchan->queue, atchan->active_list.prev); in atc_handle_error()
578 if (!list_empty(&atchan->active_list)) in atc_handle_error()
579 atc_dostart(atchan, atc_first_active(atchan)); in atc_handle_error()
588 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
590 dev_crit(chan2dev(&atchan->chan_common), in atc_handle_error()
592 atc_dump_lli(atchan, &bad_desc->lli); in atc_handle_error()
594 atc_dump_lli(atchan, &child->lli); in atc_handle_error()
596 spin_unlock_irqrestore(&atchan->lock, flags); in atc_handle_error()
599 atc_chain_complete(atchan, bad_desc); in atc_handle_error()
606 static void atc_handle_cyclic(struct at_dma_chan *atchan) in atc_handle_cyclic() argument
608 struct at_desc *first = atc_first_active(atchan); in atc_handle_cyclic()
611 dev_vdbg(chan2dev(&atchan->chan_common), in atc_handle_cyclic()
613 channel_readl(atchan, DSCR)); in atc_handle_cyclic()
622 struct at_dma_chan *atchan = from_tasklet(atchan, t, tasklet); in atc_tasklet() local
624 if (test_and_clear_bit(ATC_IS_ERROR, &atchan->status)) in atc_tasklet()
625 return atc_handle_error(atchan); in atc_tasklet()
627 if (atc_chan_is_cyclic(atchan)) in atc_tasklet()
628 return atc_handle_cyclic(atchan); in atc_tasklet()
630 atc_advance_work(atchan); in atc_tasklet()
636 struct at_dma_chan *atchan; in at_dma_interrupt() local
654 atchan = &atdma->chan[i]; in at_dma_interrupt()
659 AT_DMA_RES(i) | atchan->mask); in at_dma_interrupt()
661 set_bit(ATC_IS_ERROR, &atchan->status); in at_dma_interrupt()
663 tasklet_schedule(&atchan->tasklet); in at_dma_interrupt()
687 struct at_dma_chan *atchan = to_at_dma_chan(tx->chan); in atc_tx_submit() local
691 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_submit()
694 if (list_empty(&atchan->active_list)) { in atc_tx_submit()
697 atc_dostart(atchan, desc); in atc_tx_submit()
698 list_add_tail(&desc->desc_node, &atchan->active_list); in atc_tx_submit()
702 list_add_tail(&desc->desc_node, &atchan->queue); in atc_tx_submit()
705 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_submit()
721 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_interleaved() local
782 desc = atc_desc_get(atchan); in atc_prep_dma_interleaved()
821 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_memcpy() local
858 desc = atc_desc_get(atchan); in atc_prep_dma_memcpy()
885 atc_desc_put(atchan, first); in atc_prep_dma_memcpy()
894 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_create_memset_desc() local
911 desc = atc_desc_get(atchan); in atc_create_memset_desc()
1000 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_memset_sg() local
1067 atc_desc_put(atchan, first); in atc_prep_dma_memset_sg()
1085 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_slave_sg() local
1087 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_slave_sg()
1120 | ATC_SIF(atchan->mem_if) | ATC_DIF(atchan->per_if); in atc_prep_slave_sg()
1127 desc = atc_desc_get(atchan); in atc_prep_slave_sg()
1160 | ATC_SIF(atchan->per_if) | ATC_DIF(atchan->mem_if); in atc_prep_slave_sg()
1168 desc = atc_desc_get(atchan); in atc_prep_slave_sg()
1214 atc_desc_put(atchan, first); in atc_prep_slave_sg()
1248 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_dma_cyclic_fill_desc() local
1249 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_dma_cyclic_fill_desc()
1267 | ATC_SIF(atchan->mem_if) in atc_dma_cyclic_fill_desc()
1268 | ATC_DIF(atchan->per_if); in atc_dma_cyclic_fill_desc()
1279 | ATC_SIF(atchan->per_if) in atc_dma_cyclic_fill_desc()
1280 | ATC_DIF(atchan->mem_if); in atc_dma_cyclic_fill_desc()
1305 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_prep_dma_cyclic() local
1307 struct dma_slave_config *sconfig = &atchan->dma_sconfig; in atc_prep_dma_cyclic()
1325 was_cyclic = test_and_set_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
1347 desc = atc_desc_get(atchan); in atc_prep_dma_cyclic()
1369 atc_desc_put(atchan, first); in atc_prep_dma_cyclic()
1371 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_prep_dma_cyclic()
1378 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_config() local
1386 memcpy(&atchan->dma_sconfig, sconfig, sizeof(*sconfig)); in atc_config()
1388 convert_burst(&atchan->dma_sconfig.src_maxburst); in atc_config()
1389 convert_burst(&atchan->dma_sconfig.dst_maxburst); in atc_config()
1396 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_pause() local
1398 int chan_id = atchan->chan_common.chan_id; in atc_pause()
1403 spin_lock_irqsave(&atchan->lock, flags); in atc_pause()
1406 set_bit(ATC_IS_PAUSED, &atchan->status); in atc_pause()
1408 spin_unlock_irqrestore(&atchan->lock, flags); in atc_pause()
1415 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_resume() local
1417 int chan_id = atchan->chan_common.chan_id; in atc_resume()
1422 if (!atc_chan_is_paused(atchan)) in atc_resume()
1425 spin_lock_irqsave(&atchan->lock, flags); in atc_resume()
1428 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_resume()
1430 spin_unlock_irqrestore(&atchan->lock, flags); in atc_resume()
1437 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_terminate_all() local
1439 int chan_id = atchan->chan_common.chan_id; in atc_terminate_all()
1453 spin_lock_irqsave(&atchan->lock, flags); in atc_terminate_all()
1456 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask); in atc_terminate_all()
1459 while (dma_readl(atdma, CHSR) & atchan->mask) in atc_terminate_all()
1463 list_splice_init(&atchan->queue, &list); in atc_terminate_all()
1464 list_splice_init(&atchan->active_list, &list); in atc_terminate_all()
1466 spin_unlock_irqrestore(&atchan->lock, flags); in atc_terminate_all()
1470 atc_chain_complete(atchan, desc); in atc_terminate_all()
1472 clear_bit(ATC_IS_PAUSED, &atchan->status); in atc_terminate_all()
1474 clear_bit(ATC_IS_CYCLIC, &atchan->status); in atc_terminate_all()
1494 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_tx_status() local
1509 spin_lock_irqsave(&atchan->lock, flags); in atc_tx_status()
1514 spin_unlock_irqrestore(&atchan->lock, flags); in atc_tx_status()
1535 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_issue_pending() local
1540 if (atc_chan_is_cyclic(atchan)) in atc_issue_pending()
1543 atc_advance_work(atchan); in atc_issue_pending()
1554 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_alloc_chan_resources() local
1564 if (atc_chan_is_enabled(atchan)) { in atc_alloc_chan_resources()
1569 if (!list_empty(&atchan->free_list)) { in atc_alloc_chan_resources()
1597 list_add_tail(&desc->desc_node, &atchan->free_list); in atc_alloc_chan_resources()
1603 channel_writel(atchan, CFG, cfg); in atc_alloc_chan_resources()
1617 struct at_dma_chan *atchan = to_at_dma_chan(chan); in atc_free_chan_resources() local
1623 BUG_ON(!list_empty(&atchan->active_list)); in atc_free_chan_resources()
1624 BUG_ON(!list_empty(&atchan->queue)); in atc_free_chan_resources()
1625 BUG_ON(atc_chan_is_enabled(atchan)); in atc_free_chan_resources()
1627 list_for_each_entry_safe(desc, _desc, &atchan->free_list, desc_node) { in atc_free_chan_resources()
1633 list_splice_init(&atchan->free_list, &list); in atc_free_chan_resources()
1634 atchan->status = 0; in atc_free_chan_resources()
1662 struct at_dma_chan *atchan; in at_dma_xlate() local
1717 atchan = to_at_dma_chan(chan); in at_dma_xlate()
1718 atchan->per_if = dma_spec->args[0] & 0xff; in at_dma_xlate()
1719 atchan->mem_if = (dma_spec->args[0] >> 16) & 0xff; in at_dma_xlate()
1897 struct at_dma_chan *atchan = &atdma->chan[i]; in at_dma_probe() local
1899 atchan->mem_if = AT_DMA_MEM_IF; in at_dma_probe()
1900 atchan->per_if = AT_DMA_PER_IF; in at_dma_probe()
1901 atchan->chan_common.device = &atdma->dma_common; in at_dma_probe()
1902 dma_cookie_init(&atchan->chan_common); in at_dma_probe()
1903 list_add_tail(&atchan->chan_common.device_node, in at_dma_probe()
1906 atchan->ch_regs = atdma->regs + ch_regs(i); in at_dma_probe()
1907 spin_lock_init(&atchan->lock); in at_dma_probe()
1908 atchan->mask = 1 << i; in at_dma_probe()
1910 INIT_LIST_HEAD(&atchan->active_list); in at_dma_probe()
1911 INIT_LIST_HEAD(&atchan->queue); in at_dma_probe()
1912 INIT_LIST_HEAD(&atchan->free_list); in at_dma_probe()
1914 tasklet_setup(&atchan->tasklet, atc_tasklet); in at_dma_probe()
2017 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_remove() local
2022 tasklet_kill(&atchan->tasklet); in at_dma_remove()
2055 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_prepare() local
2057 if (atc_chan_is_enabled(atchan) && !atc_chan_is_cyclic(atchan)) in at_dma_prepare()
2063 static void atc_suspend_cyclic(struct at_dma_chan *atchan) in atc_suspend_cyclic() argument
2065 struct dma_chan *chan = &atchan->chan_common; in atc_suspend_cyclic()
2069 if (!atc_chan_is_paused(atchan)) { in atc_suspend_cyclic()
2077 atchan->save_dscr = channel_readl(atchan, DSCR); in atc_suspend_cyclic()
2079 vdbg_dump_regs(atchan); in atc_suspend_cyclic()
2090 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_suspend_noirq() local
2092 if (atc_chan_is_cyclic(atchan)) in at_dma_suspend_noirq()
2093 atc_suspend_cyclic(atchan); in at_dma_suspend_noirq()
2094 atchan->save_cfg = channel_readl(atchan, CFG); in at_dma_suspend_noirq()
2104 static void atc_resume_cyclic(struct at_dma_chan *atchan) in atc_resume_cyclic() argument
2106 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_resume_cyclic()
2110 channel_writel(atchan, SADDR, 0); in atc_resume_cyclic()
2111 channel_writel(atchan, DADDR, 0); in atc_resume_cyclic()
2112 channel_writel(atchan, CTRLA, 0); in atc_resume_cyclic()
2113 channel_writel(atchan, CTRLB, 0); in atc_resume_cyclic()
2114 channel_writel(atchan, DSCR, atchan->save_dscr); in atc_resume_cyclic()
2115 dma_writel(atdma, CHER, atchan->mask); in atc_resume_cyclic()
2120 vdbg_dump_regs(atchan); in atc_resume_cyclic()
2140 struct at_dma_chan *atchan = to_at_dma_chan(chan); in at_dma_resume_noirq() local
2142 channel_writel(atchan, CFG, atchan->save_cfg); in at_dma_resume_noirq()
2143 if (atc_chan_is_cyclic(atchan)) in at_dma_resume_noirq()
2144 atc_resume_cyclic(atchan); in at_dma_resume_noirq()