Lines matching refs: schan (drivers/dma/sirf-dma.c)
159 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(c); in dma_chan_to_sirfsoc_dma() local
160 return container_of(schan, struct sirfsoc_dma, channels[c->chan_id]); in dma_chan_to_sirfsoc_dma()
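dma_chan_to_sirfsoc_dma() climbs two levels of embedding: the generic struct dma_chan sits inside struct sirfsoc_dma_chan, which is in turn slot chan_id of the channels[] array inside struct sirfsoc_dma, so a second container_of() against the indexed member recovers the device struct. Below is a minimal userspace sketch of that double hop; the struct layouts are reduced to what the pointer arithmetic needs, and the program itself is illustrative, not driver code.

#include <stddef.h>
#include <stdio.h>

/* simplified version of the kernel macro: subtract the member's
 * offset from the member's address to get the enclosing object */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - (size_t)&((type *)0)->member))

struct dma_chan { int chan_id; };
struct sirfsoc_dma_chan { struct dma_chan chan; };
struct sirfsoc_dma { struct sirfsoc_dma_chan channels[16]; };

static struct sirfsoc_dma *to_sdma(struct dma_chan *c)
{
	struct sirfsoc_dma_chan *schan =
		container_of(c, struct sirfsoc_dma_chan, chan);

	/* channels[chan_id] is the member whose address we hold, so
	 * subtracting its offset yields the device struct */
	return container_of(schan, struct sirfsoc_dma, channels[c->chan_id]);
}

int main(void)
{
	struct sirfsoc_dma sdma;
	struct dma_chan *c = &sdma.channels[3].chan;

	c->chan_id = 3;
	printf("recovered == original: %d\n", to_sdma(c) == &sdma);
	return 0;
}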
237 static void sirfsoc_dma_execute(struct sirfsoc_dma_chan *schan) in sirfsoc_dma_execute() argument
239 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_execute()
240 int cid = schan->chan.chan_id; in sirfsoc_dma_execute()
249 sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc, in sirfsoc_dma_execute()
252 list_move_tail(&sdesc->node, &schan->active); in sirfsoc_dma_execute()
258 sdma->exec_desc(sdesc, cid, schan->mode, base); in sirfsoc_dma_execute()
261 schan->happened_cyclic = schan->completed_cyclic = 0; in sirfsoc_dma_execute()
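sirfsoc_dma_execute() runs with schan->lock already held: it pops the head of the queued list, moves it to the tail of active, programs the channel through the exec_desc() hook, and zeroes both cyclic counters for the fresh transfer. A hypothetical userspace model of the queued-to-active handoff, where move_head() stands in for list_first_entry() plus list_move_tail():

#include <stddef.h>
#include <stdio.h>

struct desc { int id; struct desc *next; };

/* pop the head of one singly linked list and append it to another;
 * stands in for list_move_tail(&sdesc->node, &schan->active) */
static struct desc *move_head(struct desc **from, struct desc **to)
{
	struct desc *d = *from;

	if (!d)
		return NULL;
	*from = d->next;
	d->next = NULL;
	while (*to)
		to = &(*to)->next;
	*to = d;
	return d;
}

int main(void)
{
	struct desc d2 = { 2, NULL };
	struct desc d1 = { 1, &d2 };
	struct desc *queued = &d1, *active = NULL;
	struct desc *sdesc = move_head(&queued, &active);

	/* in the driver this is where sdma->exec_desc() programs the
	 * channel and happened_cyclic/completed_cyclic are reset */
	printf("started desc %d; next queued: %d\n",
	       sdesc->id, queued ? queued->id : -1);
	return 0;
}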
268 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_irq() local
283 schan = &sdma->channels[ch]; in sirfsoc_dma_irq()
284 spin_lock(&schan->lock); in sirfsoc_dma_irq()
285 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
289 list_splice_tail_init(&schan->active, in sirfsoc_dma_irq()
290 &schan->completed); in sirfsoc_dma_irq()
292 if (!list_empty(&schan->queued)) in sirfsoc_dma_irq()
293 sirfsoc_dma_execute(schan); in sirfsoc_dma_irq()
295 schan->happened_cyclic++; in sirfsoc_dma_irq()
296 spin_unlock(&schan->lock); in sirfsoc_dma_irq()
305 schan = &sdma->channels[0]; in sirfsoc_dma_irq()
306 spin_lock(&schan->lock); in sirfsoc_dma_irq()
307 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
315 list_splice_tail_init(&schan->active, in sirfsoc_dma_irq()
316 &schan->completed); in sirfsoc_dma_irq()
318 if (!list_empty(&schan->queued)) in sirfsoc_dma_irq()
319 sirfsoc_dma_execute(schan); in sirfsoc_dma_irq()
323 schan->happened_cyclic++; in sirfsoc_dma_irq()
325 spin_unlock(&schan->lock); in sirfsoc_dma_irq()
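The interrupt handler takes each signalled channel's lock with plain spin_lock(), which is sufficient in hard-IRQ context, then either retires the active descriptor and chains the next queued one, or, for a cyclic transfer, merely bumps happened_cyclic and leaves the descriptor in place. A sketch of that per-channel branch; list lengths are modelled as counters and a pthread mutex stands in for the spinlock, so the scaffolding is hypothetical even though the field names mirror the driver.

#include <stdio.h>
#include <pthread.h>

struct chan_model {
	pthread_mutex_t lock;		/* schan->lock; spin_lock() in IRQ */
	int active, queued, completed;	/* list lengths, not real lists */
	unsigned long happened_cyclic;
	int is_cyclic;
};

static void irq_one_channel(struct chan_model *c)
{
	pthread_mutex_lock(&c->lock);
	if (!c->is_cyclic) {
		/* list_splice_tail_init(&schan->active, &schan->completed) */
		c->completed += c->active;
		c->active = 0;
		/* chain the next transfer if one is queued */
		if (c->queued) {
			c->queued--;
			c->active = 1;	/* sirfsoc_dma_execute(schan) */
		}
	} else {
		/* cyclic descriptors never retire; just count the period */
		c->happened_cyclic++;
	}
	pthread_mutex_unlock(&c->lock);
}

int main(void)
{
	struct chan_model c = {
		.lock = PTHREAD_MUTEX_INITIALIZER, .active = 1, .queued = 1,
	};

	irq_one_channel(&c);
	printf("completed=%d active=%d queued=%d\n",
	       c.completed, c.active, c.queued);
	return 0;
}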
342 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_process_completed() local
351 schan = &sdma->channels[i]; in sirfsoc_dma_process_completed()
354 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_process_completed()
355 if (!list_empty(&schan->completed)) { in sirfsoc_dma_process_completed()
356 list_splice_tail_init(&schan->completed, &list); in sirfsoc_dma_process_completed()
357 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
369 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_process_completed()
370 list_splice_tail_init(&list, &schan->free); in sirfsoc_dma_process_completed()
371 schan->chan.completed_cookie = last_cookie; in sirfsoc_dma_process_completed()
372 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
374 if (list_empty(&schan->active)) { in sirfsoc_dma_process_completed()
375 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
380 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_process_completed()
384 happened_cyclic = schan->happened_cyclic; in sirfsoc_dma_process_completed()
385 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_process_completed()
388 while (happened_cyclic != schan->completed_cyclic) { in sirfsoc_dma_process_completed()
390 schan->completed_cyclic++; in sirfsoc_dma_process_completed()
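sirfsoc_dma_process_completed() (the tasklet half) keeps lock hold times short: completed descriptors are spliced onto a private list under the lock and their callbacks run only after the unlock, while cyclic channels are caught up by replaying the gap between a snapshot of happened_cyclic and completed_cyclic. A toy rendering of that catch-up loop, with printf standing in for the per-period descriptor callback:

#include <stdio.h>

int main(void)
{
	unsigned long happened_cyclic = 5;	/* snapshot taken under the lock */
	unsigned long completed_cyclic = 2;	/* periods already reported */

	/* one callback per period the hardware finished since last time */
	while (happened_cyclic != completed_cyclic) {
		printf("cyclic period callback %lu\n", completed_cyclic);
		completed_cyclic++;
	}
	return 0;
}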
407 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(txd->chan); in sirfsoc_dma_tx_submit() local
414 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_tx_submit()
417 list_move_tail(&sdesc->node, &schan->queued); in sirfsoc_dma_tx_submit()
421 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_submit()
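tx_submit() moves the descriptor from prepared to queued and returns the cookie that tx_status() will later be asked about; the driver delegates the numbering to dmaengine's dma_cookie_assign(). The sketch below is a rough approximation of that helper's contract, monotonically increasing values that skip zero and the negative error range, not its exact definition:

#include <stdio.h>

typedef int dma_cookie_t;

/* rough model of dma_cookie_assign(): the per-channel cookie counts
 * upward and wraps back to 1, never handing out <= 0 (error values) */
static dma_cookie_t assign_cookie(dma_cookie_t *chan_cookie)
{
	dma_cookie_t cookie = *chan_cookie + 1;

	if (cookie < 1)
		cookie = 1;
	return *chan_cookie = cookie;
}

int main(void)
{
	dma_cookie_t chan_cookie = 0;

	for (int i = 0; i < 3; i++)
		printf("cookie %d\n", assign_cookie(&chan_cookie));
	return 0;
}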
429 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_slave_config() local
436 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_slave_config()
437 schan->mode = (config->src_maxburst == 4 ? 1 : 0); in sirfsoc_dma_slave_config()
438 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_slave_config()
445 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_terminate_all() local
446 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_terminate_all()
447 int cid = schan->chan.chan_id; in sirfsoc_dma_terminate_all()
450 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_terminate_all()
481 list_splice_tail_init(&schan->active, &schan->free); in sirfsoc_dma_terminate_all()
482 list_splice_tail_init(&schan->queued, &schan->free); in sirfsoc_dma_terminate_all()
484 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_terminate_all()
491 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_pause_chan() local
492 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_pause_chan()
493 int cid = schan->chan.chan_id; in sirfsoc_dma_pause_chan()
496 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_pause_chan()
518 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_pause_chan()
525 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_resume_chan() local
526 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan); in sirfsoc_dma_resume_chan()
527 int cid = schan->chan.chan_id; in sirfsoc_dma_resume_chan()
530 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_resume_chan()
551 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_resume_chan()
560 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_alloc_chan_resources() local
588 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_alloc_chan_resources()
590 list_splice_tail_init(&descs, &schan->free); in sirfsoc_dma_alloc_chan_resources()
591 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_alloc_chan_resources()
599 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_free_chan_resources() local
605 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_free_chan_resources()
608 BUG_ON(!list_empty(&schan->prepared)); in sirfsoc_dma_free_chan_resources()
609 BUG_ON(!list_empty(&schan->queued)); in sirfsoc_dma_free_chan_resources()
610 BUG_ON(!list_empty(&schan->active)); in sirfsoc_dma_free_chan_resources()
611 BUG_ON(!list_empty(&schan->completed)); in sirfsoc_dma_free_chan_resources()
614 list_splice_tail_init(&schan->free, &descs); in sirfsoc_dma_free_chan_resources()
616 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_free_chan_resources()
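free_chan_resources() encodes the lifecycle invariant with four BUG_ON() calls: at teardown only the free list may still hold descriptors, and even those are spliced to a private list under the lock so the kfree() calls happen after the unlock. A compact model of that collect-under-lock, release-after-unlock idiom; the malloc'd nodes and pthread mutex are hypothetical stand-ins:

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <pthread.h>

struct desc { struct desc *next; };

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static struct desc *free_list, *queued_list;

int main(void)
{
	struct desc *batch, *d;

	/* populate the free list as alloc_chan_resources() would */
	for (int i = 0; i < 4; i++) {
		d = malloc(sizeof(*d));
		d->next = free_list;
		free_list = d;
	}

	pthread_mutex_lock(&lock);
	assert(queued_list == NULL);	/* BUG_ON(!list_empty(&schan->queued)) */
	batch = free_list;	/* list_splice_tail_init(&schan->free, &descs) */
	free_list = NULL;
	pthread_mutex_unlock(&lock);

	/* memory is returned only after the lock is dropped */
	while ((d = batch)) {
		batch = d->next;
		free(d);
	}
	puts("all descriptors released");
	return 0;
}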
628 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_issue_pending() local
631 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_issue_pending()
633 if (list_empty(&schan->active) && !list_empty(&schan->queued)) in sirfsoc_dma_issue_pending()
634 sirfsoc_dma_execute(schan); in sirfsoc_dma_issue_pending()
636 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_issue_pending()
645 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_tx_status() local
649 int cid = schan->chan.chan_id; in sirfsoc_dma_tx_status()
654 spin_lock_irqsave(&schan->lock, flags); in sirfsoc_dma_tx_status()
656 if (list_empty(&schan->active)) { in sirfsoc_dma_tx_status()
659 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_status()
662 sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc, node); in sirfsoc_dma_tx_status()
684 spin_unlock_irqrestore(&schan->lock, flags); in sirfsoc_dma_tx_status()
694 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_prep_interleaved() local
705 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
706 if (!list_empty(&schan->free)) { in sirfsoc_dma_prep_interleaved()
707 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_interleaved()
711 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
721 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
741 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_interleaved()
747 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
751 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_interleaved()
762 struct sirfsoc_dma_chan *schan = dma_chan_to_sirfsoc_dma_chan(chan); in sirfsoc_dma_prep_cyclic() local
781 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
782 if (!list_empty(&schan->free)) { in sirfsoc_dma_prep_cyclic()
783 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_cyclic()
787 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
793 spin_lock_irqsave(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
799 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_cyclic()
800 spin_unlock_irqrestore(&schan->lock, iflags); in sirfsoc_dma_prep_cyclic()
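Both prep routines above follow the same allocation discipline: hold the lock just long enough to detach a descriptor from the free list, drop it while filling in the transfer parameters, then retake it to park the result on prepared. A userspace sketch of that minimised-hold-time pattern; fill_params() is a hypothetical stand-in for the driver's field setup:

#include <stdio.h>
#include <pthread.h>

struct desc { int xlen, ylen; struct desc *next; };

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static struct desc pool = { 0, 0, NULL };
static struct desc *free_list = &pool, *prepared_list;

/* hypothetical stand-in for copying the dma_interleaved_template or
 * cyclic parameters into the descriptor */
static void fill_params(struct desc *d)
{
	d->xlen = 64;
	d->ylen = 16;
}

int main(void)
{
	struct desc *d;

	pthread_mutex_lock(&lock);
	d = free_list;			/* list_first_entry(&schan->free, ...) */
	if (!d) {
		pthread_mutex_unlock(&lock);
		fprintf(stderr, "no free descriptor\n");
		return 1;		/* the prep routine returns NULL here */
	}
	free_list = d->next;
	pthread_mutex_unlock(&lock);

	fill_params(d);			/* done without holding the lock */

	pthread_mutex_lock(&lock);
	d->next = prepared_list;	/* list_add_tail(..., &schan->prepared) */
	prepared_list = d;
	pthread_mutex_unlock(&lock);

	printf("prepared %dx%d descriptor\n", d->xlen, d->ylen);
	return 0;
}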
846 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_probe() local
927 schan = &sdma->channels[i]; in sirfsoc_dma_probe()
929 schan->chan.device = dma; in sirfsoc_dma_probe()
930 dma_cookie_init(&schan->chan); in sirfsoc_dma_probe()
932 INIT_LIST_HEAD(&schan->free); in sirfsoc_dma_probe()
933 INIT_LIST_HEAD(&schan->prepared); in sirfsoc_dma_probe()
934 INIT_LIST_HEAD(&schan->queued); in sirfsoc_dma_probe()
935 INIT_LIST_HEAD(&schan->active); in sirfsoc_dma_probe()
936 INIT_LIST_HEAD(&schan->completed); in sirfsoc_dma_probe()
938 spin_lock_init(&schan->lock); in sirfsoc_dma_probe()
939 list_add_tail(&schan->chan.device_node, &dma->channels); in sirfsoc_dma_probe()
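probe() gives every channel the five list heads that define the descriptor lifecycle, free, prepared, queued, active, completed, plus the schan->lock that every function in this listing takes before moving descriptors along that chain. A condensed model of the per-channel setup; the channel count of 16 is an assumption about this SoC family:

#include <stdio.h>
#include <pthread.h>

#define SIRFSOC_DMA_CHANNELS 16	/* assumed; the real count is per-SoC */

/* the five lifecycle stages, tracked as counts instead of list_heads */
enum desc_state { FREE, PREPARED, QUEUED, ACTIVE, COMPLETED, NR_STATES };

struct chan_model {
	pthread_mutex_t lock;		/* schan->lock */
	int nr[NR_STATES];
};

static struct chan_model channels[SIRFSOC_DMA_CHANNELS];

int main(void)
{
	for (int i = 0; i < SIRFSOC_DMA_CHANNELS; i++) {
		pthread_mutex_init(&channels[i].lock, NULL);	/* spin_lock_init */
		for (int s = 0; s < NR_STATES; s++)
			channels[i].nr[s] = 0;	/* the five INIT_LIST_HEAD calls */
	}
	printf("%d channels initialised\n", SIRFSOC_DMA_CHANNELS);
	return 0;
}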
1014 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_pm_suspend() local
1043 schan = &sdma->channels[ch]; in sirfsoc_dma_pm_suspend()
1044 if (list_empty(&schan->active)) in sirfsoc_dma_pm_suspend()
1062 struct sirfsoc_dma_chan *schan; in sirfsoc_dma_pm_resume() local
1086 schan = &sdma->channels[ch]; in sirfsoc_dma_pm_resume()
1087 if (list_empty(&schan->active)) in sirfsoc_dma_pm_resume()
1089 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_pm_resume()