Lines matching refs: mchan

114 static void hidma_process_completed(struct hidma_chan *mchan)  in hidma_process_completed()  argument
116 struct dma_device *ddev = mchan->chan.device; in hidma_process_completed()
128 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_process_completed()
129 list_splice_tail_init(&mchan->completed, &list); in hidma_process_completed()
130 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_process_completed()
143 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_process_completed()
145 mchan->last_success = last_cookie; in hidma_process_completed()
152 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_process_completed()
158 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_process_completed()
159 list_move(&mdesc->node, &mchan->free); in hidma_process_completed()
160 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_process_completed()
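
The three lock/unlock pairs above (128/130, 143/152, 158/160) mark the three phases of completion handling: splice the completed list out under the lock, record last_success and complete each cookie, then recycle the descriptor before invoking its callback. A sketch of the surrounding function, reconstructed from the upstream Qualcomm HIDMA driver (this listing appears to be drivers/dma/qcom/hidma.c); helpers such as hidma_ll_status() and the dmaengine_desc_callback routines come from that source rather than this listing and may differ by kernel revision:

    static void hidma_process_completed(struct hidma_chan *mchan)
    {
            struct dma_device *ddev = mchan->chan.device;
            struct hidma_dev *mdma = to_hidma_dev(ddev);
            struct hidma_desc *mdesc, *next;
            unsigned long irqflags;
            LIST_HEAD(list);

            /* Phase 1: detach all completed descriptors under the lock. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            list_splice_tail_init(&mchan->completed, &list);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            list_for_each_entry_safe(mdesc, next, &list, node) {
                    struct dma_async_tx_descriptor *desc = &mdesc->desc;
                    dma_cookie_t last_cookie = desc->cookie;
                    struct dmaengine_desc_callback cb;
                    struct dmaengine_result result;
                    enum dma_status llstat;

                    llstat = hidma_ll_status(mdma->lldev, mdesc->tre_ch);

                    /* Phase 2: record success and complete the cookie. */
                    spin_lock_irqsave(&mchan->lock, irqflags);
                    if (llstat == DMA_COMPLETE) {
                            mchan->last_success = last_cookie;
                            result.result = DMA_TRANS_NOERROR;
                    } else {
                            result.result = DMA_TRANS_ABORTED;
                    }
                    dma_cookie_complete(desc);
                    spin_unlock_irqrestore(&mchan->lock, irqflags);

                    dmaengine_desc_get_callback(desc, &cb);
                    dma_run_dependencies(desc);

                    /* Phase 3: recycle the descriptor, then fire the callback
                     * outside the lock.
                     */
                    spin_lock_irqsave(&mchan->lock, irqflags);
                    list_move(&mdesc->node, &mchan->free);
                    spin_unlock_irqrestore(&mchan->lock, irqflags);

                    dmaengine_desc_callback_invoke(&cb, &result);
            }
    }
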
174 struct hidma_chan *mchan = to_hidma_chan(mdesc->desc.chan); in hidma_callback() local
175 struct dma_device *ddev = mchan->chan.device; in hidma_callback()
180 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_callback()
183 list_move_tail(&mdesc->node, &mchan->completed); in hidma_callback()
187 mchan->running = list_first_entry(&mchan->active, in hidma_callback()
190 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_callback()
192 hidma_process_completed(mchan); in hidma_callback()
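
The interrupt-side callback moves the finished descriptor from active to completed and elects the next runner, all under mchan->lock, then drains the completed list outside it. A hedged reconstruction from the same upstream source (the queued flag and the pm_runtime calls are taken from that source, not from this listing):

    static void hidma_callback(void *data)
    {
            struct hidma_desc *mdesc = data;
            struct hidma_chan *mchan = to_hidma_chan(mdesc->desc.chan);
            struct dma_device *ddev = mchan->chan.device;
            struct hidma_dev *dmadev = to_hidma_dev(ddev);
            unsigned long irqflags;
            bool queued = false;

            spin_lock_irqsave(&mchan->lock, irqflags);
            if (mdesc->node.next) {
                    /* active -> completed; the head of active runs next. */
                    list_move_tail(&mdesc->node, &mchan->completed);
                    queued = true;

                    mchan->running = list_first_entry(&mchan->active,
                                                      struct hidma_desc, node);
            }
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            hidma_process_completed(mchan);

            /* Drop the PM reference taken when the request was issued. */
            if (queued) {
                    pm_runtime_mark_last_busy(dmadev->ddev.dev);
                    pm_runtime_put_autosuspend(dmadev->ddev.dev);
            }
    }
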
202 struct hidma_chan *mchan; in hidma_chan_init() local
205 mchan = devm_kzalloc(dmadev->ddev.dev, sizeof(*mchan), GFP_KERNEL); in hidma_chan_init()
206 if (!mchan) in hidma_chan_init()
210 mchan->dma_sig = dma_sig; in hidma_chan_init()
211 mchan->dmadev = dmadev; in hidma_chan_init()
212 mchan->chan.device = ddev; in hidma_chan_init()
213 dma_cookie_init(&mchan->chan); in hidma_chan_init()
215 INIT_LIST_HEAD(&mchan->free); in hidma_chan_init()
216 INIT_LIST_HEAD(&mchan->prepared); in hidma_chan_init()
217 INIT_LIST_HEAD(&mchan->active); in hidma_chan_init()
218 INIT_LIST_HEAD(&mchan->completed); in hidma_chan_init()
219 INIT_LIST_HEAD(&mchan->queued); in hidma_chan_init()
221 spin_lock_init(&mchan->lock); in hidma_chan_init()
222 list_add_tail(&mchan->chan.device_node, &ddev->channels); in hidma_chan_init()
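
The five INIT_LIST_HEAD calls above define the descriptor lifecycle: free -> prepared -> queued -> active -> completed -> free. A minimal sketch of the initializer around these matches, assuming the upstream struct layout:

    static int hidma_chan_init(struct hidma_dev *dmadev, u32 dma_sig)
    {
            struct hidma_chan *mchan;
            struct dma_device *ddev = &dmadev->ddev;

            mchan = devm_kzalloc(dmadev->ddev.dev, sizeof(*mchan), GFP_KERNEL);
            if (!mchan)
                    return -ENOMEM;

            mchan->dma_sig = dma_sig;
            mchan->dmadev = dmadev;
            mchan->chan.device = ddev;
            dma_cookie_init(&mchan->chan);

            /* A descriptor lives on exactly one of these lists at a time. */
            INIT_LIST_HEAD(&mchan->free);
            INIT_LIST_HEAD(&mchan->prepared);
            INIT_LIST_HEAD(&mchan->active);
            INIT_LIST_HEAD(&mchan->completed);
            INIT_LIST_HEAD(&mchan->queued);

            spin_lock_init(&mchan->lock);
            list_add_tail(&mchan->chan.device_node, &ddev->channels);
            return 0;
    }
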
237 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_issue_pending() local
238 struct hidma_dev *dmadev = mchan->dmadev; in hidma_issue_pending()
243 spin_lock_irqsave(&mchan->lock, flags); in hidma_issue_pending()
244 list_for_each_entry_safe(qdesc, next, &mchan->queued, node) { in hidma_issue_pending()
246 list_move_tail(&qdesc->node, &mchan->active); in hidma_issue_pending()
249 if (!mchan->running) { in hidma_issue_pending()
250 struct hidma_desc *desc = list_first_entry(&mchan->active, in hidma_issue_pending()
253 mchan->running = desc; in hidma_issue_pending()
255 spin_unlock_irqrestore(&mchan->lock, flags); in hidma_issue_pending()
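
issue_pending walks queued descriptors into the active list and, if nothing is in flight, promotes the head of active to mchan->running before kicking the hardware. A sketch under the same assumptions (hidma_ll_queue_request() and hidma_ll_start() are upstream helper names):

    static void hidma_issue_pending(struct dma_chan *dmach)
    {
            struct hidma_chan *mchan = to_hidma_chan(dmach);
            struct hidma_dev *dmadev = mchan->dmadev;
            struct hidma_desc *qdesc, *next;
            unsigned long flags;

            pm_runtime_get_sync(dmadev->ddev.dev);

            spin_lock_irqsave(&mchan->lock, flags);
            list_for_each_entry_safe(qdesc, next, &mchan->queued, node) {
                    hidma_ll_queue_request(dmadev->lldev, qdesc->tre_ch);
                    list_move_tail(&qdesc->node, &mchan->active);
            }

            /* If nothing is in flight, the head of active becomes the runner. */
            if (!mchan->running) {
                    struct hidma_desc *desc = list_first_entry(&mchan->active,
                                                               struct hidma_desc,
                                                               node);
                    mchan->running = desc;
            }
            spin_unlock_irqrestore(&mchan->lock, flags);

            /* The PM reference is dropped in hidma_callback(). */
            hidma_ll_start(dmadev->lldev);
    }
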
282 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_tx_status() local
289 is_success = hidma_txn_is_success(cookie, mchan->last_success, in hidma_tx_status()
294 if (mchan->paused && (ret == DMA_IN_PROGRESS)) { in hidma_tx_status()
298 spin_lock_irqsave(&mchan->lock, flags); in hidma_tx_status()
299 if (mchan->running) in hidma_tx_status()
300 runcookie = mchan->running->desc.cookie; in hidma_tx_status()
307 spin_unlock_irqrestore(&mchan->lock, flags); in hidma_tx_status()
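
tx_status layers two channel-private checks on top of the generic cookie status: a completed cookie is cross-checked against mchan->last_success to detect aborted transfers, and an in-progress cookie on a paused channel is reported as DMA_PAUSED only if it is the one currently running. A reconstruction, assuming the upstream hidma_txn_is_success() helper:

    static enum dma_status hidma_tx_status(struct dma_chan *dmach,
                                           dma_cookie_t cookie,
                                           struct dma_tx_state *txstate)
    {
            struct hidma_chan *mchan = to_hidma_chan(dmach);
            enum dma_status ret;

            ret = dma_cookie_status(dmach, cookie, txstate);
            if (ret == DMA_COMPLETE) {
                    bool is_success;

                    /* Completed, but did the hardware transfer succeed? */
                    is_success = hidma_txn_is_success(cookie, mchan->last_success,
                                                      dmach->cookie);
                    return is_success ? ret : DMA_ERROR;
            }

            if (mchan->paused && (ret == DMA_IN_PROGRESS)) {
                    unsigned long flags;
                    dma_cookie_t runcookie;

                    spin_lock_irqsave(&mchan->lock, flags);
                    if (mchan->running)
                            runcookie = mchan->running->desc.cookie;
                    else
                            runcookie = -EINVAL;

                    if (runcookie == cookie)
                            ret = DMA_PAUSED;
                    spin_unlock_irqrestore(&mchan->lock, flags);
            }

            return ret;
    }
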
319 struct hidma_chan *mchan = to_hidma_chan(txd->chan); in hidma_tx_submit() local
320 struct hidma_dev *dmadev = mchan->dmadev; in hidma_tx_submit()
335 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_tx_submit()
338 list_move_tail(&mdesc->node, &mchan->queued); in hidma_tx_submit()
343 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_tx_submit()
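
tx_submit moves the descriptor from prepared to queued and assigns its cookie under the lock, after verifying the channel is enabled. A sketch; the hidma_ll_isenabled() gate and the pm_runtime bracketing are from the upstream source:

    static dma_cookie_t hidma_tx_submit(struct dma_async_tx_descriptor *txd)
    {
            struct hidma_chan *mchan = to_hidma_chan(txd->chan);
            struct hidma_dev *dmadev = mchan->dmadev;
            struct hidma_desc *mdesc;
            unsigned long irqflags;
            dma_cookie_t cookie;

            pm_runtime_get_sync(dmadev->ddev.dev);
            if (!hidma_ll_isenabled(dmadev->lldev)) {
                    pm_runtime_mark_last_busy(dmadev->ddev.dev);
                    pm_runtime_put_autosuspend(dmadev->ddev.dev);
                    return -ENODEV;
            }
            pm_runtime_mark_last_busy(dmadev->ddev.dev);
            pm_runtime_put_autosuspend(dmadev->ddev.dev);

            mdesc = container_of(txd, struct hidma_desc, desc);
            spin_lock_irqsave(&mchan->lock, irqflags);

            /* prepared -> queued; the cookie is assigned under the lock. */
            list_move_tail(&mdesc->node, &mchan->queued);
            cookie = dma_cookie_assign(txd);

            spin_unlock_irqrestore(&mchan->lock, irqflags);
            return cookie;
    }
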
350 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_alloc_chan_resources() local
351 struct hidma_dev *dmadev = mchan->dmadev; in hidma_alloc_chan_resources()
358 if (mchan->allocated) in hidma_alloc_chan_resources()
371 rc = hidma_ll_request(dmadev->lldev, mchan->dma_sig, in hidma_alloc_chan_resources()
392 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_alloc_chan_resources()
393 list_splice_tail_init(&descs, &mchan->free); in hidma_alloc_chan_resources()
394 mchan->allocated = true; in hidma_alloc_chan_resources()
395 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_alloc_chan_resources()
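
Allocation builds descriptors on a private list and splices them onto mchan->free under the lock only once every hidma_ll_request() has succeeded, so a partial failure never leaks half-initialized descriptors into the channel. A hedged sketch; nr_descriptors, tre_ch, and the hidma_ll_request()/hidma_ll_free() signatures follow the upstream driver and may differ by revision:

    static int hidma_alloc_chan_resources(struct dma_chan *dmach)
    {
            struct hidma_chan *mchan = to_hidma_chan(dmach);
            struct hidma_dev *dmadev = mchan->dmadev;
            struct hidma_desc *mdesc, *tmp;
            unsigned long irqflags;
            LIST_HEAD(descs);
            unsigned int i;
            int rc = 0;

            if (mchan->allocated)
                    return 0;

            /* Allocate descriptors and reserve a hardware TRE for each. */
            for (i = 0; i < dmadev->nr_descriptors; i++) {
                    mdesc = kzalloc(sizeof(struct hidma_desc), GFP_NOWAIT);
                    if (!mdesc) {
                            rc = -ENOMEM;
                            break;
                    }
                    dma_async_tx_descriptor_init(&mdesc->desc, dmach);
                    mdesc->desc.tx_submit = hidma_tx_submit;

                    rc = hidma_ll_request(dmadev->lldev, mchan->dma_sig,
                                          "DMA engine", hidma_callback,
                                          mdesc, &mdesc->tre_ch);
                    if (rc) {
                            kfree(mdesc);
                            break;
                    }
                    list_add_tail(&mdesc->node, &descs);
            }

            if (rc) {
                    /* Unwind: return every descriptor allocated so far. */
                    list_for_each_entry_safe(mdesc, tmp, &descs, node) {
                            hidma_ll_free(dmadev->lldev, mdesc->tre_ch);
                            kfree(mdesc);
                    }
                    return rc;
            }

            spin_lock_irqsave(&mchan->lock, irqflags);
            list_splice_tail_init(&descs, &mchan->free);
            mchan->allocated = true;
            spin_unlock_irqrestore(&mchan->lock, irqflags);
            return 0;
    }
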
403 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_prep_dma_memcpy() local
405 struct hidma_dev *mdma = mchan->dmadev; in hidma_prep_dma_memcpy()
409 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
410 if (!list_empty(&mchan->free)) { in hidma_prep_dma_memcpy()
411 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node); in hidma_prep_dma_memcpy()
414 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
425 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
426 list_add_tail(&mdesc->node, &mchan->prepared); in hidma_prep_dma_memcpy()
427 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
436 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_prep_dma_memset() local
438 struct hidma_dev *mdma = mchan->dmadev; in hidma_prep_dma_memset()
442 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memset()
443 if (!list_empty(&mchan->free)) { in hidma_prep_dma_memset()
444 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node); in hidma_prep_dma_memset()
447 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memset()
458 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memset()
459 list_add_tail(&mdesc->node, &mchan->prepared); in hidma_prep_dma_memset()
460 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memset()
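
Both prep routines above share the same pattern: pop a descriptor from free under the lock, program the transfer outside it, then park the descriptor on prepared. A sketch of the memcpy variant; hidma_ll_set_transfer_params() and HIDMA_TRE_MEMCPY are upstream names whose exact signature varies by kernel version, and the memset variant differs only in the value/type passed down:

    static struct dma_async_tx_descriptor *
    hidma_prep_dma_memcpy(struct dma_chan *dmach, dma_addr_t dest,
                          dma_addr_t src, size_t len, unsigned long flags)
    {
            struct hidma_chan *mchan = to_hidma_chan(dmach);
            struct hidma_desc *mdesc = NULL;
            struct hidma_dev *mdma = mchan->dmadev;
            unsigned long irqflags;

            /* Take one descriptor off the free list, if any. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            if (!list_empty(&mchan->free)) {
                    mdesc = list_first_entry(&mchan->free, struct hidma_desc,
                                             node);
                    list_del(&mdesc->node);
            }
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            if (!mdesc)
                    return NULL;

            hidma_ll_set_transfer_params(mdma->lldev, mdesc->tre_ch,
                                         src, dest, len, flags,
                                         HIDMA_TRE_MEMCPY);

            /* Park it on prepared until tx_submit() queues it. */
            spin_lock_irqsave(&mchan->lock, irqflags);
            list_add_tail(&mdesc->node, &mchan->prepared);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            return &mdesc->desc;
    }
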
467 struct hidma_chan *mchan = to_hidma_chan(chan); in hidma_terminate_channel() local
468 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device); in hidma_terminate_channel()
476 hidma_process_completed(mchan); in hidma_terminate_channel()
478 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_terminate_channel()
479 mchan->last_success = 0; in hidma_terminate_channel()
480 list_splice_init(&mchan->active, &list); in hidma_terminate_channel()
481 list_splice_init(&mchan->prepared, &list); in hidma_terminate_channel()
482 list_splice_init(&mchan->completed, &list); in hidma_terminate_channel()
483 list_splice_init(&mchan->queued, &list); in hidma_terminate_channel()
484 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_terminate_channel()
502 list_move(&mdesc->node, &mchan->free); in hidma_terminate_channel()
514 struct hidma_chan *mchan = to_hidma_chan(chan); in hidma_terminate_all() local
515 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device); in hidma_terminate_all()
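
The two teardown paths above differ in scope: hidma_terminate_channel() splices all four pending lists out under the lock, suspends the hardware, returns every descriptor to free, and re-enables; hidma_terminate_all() wraps it and additionally re-runs the low-level setup. A reconstruction under the same assumptions (hidma_ll_disable()/enable()/setup() are upstream names):

    static int hidma_terminate_channel(struct dma_chan *chan)
    {
            struct hidma_chan *mchan = to_hidma_chan(chan);
            struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);
            struct hidma_desc *tmp, *mdesc;
            unsigned long irqflags;
            LIST_HEAD(list);
            int rc;

            pm_runtime_get_sync(dmadev->ddev.dev);
            /* Give already-completed requests a chance to finish. */
            hidma_process_completed(mchan);

            spin_lock_irqsave(&mchan->lock, irqflags);
            mchan->last_success = 0;
            list_splice_init(&mchan->active, &list);
            list_splice_init(&mchan->prepared, &list);
            list_splice_init(&mchan->completed, &list);
            list_splice_init(&mchan->queued, &list);
            spin_unlock_irqrestore(&mchan->lock, irqflags);

            /* Suspend any transfer in flight. */
            rc = hidma_ll_disable(dmadev->lldev);
            if (rc)
                    goto out;

            /* Return all user requests and recycle the descriptors. */
            list_for_each_entry_safe(mdesc, tmp, &list, node) {
                    struct dma_async_tx_descriptor *txd = &mdesc->desc;

                    dma_descriptor_unmap(txd);
                    dmaengine_desc_get_callback_invoke(txd, NULL);
                    dma_run_dependencies(txd);

                    list_move(&mdesc->node, &mchan->free);
            }

            rc = hidma_ll_enable(dmadev->lldev);
    out:
            pm_runtime_mark_last_busy(dmadev->ddev.dev);
            pm_runtime_put_autosuspend(dmadev->ddev.dev);
            return rc;
    }

    static int hidma_terminate_all(struct dma_chan *chan)
    {
            struct hidma_chan *mchan = to_hidma_chan(chan);
            struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);
            int rc;

            rc = hidma_terminate_channel(chan);
            if (rc)
                    return rc;

            /* Reinitialize the hardware channel for the next use. */
            return hidma_ll_setup(dmadev->lldev);
    }
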
532 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_free_chan_resources() local
533 struct hidma_dev *mdma = mchan->dmadev; in hidma_free_chan_resources()
541 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_free_chan_resources()
544 list_splice_tail_init(&mchan->free, &descs); in hidma_free_chan_resources()
553 mchan->allocated = false; in hidma_free_chan_resources()
554 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_free_chan_resources()
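
Freeing channel resources is the inverse of allocation: terminate anything outstanding (which returns all descriptors to free), then release each descriptor's hardware TRE and memory under the lock. A sketch, assuming the upstream hidma_ll_free() helper:

    static void hidma_free_chan_resources(struct dma_chan *dmach)
    {
            struct hidma_chan *mchan = to_hidma_chan(dmach);
            struct hidma_dev *mdma = mchan->dmadev;
            struct hidma_desc *mdesc, *tmp;
            unsigned long irqflags;
            LIST_HEAD(descs);

            /* Terminate running transactions; everything lands on free. */
            hidma_terminate_channel(dmach);

            spin_lock_irqsave(&mchan->lock, irqflags);

            list_splice_tail_init(&mchan->free, &descs);

            /* Release the hardware TRE and the descriptor itself. */
            list_for_each_entry_safe(mdesc, tmp, &descs, node) {
                    hidma_ll_free(mdma->lldev, mdesc->tre_ch);
                    list_del(&mdesc->node);
                    kfree(mdesc);
            }

            mchan->allocated = false;
            spin_unlock_irqrestore(&mchan->lock, irqflags);
    }
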
559 struct hidma_chan *mchan; in hidma_pause() local
562 mchan = to_hidma_chan(chan); in hidma_pause()
563 dmadev = to_hidma_dev(mchan->chan.device); in hidma_pause()
564 if (!mchan->paused) { in hidma_pause()
568 mchan->paused = true; in hidma_pause()
577 struct hidma_chan *mchan; in hidma_resume() local
581 mchan = to_hidma_chan(chan); in hidma_resume()
582 dmadev = to_hidma_dev(mchan->chan.device); in hidma_resume()
583 if (mchan->paused) { in hidma_resume()
587 mchan->paused = false; in hidma_resume()
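
pause and resume are symmetric: each guards on mchan->paused, brackets the low-level disable/enable with a pm_runtime reference, and flips the flag only on success. A reconstruction of the pair; the warning and error messages follow the upstream wording only approximately:

    static int hidma_pause(struct dma_chan *chan)
    {
            struct hidma_chan *mchan = to_hidma_chan(chan);
            struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);

            if (!mchan->paused) {
                    pm_runtime_get_sync(dmadev->ddev.dev);
                    if (hidma_ll_disable(dmadev->lldev))
                            dev_warn(dmadev->ddev.dev, "channel did not stop\n");
                    mchan->paused = true;
                    pm_runtime_mark_last_busy(dmadev->ddev.dev);
                    pm_runtime_put_autosuspend(dmadev->ddev.dev);
            }
            return 0;
    }

    static int hidma_resume(struct dma_chan *chan)
    {
            struct hidma_chan *mchan = to_hidma_chan(chan);
            struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);
            int rc = 0;

            if (mchan->paused) {
                    pm_runtime_get_sync(dmadev->ddev.dev);
                    rc = hidma_ll_enable(dmadev->lldev);
                    if (!rc)
                            mchan->paused = false;
                    else
                            dev_err(dmadev->ddev.dev, "failed to resume\n");
                    pm_runtime_mark_last_busy(dmadev->ddev.dev);
                    pm_runtime_put_autosuspend(dmadev->ddev.dev);
            }
            return rc;
    }
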