Lines matching refs:mchan

114 static void hidma_process_completed(struct hidma_chan *mchan)  in hidma_process_completed()  argument
116 struct dma_device *ddev = mchan->chan.device; in hidma_process_completed()
128 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_process_completed()
129 list_splice_tail_init(&mchan->completed, &list); in hidma_process_completed()
130 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_process_completed()
141 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_process_completed()
143 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_process_completed()
150 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_process_completed()
151 list_move(&mdesc->node, &mchan->free); in hidma_process_completed()
154 mchan->last_success = last_cookie; in hidma_process_completed()
159 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_process_completed()
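
These references appear to come from the Qualcomm HIDMA engine driver (drivers/dma/qcom/hidma.c). The lines above show its splice-and-drain completion pattern: the completed list is detached in one short critical section, client callbacks run without mchan->lock held, and each retired descriptor is recycled onto the free list while last_success records the newest good cookie. A minimal sketch, assuming <linux/dmaengine.h>, <linux/list.h>, <linux/spinlock.h> plus the driver's struct hidma_chan/hidma_desc and to_hidma_chan()/to_hidma_dev() helpers; the later sketches assume the same headers and types, and driver-specific steps are left as comments:

    #include <linux/dmaengine.h>
    #include <linux/list.h>
    #include <linux/spinlock.h>

    static void process_completed_sketch(struct hidma_chan *mchan)
    {
        struct hidma_desc *mdesc, *next;
        unsigned long irqflags;
        LIST_HEAD(list);

        /* Detach everything that finished; keeps the lock hold time short. */
        spin_lock_irqsave(&mchan->lock, irqflags);
        list_splice_tail_init(&mchan->completed, &list);
        spin_unlock_irqrestore(&mchan->lock, irqflags);

        list_for_each_entry_safe(mdesc, next, &list, node) {
            /* complete the cookie and invoke the client callback here */

            spin_lock_irqsave(&mchan->lock, irqflags);
            /* Recycle the descriptor for the next prep call. */
            list_move(&mdesc->node, &mchan->free);
            /* The driver updates last_success only for successful transfers. */
            mchan->last_success = mdesc->desc.cookie;
            spin_unlock_irqrestore(&mchan->lock, irqflags);
        }
    }
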
173 struct hidma_chan *mchan = to_hidma_chan(mdesc->desc.chan); in hidma_callback() local
174 struct dma_device *ddev = mchan->chan.device; in hidma_callback()
179 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_callback()
182 list_move_tail(&mdesc->node, &mchan->completed); in hidma_callback()
186 mchan->running = list_first_entry(&mchan->active, in hidma_callback()
189 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_callback()
191 hidma_process_completed(mchan); in hidma_callback()
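
The callback path above retires the descriptor that just finished and advances mchan->running to the next active entry before draining completions. A hedged sketch of that handoff, under the same assumptions as the first sketch:

    static void callback_sketch(struct hidma_desc *mdesc)
    {
        struct hidma_chan *mchan = to_hidma_chan(mdesc->desc.chan);
        unsigned long irqflags;

        spin_lock_irqsave(&mchan->lock, irqflags);
        /* Retire the finished descriptor ... */
        list_move_tail(&mdesc->node, &mchan->completed);

        /* ... and promote the next in-flight descriptor, if any. */
        mchan->running = list_empty(&mchan->active) ? NULL :
                list_first_entry(&mchan->active, struct hidma_desc, node);
        spin_unlock_irqrestore(&mchan->lock, irqflags);

        /* Run client callbacks outside the critical section. */
        hidma_process_completed(mchan);
    }
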
201 struct hidma_chan *mchan; in hidma_chan_init() local
204 mchan = devm_kzalloc(dmadev->ddev.dev, sizeof(*mchan), GFP_KERNEL); in hidma_chan_init()
205 if (!mchan) in hidma_chan_init()
209 mchan->dma_sig = dma_sig; in hidma_chan_init()
210 mchan->dmadev = dmadev; in hidma_chan_init()
211 mchan->chan.device = ddev; in hidma_chan_init()
212 dma_cookie_init(&mchan->chan); in hidma_chan_init()
214 INIT_LIST_HEAD(&mchan->free); in hidma_chan_init()
215 INIT_LIST_HEAD(&mchan->prepared); in hidma_chan_init()
216 INIT_LIST_HEAD(&mchan->active); in hidma_chan_init()
217 INIT_LIST_HEAD(&mchan->completed); in hidma_chan_init()
218 INIT_LIST_HEAD(&mchan->queued); in hidma_chan_init()
220 spin_lock_init(&mchan->lock); in hidma_chan_init()
221 list_add_tail(&mchan->chan.device_node, &ddev->channels); in hidma_chan_init()
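
hidma_chan_init() sets up one list head per descriptor lifecycle state; a descriptor circulates free -> prepared -> queued -> active -> completed -> free, always moved under mchan->lock. A sketch of the initialization, mirroring the field names in the listing (dma_cookie_init() is the dmaengine-internal cookie helper):

    static int chan_init_sketch(struct hidma_dev *dmadev, u32 dma_sig)
    {
        struct dma_device *ddev = &dmadev->ddev;
        struct hidma_chan *mchan;

        mchan = devm_kzalloc(ddev->dev, sizeof(*mchan), GFP_KERNEL);
        if (!mchan)
            return -ENOMEM;

        mchan->dma_sig = dma_sig;
        mchan->dmadev = dmadev;
        mchan->chan.device = ddev;
        dma_cookie_init(&mchan->chan);

        /* One list per lifecycle state of a descriptor. */
        INIT_LIST_HEAD(&mchan->free);
        INIT_LIST_HEAD(&mchan->prepared);
        INIT_LIST_HEAD(&mchan->queued);
        INIT_LIST_HEAD(&mchan->active);
        INIT_LIST_HEAD(&mchan->completed);

        spin_lock_init(&mchan->lock);
        list_add_tail(&mchan->chan.device_node, &ddev->channels);
        return 0;
    }
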
236 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_issue_pending() local
237 struct hidma_dev *dmadev = mchan->dmadev; in hidma_issue_pending()
242 spin_lock_irqsave(&mchan->lock, flags); in hidma_issue_pending()
243 list_for_each_entry_safe(qdesc, next, &mchan->queued, node) { in hidma_issue_pending()
245 list_move_tail(&qdesc->node, &mchan->active); in hidma_issue_pending()
248 if (!mchan->running) { in hidma_issue_pending()
249 struct hidma_desc *desc = list_first_entry(&mchan->active, in hidma_issue_pending()
252 mchan->running = desc; in hidma_issue_pending()
254 spin_unlock_irqrestore(&mchan->lock, flags); in hidma_issue_pending()
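
hidma_issue_pending() promotes every queued descriptor to the active list and, if nothing is in flight, latches the head of the active list as mchan->running. A sketch; the hardware hand-off is driver-specific and left as a comment, and the empty-list guard is a defensive addition:

    static void issue_pending_sketch(struct dma_chan *dmach)
    {
        struct hidma_chan *mchan = to_hidma_chan(dmach);
        struct hidma_desc *qdesc, *next;
        unsigned long flags;

        spin_lock_irqsave(&mchan->lock, flags);
        list_for_each_entry_safe(qdesc, next, &mchan->queued, node) {
            /* hand qdesc to the lower layer's hardware queue here */
            list_move_tail(&qdesc->node, &mchan->active);
        }

        /* Latch the head of the line if nothing was already running. */
        if (!mchan->running && !list_empty(&mchan->active))
            mchan->running = list_first_entry(&mchan->active,
                                              struct hidma_desc, node);
        spin_unlock_irqrestore(&mchan->lock, flags);

        /* enable/kick the channel hardware here */
    }
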
281 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_tx_status() local
288 is_success = hidma_txn_is_success(cookie, mchan->last_success, in hidma_tx_status()
293 if (mchan->paused && (ret == DMA_IN_PROGRESS)) { in hidma_tx_status()
297 spin_lock_irqsave(&mchan->lock, flags); in hidma_tx_status()
298 if (mchan->running) in hidma_tx_status()
299 runcookie = mchan->running->desc.cookie; in hidma_tx_status()
306 spin_unlock_irqrestore(&mchan->lock, flags); in hidma_tx_status()
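
hidma_tx_status() layers two refinements on the generic cookie bookkeeping: a completed cookie is downgraded to DMA_ERROR unless hidma_txn_is_success() places it inside the last-known-successful window, and an in-progress cookie reports DMA_PAUSED when it belongs to the currently running descriptor on a paused channel. A sketch; the third argument to hidma_txn_is_success() is truncated in the listing and the channel's current cookie is assumed here:

    static enum dma_status tx_status_sketch(struct dma_chan *dmach,
                                            dma_cookie_t cookie,
                                            struct dma_tx_state *txstate)
    {
        struct hidma_chan *mchan = to_hidma_chan(dmach);
        enum dma_status ret = dma_cookie_status(dmach, cookie, txstate);

        if (ret == DMA_COMPLETE &&
            !hidma_txn_is_success(cookie, mchan->last_success,
                                  dmach->cookie)) /* window end assumed */
            return DMA_ERROR;

        if (mchan->paused && ret == DMA_IN_PROGRESS) {
            unsigned long flags;
            dma_cookie_t runcookie = -EINVAL;

            spin_lock_irqsave(&mchan->lock, flags);
            if (mchan->running)
                runcookie = mchan->running->desc.cookie;
            if (runcookie == cookie)
                ret = DMA_PAUSED;
            spin_unlock_irqrestore(&mchan->lock, flags);
        }
        return ret;
    }
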
318 struct hidma_chan *mchan = to_hidma_chan(txd->chan); in hidma_tx_submit() local
319 struct hidma_dev *dmadev = mchan->dmadev; in hidma_tx_submit()
334 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_tx_submit()
337 list_move_tail(&mdesc->node, &mchan->queued); in hidma_tx_submit()
342 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_tx_submit()
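
hidma_tx_submit() assigns the cookie and moves the descriptor from prepared to queued inside one critical section, so hidma_issue_pending() can never observe a queued descriptor without a valid cookie. A sketch using the generic dma_cookie_assign() helper from the dmaengine core:

    static dma_cookie_t tx_submit_sketch(struct dma_async_tx_descriptor *txd)
    {
        struct hidma_chan *mchan = to_hidma_chan(txd->chan);
        struct hidma_desc *mdesc = container_of(txd, struct hidma_desc, desc);
        unsigned long irqflags;
        dma_cookie_t cookie;

        spin_lock_irqsave(&mchan->lock, irqflags);
        cookie = dma_cookie_assign(txd);              /* publish the cookie */
        list_move_tail(&mdesc->node, &mchan->queued); /* prepared -> queued */
        spin_unlock_irqrestore(&mchan->lock, irqflags);

        return cookie;
    }
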
349 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_alloc_chan_resources() local
350 struct hidma_dev *dmadev = mchan->dmadev; in hidma_alloc_chan_resources()
357 if (mchan->allocated) in hidma_alloc_chan_resources()
370 rc = hidma_ll_request(dmadev->lldev, mchan->dma_sig, in hidma_alloc_chan_resources()
391 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_alloc_chan_resources()
392 list_splice_tail_init(&descs, &mchan->free); in hidma_alloc_chan_resources()
393 mchan->allocated = true; in hidma_alloc_chan_resources()
394 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_alloc_chan_resources()
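
hidma_alloc_chan_resources() builds the descriptor pool on a private list with no lock held, registers each descriptor with the lower layer via hidma_ll_request(), and only then publishes the whole batch with a single splice. A sketch; the pool size is a parameter here, and the per-descriptor setup (dma_async_tx_descriptor_init() plus the hidma_ll_request() call shown above) is elided:

    static int alloc_resources_sketch(struct dma_chan *dmach, unsigned int nr)
    {
        struct hidma_chan *mchan = to_hidma_chan(dmach);
        struct hidma_dev *dmadev = mchan->dmadev;
        struct hidma_desc *mdesc;
        unsigned long irqflags;
        LIST_HEAD(descs);
        unsigned int i;

        if (mchan->allocated)
            return 0;                 /* already provisioned */

        for (i = 0; i < nr; i++) {
            mdesc = devm_kzalloc(dmadev->ddev.dev, sizeof(*mdesc),
                                 GFP_KERNEL);
            if (!mdesc)
                return -ENOMEM;       /* the driver unwinds more carefully */
            /* dma_async_tx_descriptor_init() + hidma_ll_request() here */
            list_add_tail(&mdesc->node, &descs);
        }

        /* Publish the whole pool in one locked splice. */
        spin_lock_irqsave(&mchan->lock, irqflags);
        list_splice_tail_init(&descs, &mchan->free);
        mchan->allocated = true;
        spin_unlock_irqrestore(&mchan->lock, irqflags);
        return 0;
    }
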
402 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_prep_dma_memcpy() local
404 struct hidma_dev *mdma = mchan->dmadev; in hidma_prep_dma_memcpy()
408 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
409 if (!list_empty(&mchan->free)) { in hidma_prep_dma_memcpy()
410 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node); in hidma_prep_dma_memcpy()
413 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
423 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
424 list_add_tail(&mdesc->node, &mchan->prepared); in hidma_prep_dma_memcpy()
425 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memcpy()
434 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_prep_dma_memset() local
436 struct hidma_dev *mdma = mchan->dmadev; in hidma_prep_dma_memset()
440 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memset()
441 if (!list_empty(&mchan->free)) { in hidma_prep_dma_memset()
442 mdesc = list_first_entry(&mchan->free, struct hidma_desc, node); in hidma_prep_dma_memset()
445 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memset()
455 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_prep_dma_memset()
456 list_add_tail(&mdesc->node, &mchan->prepared); in hidma_prep_dma_memset()
457 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_prep_dma_memset()
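
hidma_prep_dma_memcpy() and hidma_prep_dma_memset() share one shape: pop a recycled descriptor off the free list under the lock, program it outside the lock, then park it on the prepared list until submit. A sketch of the memcpy variant covering both paths; programming the hardware descriptor is a lower-layer call and is elided:

    static struct dma_async_tx_descriptor *
    prep_memcpy_sketch(struct dma_chan *dmach, dma_addr_t dst,
                       dma_addr_t src, size_t len)
    {
        struct hidma_chan *mchan = to_hidma_chan(dmach);
        struct hidma_desc *mdesc = NULL;
        unsigned long irqflags;

        /* Grab a recycled descriptor, if the pool has one. */
        spin_lock_irqsave(&mchan->lock, irqflags);
        if (!list_empty(&mchan->free)) {
            mdesc = list_first_entry(&mchan->free, struct hidma_desc, node);
            list_del(&mdesc->node);
        }
        spin_unlock_irqrestore(&mchan->lock, irqflags);

        if (!mdesc)
            return NULL;              /* pool exhausted */

        /* program dst/src/len into the hardware descriptor here */

        spin_lock_irqsave(&mchan->lock, irqflags);
        list_add_tail(&mdesc->node, &mchan->prepared);
        spin_unlock_irqrestore(&mchan->lock, irqflags);

        return &mdesc->desc;
    }
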
464 struct hidma_chan *mchan = to_hidma_chan(chan); in hidma_terminate_channel() local
465 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device); in hidma_terminate_channel()
473 hidma_process_completed(mchan); in hidma_terminate_channel()
475 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_terminate_channel()
476 mchan->last_success = 0; in hidma_terminate_channel()
477 list_splice_init(&mchan->active, &list); in hidma_terminate_channel()
478 list_splice_init(&mchan->prepared, &list); in hidma_terminate_channel()
479 list_splice_init(&mchan->completed, &list); in hidma_terminate_channel()
480 list_splice_init(&mchan->queued, &list); in hidma_terminate_channel()
481 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_terminate_channel()
499 list_move(&mdesc->node, &mchan->free); in hidma_terminate_channel()
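
hidma_terminate_channel() first drains whatever already completed, then empties all four in-flight lists into a private list in one critical section, resets last_success, and recycles every descriptor after failing its callback. A sketch; the hardware quiesce and the error callback invocation are elided as comments:

    static int terminate_channel_sketch(struct hidma_chan *mchan)
    {
        struct hidma_desc *mdesc, *next;
        unsigned long irqflags;
        LIST_HEAD(list);

        /* Let anything that already finished report success first. */
        hidma_process_completed(mchan);

        /* quiesce/reset the channel hardware here */

        spin_lock_irqsave(&mchan->lock, irqflags);
        mchan->last_success = 0;
        list_splice_init(&mchan->active, &list);
        list_splice_init(&mchan->prepared, &list);
        list_splice_init(&mchan->completed, &list);
        list_splice_init(&mchan->queued, &list);
        spin_unlock_irqrestore(&mchan->lock, irqflags);

        list_for_each_entry_safe(mdesc, next, &list, node) {
            /* unmap and invoke the client callback with an error here */
            list_move(&mdesc->node, &mchan->free);
        }
        return 0;
    }
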
511 struct hidma_chan *mchan = to_hidma_chan(chan); in hidma_terminate_all() local
512 struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device); in hidma_terminate_all()
529 struct hidma_chan *mchan = to_hidma_chan(dmach); in hidma_free_chan_resources() local
530 struct hidma_dev *mdma = mchan->dmadev; in hidma_free_chan_resources()
538 spin_lock_irqsave(&mchan->lock, irqflags); in hidma_free_chan_resources()
541 list_splice_tail_init(&mchan->free, &descs); in hidma_free_chan_resources()
550 mchan->allocated = 0; in hidma_free_chan_resources()
551 spin_unlock_irqrestore(&mchan->lock, irqflags); in hidma_free_chan_resources()
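
hidma_free_chan_resources() is the inverse of allocation: terminate first, then detach the free list, release each descriptor, and clear the allocated flag, all within the same critical section per the lock/unlock line numbers above. A sketch; releasing the descriptor's lower-layer channel is elided:

    static void free_resources_sketch(struct dma_chan *dmach)
    {
        struct hidma_chan *mchan = to_hidma_chan(dmach);
        struct hidma_desc *mdesc, *next;
        unsigned long irqflags;
        LIST_HEAD(descs);

        /* terminate/flush the channel first (see the sketch above) */

        spin_lock_irqsave(&mchan->lock, irqflags);
        list_splice_tail_init(&mchan->free, &descs);
        list_for_each_entry_safe(mdesc, next, &descs, node) {
            /* release the descriptor's lower-layer channel here */
            list_del(&mdesc->node);
            devm_kfree(mchan->dmadev->ddev.dev, mdesc);
        }
        mchan->allocated = false;
        spin_unlock_irqrestore(&mchan->lock, irqflags);
    }
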
556 struct hidma_chan *mchan; in hidma_pause() local
559 mchan = to_hidma_chan(chan); in hidma_pause()
560 dmadev = to_hidma_dev(mchan->chan.device); in hidma_pause()
561 if (!mchan->paused) { in hidma_pause()
565 mchan->paused = true; in hidma_pause()
574 struct hidma_chan *mchan; in hidma_resume() local
578 mchan = to_hidma_chan(chan); in hidma_resume()
579 dmadev = to_hidma_dev(mchan->chan.device); in hidma_resume()
580 if (mchan->paused) { in hidma_resume()
584 mchan->paused = false; in hidma_resume()
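
hidma_pause() and hidma_resume() are symmetric toggles around the mchan->paused flag: each asks the lower layer to stop or restart the channel and flips the flag. A sketch of the pause side; the runtime-PM bracketing is an assumption matching the driver's usual pattern, and resume inverts the flag test:

    static int pause_sketch(struct dma_chan *chan)
    {
        struct hidma_chan *mchan = to_hidma_chan(chan);
        struct hidma_dev *dmadev = to_hidma_dev(mchan->chan.device);

        if (!mchan->paused) {
            pm_runtime_get_sync(dmadev->ddev.dev);
            /* ask the lower layer to quiesce the channel here */
            mchan->paused = true;
            pm_runtime_mark_last_busy(dmadev->ddev.dev);
            pm_runtime_put_autosuspend(dmadev->ddev.dev);
        }
        return 0;
    }
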