Lines matching refs: sdesc
127 void (*exec_desc)(struct sirfsoc_dma_desc *sdesc,
133 void (*exec)(struct sirfsoc_dma_desc *sdesc,
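The two function pointers above appear to be the per-variant hooks through which the driver programs a descriptor into the hardware; the Atlas6, Atlas7 v1 and Atlas7 v2 implementations listed below share that prototype. Every other match in this listing reads or writes fields of struct sirfsoc_dma_desc. The sketch below is a hypothetical reconstruction of that descriptor, inferred only from the accesses visible here (desc, node, xlen, ylen, width, dir, cyclic, chain, addr); the real definition may use different types and carry additional members.

/* Inferred sketch only, not the driver's actual definition. */
struct sirfsoc_dma_desc {
	struct dma_async_tx_descriptor	desc;	/* generic dmaengine descriptor (see tx_submit below) */
	struct list_head		node;	/* links the free/prepared/queued/active lists */

	int	xlen;		/* row length, in DMA words */
	int	ylen;		/* number of rows minus one */
	int	width;		/* row-to-row stride, in DMA words */
	int	dir;		/* direction bit written to the DIR_CTRL field */
	bool	cyclic;		/* cyclic (looping) transfer */
	bool	chain;		/* chained transfer, Atlas7 v2 path only */
	u32	addr;		/* start address programmed into CH_ADDR (could be dma_addr_t) */
};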
163 static void sirfsoc_dma_execute_hw_a7v2(struct sirfsoc_dma_desc *sdesc, in sirfsoc_dma_execute_hw_a7v2() argument
166 if (sdesc->chain) { in sirfsoc_dma_execute_hw_a7v2()
168 writel_relaxed((sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT_ATLAS7) | in sirfsoc_dma_execute_hw_a7v2()
169 (sdesc->chain << in sirfsoc_dma_execute_hw_a7v2()
175 writel_relaxed(sdesc->xlen, base + SIRFSOC_DMA_CH_XLEN); in sirfsoc_dma_execute_hw_a7v2()
176 writel_relaxed(sdesc->ylen, base + SIRFSOC_DMA_CH_YLEN); in sirfsoc_dma_execute_hw_a7v2()
177 writel_relaxed(sdesc->width, base + SIRFSOC_DMA_WIDTH_ATLAS7); in sirfsoc_dma_execute_hw_a7v2()
178 writel_relaxed((sdesc->width*((sdesc->ylen+1)>>1)), in sirfsoc_dma_execute_hw_a7v2()
180 writel_relaxed((sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT_ATLAS7) | in sirfsoc_dma_execute_hw_a7v2()
181 (sdesc->chain << in sirfsoc_dma_execute_hw_a7v2()
185 writel_relaxed(sdesc->chain ? SIRFSOC_DMA_INT_END_INT_ATLAS7 : in sirfsoc_dma_execute_hw_a7v2()
189 writel(sdesc->addr, base + SIRFSOC_DMA_CH_ADDR); in sirfsoc_dma_execute_hw_a7v2()
190 if (sdesc->cyclic) in sirfsoc_dma_execute_hw_a7v2()
194 static void sirfsoc_dma_execute_hw_a7v1(struct sirfsoc_dma_desc *sdesc, in sirfsoc_dma_execute_hw_a7v1() argument
199 writel_relaxed(sdesc->width, base + SIRFSOC_DMA_WIDTH_0 + cid * 4); in sirfsoc_dma_execute_hw_a7v1()
201 (sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT), in sirfsoc_dma_execute_hw_a7v1()
203 writel_relaxed(sdesc->xlen, base + cid * 0x10 + SIRFSOC_DMA_CH_XLEN); in sirfsoc_dma_execute_hw_a7v1()
204 writel_relaxed(sdesc->ylen, base + cid * 0x10 + SIRFSOC_DMA_CH_YLEN); in sirfsoc_dma_execute_hw_a7v1()
207 writel(sdesc->addr >> 2, base + cid * 0x10 + SIRFSOC_DMA_CH_ADDR); in sirfsoc_dma_execute_hw_a7v1()
208 if (sdesc->cyclic) { in sirfsoc_dma_execute_hw_a7v1()
216 static void sirfsoc_dma_execute_hw_a6(struct sirfsoc_dma_desc *sdesc, in sirfsoc_dma_execute_hw_a6() argument
219 writel_relaxed(sdesc->width, base + SIRFSOC_DMA_WIDTH_0 + cid * 4); in sirfsoc_dma_execute_hw_a6()
221 (sdesc->dir << SIRFSOC_DMA_DIR_CTRL_BIT), in sirfsoc_dma_execute_hw_a6()
223 writel_relaxed(sdesc->xlen, base + cid * 0x10 + SIRFSOC_DMA_CH_XLEN); in sirfsoc_dma_execute_hw_a6()
224 writel_relaxed(sdesc->ylen, base + cid * 0x10 + SIRFSOC_DMA_CH_YLEN); in sirfsoc_dma_execute_hw_a6()
227 writel(sdesc->addr >> 2, base + cid * 0x10 + SIRFSOC_DMA_CH_ADDR); in sirfsoc_dma_execute_hw_a6()
228 if (sdesc->cyclic) { in sirfsoc_dma_execute_hw_a6()
241 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_execute() local
249 sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc, in sirfsoc_dma_execute()
252 list_move_tail(&sdesc->node, &schan->active); in sirfsoc_dma_execute()
258 sdma->exec_desc(sdesc, cid, schan->mode, base); in sirfsoc_dma_execute()
260 if (sdesc->cyclic) in sirfsoc_dma_execute()
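sirfsoc_dma_execute() is where those hooks get called: the oldest queued descriptor is moved onto the active list and handed to the variant-specific callback. A minimal sketch of that dispatch, assuming sdma is the controller structure holding the exec_desc hook and that locking and the cid/base lookups happen in the caller (both are outside this listing):

static void sirfsoc_dma_execute_sketch(struct sirfsoc_dma *sdma,
				       struct sirfsoc_dma_chan *schan,
				       int cid, void __iomem *base)
{
	struct sirfsoc_dma_desc *sdesc;

	/* the oldest queued descriptor becomes the active one */
	sdesc = list_first_entry(&schan->queued, struct sirfsoc_dma_desc,
				 node);
	list_move_tail(&sdesc->node, &schan->active);

	/* program the channel through the per-variant callback */
	sdma->exec_desc(sdesc, cid, schan->mode, base);

	if (sdesc->cyclic) {
		/* body truncated in the listing: cyclic bookkeeping is reset here */
	}
}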
269 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_irq() local
285 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
287 if (!sdesc->cyclic) { in sirfsoc_dma_irq()
291 dma_cookie_complete(&sdesc->desc); in sirfsoc_dma_irq()
307 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_irq()
309 if (!sdesc->cyclic) { in sirfsoc_dma_irq()
310 chain = sdesc->chain; in sirfsoc_dma_irq()
317 dma_cookie_complete(&sdesc->desc); in sirfsoc_dma_irq()
321 } else if (sdesc->cyclic && (is & in sirfsoc_dma_irq()
343 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_process_completed() local
360 list_for_each_entry(sdesc, &list, node) { in sirfsoc_dma_process_completed()
361 desc = &sdesc->desc; in sirfsoc_dma_process_completed()
380 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_process_completed()
387 desc = &sdesc->desc; in sirfsoc_dma_process_completed()
408 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_tx_submit() local
412 sdesc = container_of(txd, struct sirfsoc_dma_desc, desc); in sirfsoc_dma_tx_submit()
417 list_move_tail(&sdesc->node, &schan->queued); in sirfsoc_dma_tx_submit()
561 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_alloc_chan_resources() local
570 sdesc = kzalloc(sizeof(*sdesc), GFP_KERNEL); in sirfsoc_dma_alloc_chan_resources()
571 if (!sdesc) { in sirfsoc_dma_alloc_chan_resources()
577 dma_async_tx_descriptor_init(&sdesc->desc, chan); in sirfsoc_dma_alloc_chan_resources()
578 sdesc->desc.flags = DMA_CTRL_ACK; in sirfsoc_dma_alloc_chan_resources()
579 sdesc->desc.tx_submit = sirfsoc_dma_tx_submit; in sirfsoc_dma_alloc_chan_resources()
581 list_add_tail(&sdesc->node, &descs); in sirfsoc_dma_alloc_chan_resources()
601 struct sirfsoc_dma_desc *sdesc, *tmp; in sirfsoc_dma_free_chan_resources() local
619 list_for_each_entry_safe(sdesc, tmp, &descs, node) in sirfsoc_dma_free_chan_resources()
620 kfree(sdesc); in sirfsoc_dma_free_chan_resources()
648 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_tx_status() local
662 sdesc = list_first_entry(&schan->active, struct sirfsoc_dma_desc, node); in sirfsoc_dma_tx_status()
663 if (sdesc->cyclic) in sirfsoc_dma_tx_status()
664 dma_request_bytes = (sdesc->xlen + 1) * (sdesc->ylen + 1) * in sirfsoc_dma_tx_status()
665 (sdesc->width * SIRFSOC_DMA_WORD_LEN); in sirfsoc_dma_tx_status()
667 dma_request_bytes = sdesc->xlen * SIRFSOC_DMA_WORD_LEN; in sirfsoc_dma_tx_status()
681 residue = dma_request_bytes - (dma_pos - sdesc->addr); in sirfsoc_dma_tx_status()
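The residue reported by sirfsoc_dma_tx_status() is the total request size minus how far the hardware has advanced past the descriptor's start address. Assuming SIRFSOC_DMA_WORD_LEN is 4 bytes: a non-cyclic descriptor with xlen = 64 requests 256 bytes, so if the channel's current position dma_pos is 160 bytes past sdesc->addr, the reported residue is 256 - 160 = 96 bytes. For a cyclic descriptor the request size is instead (xlen + 1) * (ylen + 1) * width words, i.e. the whole ring buffer.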
695 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_prep_interleaved() local
707 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_interleaved()
709 list_del(&sdesc->node); in sirfsoc_dma_prep_interleaved()
713 if (!sdesc) { in sirfsoc_dma_prep_interleaved()
728 sdesc->cyclic = 0; in sirfsoc_dma_prep_interleaved()
729 sdesc->xlen = xt->sgl[0].size / SIRFSOC_DMA_WORD_LEN; in sirfsoc_dma_prep_interleaved()
730 sdesc->width = (xt->sgl[0].size + xt->sgl[0].icg) / in sirfsoc_dma_prep_interleaved()
732 sdesc->ylen = xt->numf - 1; in sirfsoc_dma_prep_interleaved()
734 sdesc->addr = xt->src_start; in sirfsoc_dma_prep_interleaved()
735 sdesc->dir = 1; in sirfsoc_dma_prep_interleaved()
737 sdesc->addr = xt->dst_start; in sirfsoc_dma_prep_interleaved()
738 sdesc->dir = 0; in sirfsoc_dma_prep_interleaved()
741 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_interleaved()
749 return &sdesc->desc; in sirfsoc_dma_prep_interleaved()
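The interleaved prep above maps a dma_interleaved_template directly onto the controller's 2D geometry: xlen is the bytes per row converted to DMA words, width is the row-to-row stride (size + icg) in words, and ylen is the row count minus one. A hypothetical client-side fragment, assuming a 4-byte DMA word and a memory-to-device transfer so that the src_start branch above is taken (chan and buf_dma_addr are placeholders):

/*
 * Example only: push 16 rows of 64 bytes each to a device, skipping
 * 64 bytes between rows.  With 4-byte words this should end up as
 * xlen = 16, width = 32, ylen = 15 in the descriptor above.
 */
static struct dma_async_tx_descriptor *
prep_2d_push(struct dma_chan *chan, dma_addr_t buf_dma_addr)
{
	struct dma_interleaved_template *xt;
	struct dma_async_tx_descriptor *desc;

	xt = kzalloc(struct_size(xt, sgl, 1), GFP_KERNEL);
	if (!xt)
		return NULL;

	xt->src_start   = buf_dma_addr;	/* DMA address of the source rows */
	xt->dir         = DMA_MEM_TO_DEV;
	xt->numf        = 16;		/* number of rows (frames) */
	xt->frame_size  = 1;		/* one chunk per row */
	xt->sgl[0].size = 64;		/* bytes copied per row */
	xt->sgl[0].icg  = 64;		/* gap skipped between rows */

	desc = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);
	kfree(xt);			/* the prep call copies what it needs */
	return desc;
}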
763 struct sirfsoc_dma_desc *sdesc = NULL; in sirfsoc_dma_prep_cyclic() local
783 sdesc = list_first_entry(&schan->free, struct sirfsoc_dma_desc, in sirfsoc_dma_prep_cyclic()
785 list_del(&sdesc->node); in sirfsoc_dma_prep_cyclic()
789 if (!sdesc) in sirfsoc_dma_prep_cyclic()
794 sdesc->addr = addr; in sirfsoc_dma_prep_cyclic()
795 sdesc->cyclic = 1; in sirfsoc_dma_prep_cyclic()
796 sdesc->xlen = 0; in sirfsoc_dma_prep_cyclic()
797 sdesc->ylen = buf_len / SIRFSOC_DMA_WORD_LEN - 1; in sirfsoc_dma_prep_cyclic()
798 sdesc->width = 1; in sirfsoc_dma_prep_cyclic()
799 list_add_tail(&sdesc->node, &schan->prepared); in sirfsoc_dma_prep_cyclic()
802 return &sdesc->desc; in sirfsoc_dma_prep_cyclic()
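The cyclic prep collapses the same geometry into a single looping row: addr points at the ring buffer, xlen is forced to 0, width to 1, and ylen holds the buffer length in DMA words minus one. Assuming a 4-byte DMA word, a 4096-byte cyclic buffer therefore becomes ylen = 1023. This is consistent with the branches in sirfsoc_dma_irq() above, where only non-cyclic descriptors reach dma_cookie_complete() and cyclic ones are tracked per period from the interrupt status.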
1061 struct sirfsoc_dma_desc *sdesc; in sirfsoc_dma_pm_resume() local
1089 sdesc = list_first_entry(&schan->active, in sirfsoc_dma_pm_resume()
1092 writel_relaxed(sdesc->width, in sirfsoc_dma_pm_resume()
1094 writel_relaxed(sdesc->xlen, in sirfsoc_dma_pm_resume()
1096 writel_relaxed(sdesc->ylen, in sirfsoc_dma_pm_resume()
1101 writel_relaxed(sdesc->addr, in sirfsoc_dma_pm_resume()
1104 writel_relaxed(sdesc->addr >> 2, in sirfsoc_dma_pm_resume()
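On resume, the driver reprograms each channel that still has an active descriptor using the same field-to-register mapping as the execute hooks; note that one branch writes sdesc->addr directly while the other writes sdesc->addr >> 2, mirroring the word-addressed CH_ADDR programming in the a7v1/a6 paths (lines 207 and 227) versus the byte address used on Atlas7 v2 (line 189).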