
Searched refs:vchan (Results 1 – 25 of 34) sorted by relevance

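Nearly every hit below follows the same convention from the kernel's virt-dma helper layer (drivers/dma/virt-dma.h): the driver embeds a struct virt_dma_chan named vchan in its per-channel state and converts back from the generic struct dma_chan with container_of(). A minimal sketch of that convention; foo_dma_chan, foo_dma_desc and their helpers are invented for illustration, and the later sketches in this listing reuse these names:

    #include <linux/dmaengine.h>
    #include "virt-dma.h"

    /* Hypothetical per-channel state, mirroring the "member" hits below. */
    struct foo_dma_chan {
            struct virt_dma_chan vchan;     /* wraps struct dma_chan */
            struct foo_dma_desc *desc;      /* descriptor currently on the hardware */
    };

    /* Hypothetical descriptor; the helpers need an embedded virt_dma_desc. */
    struct foo_dma_desc {
            struct virt_dma_desc vdesc;
            size_t len;                     /* illustrative residue bookkeeping */
    };

    static inline struct foo_dma_chan *to_foo_dma_chan(struct dma_chan *c)
    {
            return container_of(c, struct foo_dma_chan, vchan.chan);
    }

    static inline struct foo_dma_desc *to_foo_dma_desc(struct virt_dma_desc *vd)
    {
            return container_of(vd, struct foo_dma_desc, vdesc);
    }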

/Linux-v5.10/drivers/dma/
owl-dma.c
191 struct owl_dma_vchan *vchan; member
380 static inline int owl_dma_cfg_lli(struct owl_dma_vchan *vchan, in owl_dma_cfg_lli() argument
387 struct owl_dma *od = to_owl_dma(vchan->vc.chan.device); in owl_dma_cfg_lli()
400 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
413 mode |= OWL_DMA_MODE_TS(vchan->drq) in owl_dma_cfg_lli()
468 struct owl_dma_vchan *vchan) in owl_dma_get_pchan() argument
478 if (!pchan->vchan) { in owl_dma_get_pchan()
479 pchan->vchan = vchan; in owl_dma_get_pchan()
519 pchan->vchan = NULL; in owl_dma_terminate_pchan()
534 static int owl_dma_start_next_txd(struct owl_dma_vchan *vchan) in owl_dma_start_next_txd() argument
[all …]
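The owl-dma hits above (468–534), like sun4i's find_and_use_pchan() and the pxa_dma/mmp_pdma lookup_phy() hits further down, show the second half of the pattern: a small pool of physical channels, each carrying a vchan back-pointer that is claimed under a lock and cleared on release. A hedged sketch of the claim loop, continuing the invented names from the top of the listing; struct foo_dma and its fields are likewise invented:

    struct foo_dma_pchan {
            struct foo_dma_chan *vchan;     /* owning virtual channel, NULL when free */
    };

    struct foo_dma {
            struct dma_device dma_dev;
            struct foo_dma_pchan *pchans;
            unsigned int nr_pchans;
            spinlock_t lock;                /* protects pchan ownership */
    };

    static struct foo_dma_pchan *foo_dma_get_pchan(struct foo_dma *od,
                                                   struct foo_dma_chan *vchan)
    {
            unsigned long flags;
            unsigned int i;

            for (i = 0; i < od->nr_pchans; i++) {
                    struct foo_dma_pchan *pchan = &od->pchans[i];

                    spin_lock_irqsave(&od->lock, flags);
                    if (!pchan->vchan) {
                            pchan->vchan = vchan;   /* claim, as in owl-dma line 479 */
                            spin_unlock_irqrestore(&od->lock, flags);
                            return pchan;
                    }
                    spin_unlock_irqrestore(&od->lock, flags);
            }

            return NULL;    /* all busy; the vchan stays queued until one frees up */
    }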
sun4i-dma.c
129 struct sun4i_dma_vchan *vchan; member
212 struct sun4i_dma_vchan *vchan = to_sun4i_dma_vchan(chan); in sun4i_dma_free_chan_resources() local
214 vchan_free_chan_resources(&vchan->vc); in sun4i_dma_free_chan_resources()
218 struct sun4i_dma_vchan *vchan) in find_and_use_pchan() argument
228 if (vchan->is_dedicated) { in find_and_use_pchan()
239 pchan->vchan = vchan; in find_and_use_pchan()
256 pchan->vchan = NULL; in release_pchan()
320 struct sun4i_dma_vchan *vchan) in __execute_vchan_pending() argument
328 lockdep_assert_held(&vchan->vc.lock); in __execute_vchan_pending()
331 pchan = find_and_use_pchan(priv, vchan); in __execute_vchan_pending()
[all …]
sun6i-dma.c
170 struct sun6i_vchan *vchan; member
387 static inline void sun6i_dma_dump_lli(struct sun6i_vchan *vchan, in sun6i_dma_dump_lli() argument
392 dev_dbg(chan2dev(&vchan->vc.chan), in sun6i_dma_dump_lli()
427 static int sun6i_dma_start_desc(struct sun6i_vchan *vchan) in sun6i_dma_start_desc() argument
429 struct sun6i_dma_dev *sdev = to_sun6i_dma_dev(vchan->vc.chan.device); in sun6i_dma_start_desc()
430 struct virt_dma_desc *desc = vchan_next_desc(&vchan->vc); in sun6i_dma_start_desc()
431 struct sun6i_pchan *pchan = vchan->phy; in sun6i_dma_start_desc()
448 sun6i_dma_dump_lli(vchan, pchan->desc->v_lli); in sun6i_dma_start_desc()
453 vchan->irq_type = vchan->cyclic ? DMA_IRQ_PKG : DMA_IRQ_QUEUE; in sun6i_dma_start_desc()
458 irq_val |= vchan->irq_type << (irq_offset * DMA_IRQ_CHAN_WIDTH); in sun6i_dma_start_desc()
[all …]
st_fdma.c
24 return container_of(c, struct st_fdma_chan, vchan.chan); in to_st_fdma_chan()
79 vdesc = vchan_next_desc(&fchan->vchan); in st_fdma_xfer_desc()
85 cmd = FDMA_CMD_START(fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
94 dev_dbg(fchan->fdev->dev, "start chan:%d\n", fchan->vchan.chan.chan_id); in st_fdma_xfer_desc()
101 int ch_id = fchan->vchan.chan.chan_id; in st_fdma_ch_sta_update()
139 spin_lock(&fchan->vchan.lock); in st_fdma_irq_handler()
157 spin_unlock(&fchan->vchan.lock); in st_fdma_irq_handler()
282 fchan->vchan.chan.chan_id, fchan->cfg.type); in st_fdma_alloc_chan_res()
294 __func__, fchan->vchan.chan.chan_id); in st_fdma_free_chan_res()
299 spin_lock_irqsave(&fchan->vchan.lock, flags); in st_fdma_free_chan_res()
[all …]
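The st_fdma interrupt-handler hits at 139/157 take vchan.lock with plain spin_lock(), which is fine in hard-IRQ context. What typically sits between those two lines is completion of the in-flight descriptor through the virt-dma cookie helper; a sketch reusing foo_dma_chan from above (foo_dma_start_transfer is sketched after the dma-axi-dmac entry below):

    /* Fragment of a hypothetical IRQ handler, run once per completed channel. */
    spin_lock(&chan->vchan.lock);
    if (chan->desc) {
            /* Marks the cookie complete and schedules the callback tasklet. */
            vchan_cookie_complete(&chan->desc->vdesc);
            chan->desc = NULL;
            foo_dma_start_transfer(chan);   /* launch the next issued vdesc, if any */
    }
    spin_unlock(&chan->vchan.lock);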
fsl-edma-common.c
48 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_enable_request()
65 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_disable_request()
109 u32 ch = fsl_chan->vchan.chan.chan_id; in fsl_edma_chan_mux()
116 ch_off = fsl_chan->vchan.chan.chan_id % chans_per_mux; in fsl_edma_chan_mux()
166 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
170 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
171 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_terminate_all()
172 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_edma_terminate_all()
182 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
188 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_pause()
[all …]
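The fsl-edma-common hits at 166–172 are the canonical device_terminate_all() shape used by nearly every driver in this listing: collect the descriptors under vchan.lock, then free them after dropping it, so the desc_free callbacks run without the channel lock held. Sketch:

    static int foo_dma_terminate_all(struct dma_chan *c)
    {
            struct foo_dma_chan *chan = to_foo_dma_chan(c);
            unsigned long flags;
            LIST_HEAD(head);

            spin_lock_irqsave(&chan->vchan.lock, flags);
            /* ... halt the hardware and forget chan->desc here ... */
            vchan_get_all_descriptors(&chan->vchan, &head);
            spin_unlock_irqrestore(&chan->vchan.lock, flags);

            /* Outside the lock: runs vchan.desc_free on every collected vdesc. */
            vchan_dma_desc_free_list(&chan->vchan, &head);

            return 0;
    }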
idma64.c
107 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_stop_transfer()
114 struct idma64 *idma64 = to_idma64(idma64c->vchan.chan.device); in idma64_start_transfer()
118 vdesc = vchan_next_desc(&idma64c->vchan); in idma64_start_transfer()
142 spin_lock(&idma64c->vchan.lock); in idma64_chan_irq()
159 spin_unlock(&idma64c->vchan.lock); in idma64_chan_irq()
326 return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags); in idma64_prep_slave_sg()
334 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_issue_pending()
335 if (vchan_issue_pending(&idma64c->vchan) && !idma64c->desc) in idma64_issue_pending()
337 spin_unlock_irqrestore(&idma64c->vchan.lock, flags); in idma64_issue_pending()
378 spin_lock_irqsave(&idma64c->vchan.lock, flags); in idma64_tx_status()
[all …]
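idma64's hits at 334–337 are the stock device_issue_pending() idiom: under vchan.lock, move submitted descriptors to the issued list, and only kick the hardware if nothing is already in flight. Sketch under the same assumptions as above:

    static void foo_dma_issue_pending(struct dma_chan *c)
    {
            struct foo_dma_chan *chan = to_foo_dma_chan(c);
            unsigned long flags;

            spin_lock_irqsave(&chan->vchan.lock, flags);
            /* vchan_issue_pending() returns true if any work was moved over. */
            if (vchan_issue_pending(&chan->vchan) && !chan->desc)
                    foo_dma_start_transfer(chan);
            spin_unlock_irqrestore(&chan->vchan.lock, flags);
    }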
dma-jz4780.c
129 struct virt_dma_chan vchan; member
166 return container_of(chan, struct jz4780_dma_chan, vchan.chan); in to_jz4780_dma_chan()
178 return container_of(jzchan->vchan.chan.device, struct jz4780_dma_dev, in jz4780_dma_chan_parent()
395 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_slave_sg()
447 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_cyclic()
475 return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags); in jz4780_dma_prep_dma_memcpy()
486 vdesc = vchan_next_desc(&jzchan->vchan); in jz4780_dma_begin()
558 spin_lock_irqsave(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
560 if (vchan_issue_pending(&jzchan->vchan) && !jzchan->desc) in jz4780_dma_issue_pending()
563 spin_unlock_irqrestore(&jzchan->vchan.lock, flags); in jz4780_dma_issue_pending()
[all …]
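The dma-jz4780 prep hits at 395/447/475 all end the same way: the driver builds its descriptor, then hands it to the core with vchan_tx_prep(), which ties the vdesc to the channel and returns the embedded dma_async_tx_descriptor. A sketch of a memcpy prep tail under the same assumptions as above (plus linux/slab.h for kzalloc):

    static struct dma_async_tx_descriptor *
    foo_dma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dst, dma_addr_t src,
                            size_t len, unsigned long flags)
    {
            struct foo_dma_chan *chan = to_foo_dma_chan(c);
            struct foo_dma_desc *desc;

            /* GFP_NOWAIT: prep callbacks may be invoked from atomic context. */
            desc = kzalloc(sizeof(*desc), GFP_NOWAIT);
            if (!desc)
                    return NULL;
            desc->len = len;
            /* ... fill the hardware descriptor/LLI chain for dst/src here ... */

            return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
    }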
stm32-dma.c
197 struct virt_dma_chan vchan; member
221 return container_of(chan->vchan.chan.device, struct stm32_dma_device, in stm32_dma_get_dev()
227 return container_of(c, struct stm32_dma_chan, vchan.chan); in to_stm32_dma_chan()
237 return &chan->vchan.chan.dev->device; in chan2dev()
490 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
499 vchan_get_all_descriptors(&chan->vchan, &head); in stm32_dma_terminate_all()
500 spin_unlock_irqrestore(&chan->vchan.lock, flags); in stm32_dma_terminate_all()
501 vchan_dma_desc_free_list(&chan->vchan, &head); in stm32_dma_terminate_all()
510 vchan_synchronize(&chan->vchan); in stm32_dma_synchronize()
547 vdesc = vchan_next_desc(&chan->vchan); in stm32_dma_start_transfer()
[all …]
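stm32-dma line 510, like fsl-edma line 27, implements device_synchronize() as a straight call to vchan_synchronize(), which flushes the virt-dma completion tasklet so no descriptor callback can run after it returns; free_chan_resources commonly chains to vchan_free_chan_resources() the same way (see the xilinx_dpdma hit at 1246). Sketch:

    static void foo_dma_synchronize(struct dma_chan *c)
    {
            /* Waits out the completion tasklet; no callbacks run afterwards. */
            vchan_synchronize(&to_foo_dma_chan(c)->vchan);
    }

    static void foo_dma_free_chan_resources(struct dma_chan *c)
    {
            /* Terminate first in a real driver; this frees every queued vdesc. */
            vchan_free_chan_resources(&to_foo_dma_chan(c)->vchan);
    }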
dma-axi-dmac.c
120 struct virt_dma_chan vchan; member
152 return container_of(chan->vchan.chan.device, struct axi_dmac, in chan_to_axi_dmac()
158 return container_of(c, struct axi_dmac_chan, vchan.chan); in to_axi_dmac_chan()
219 vdesc = vchan_next_desc(&chan->vchan); in axi_dmac_start_transfer()
419 spin_lock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
430 spin_unlock(&dmac->chan.vchan.lock); in axi_dmac_interrupt_handler()
442 spin_lock_irqsave(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
445 vchan_get_all_descriptors(&chan->vchan, &head); in axi_dmac_terminate_all()
447 spin_unlock_irqrestore(&chan->vchan.lock, flags); in axi_dmac_terminate_all()
449 vchan_dma_desc_free_list(&chan->vchan, &head); in axi_dmac_terminate_all()
[all …]
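dma-axi-dmac line 219, stm32-dma line 547 and idma64 line 118 all fetch work with vchan_next_desc(), which returns the head of the issued list (or NULL) and must be called with vchan.lock held. A sketch of the start path the earlier fragments referred to:

    /* Called with chan->vchan.lock held. */
    static void foo_dma_start_transfer(struct foo_dma_chan *chan)
    {
            struct virt_dma_desc *vdesc = vchan_next_desc(&chan->vchan);

            if (!vdesc) {
                    chan->desc = NULL;      /* nothing issued: channel goes idle */
                    return;
            }

            list_del(&vdesc->node);         /* drivers take it off the issued list */
            chan->desc = to_foo_dma_desc(vdesc);
            /* ... program the hardware from chan->desc here ... */
    }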
stm32-mdma.c
259 struct virt_dma_chan vchan; member
286 return container_of(chan->vchan.chan.device, struct stm32_mdma_device, in stm32_mdma_get_dev()
292 return container_of(c, struct stm32_mdma_chan, vchan.chan); in to_stm32_mdma_chan()
302 return &chan->vchan.chan.dev->device; in chan2dev()
809 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_slave_sg()
900 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_cyclic()
1086 return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in stm32_mdma_prep_dma_memcpy()
1123 vdesc = vchan_next_desc(&chan->vchan); in stm32_mdma_start_transfer()
1164 dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan); in stm32_mdma_start_transfer()
1172 spin_lock_irqsave(&chan->vchan.lock, flags); in stm32_mdma_issue_pending()
[all …]
st_fdma.h
125 struct virt_dma_chan vchan; member
186 + (fchan)->vchan.chan.chan_id * 0x4 \
191 + (fchan)->vchan.chan.chan_id * 0x4 \
208 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
213 + (fchan)->vchan.chan.chan_id * FDMA_NODE_SZ \
fsl-qdma.c
176 struct virt_dma_chan vchan; member
296 return container_of(chan, struct fsl_qdma_chan, vchan.chan); in to_fsl_qdma_chan()
313 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources()
314 vchan_get_all_descriptors(&fsl_chan->vchan, &head); in fsl_qdma_free_chan_resources()
315 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_qdma_free_chan_resources()
317 vchan_dma_desc_free_list(&fsl_chan->vchan, &head); in fsl_qdma_free_chan_resources()
729 spin_lock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
732 spin_unlock(&fsl_comp->qchan->vchan.lock); in fsl_qdma_queue_transfer_complete()
985 return vchan_tx_prep(&fsl_chan->vchan, &fsl_comp->vdesc, flags); in fsl_qdma_prep_memcpy()
999 vdesc = vchan_next_desc(&fsl_chan->vchan); in fsl_qdma_enqueue_desc()
[all …]
fsl-edma.c
27 vchan_synchronize(&fsl_chan->vchan); in fsl_edma_synchronize()
47 spin_lock(&fsl_chan->vchan.lock); in fsl_edma_tx_handler()
51 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_handler()
68 spin_unlock(&fsl_chan->vchan.lock); in fsl_edma_tx_handler()
360 fsl_chan->vchan.desc_free = fsl_edma_free_desc; in fsl_edma_probe()
361 vchan_init(&fsl_chan->vchan, &fsl_edma->dma_dev); in fsl_edma_probe()
443 spin_lock_irqsave(&fsl_chan->vchan.lock, flags); in fsl_edma_suspend_late()
452 spin_unlock_irqrestore(&fsl_chan->vchan.lock, flags); in fsl_edma_suspend_late()
mcf-edma.c
37 spin_lock(&mcf_chan->vchan.lock); in mcf_edma_tx_handler()
41 spin_unlock(&mcf_chan->vchan.lock); in mcf_edma_tx_handler()
58 spin_unlock(&mcf_chan->vchan.lock); in mcf_edma_tx_handler()
230 mcf_chan->vchan.desc_free = fsl_edma_free_desc; in mcf_edma_probe()
231 vchan_init(&mcf_chan->vchan, &mcf_edma->dma_dev); in mcf_edma_probe()
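The fsl-edma and mcf-edma probe hits (360–361 and 230–231) show the registration side: install a desc_free callback, then call vchan_init() to hook the channel into the dma_device. Sketch, continuing the invented names:

    static void foo_dma_desc_free(struct virt_dma_desc *vd)
    {
            /* Every virt-dma free path funnels through this callback. */
            kfree(to_foo_dma_desc(vd));
    }

    /* Probe-time fragment, once per channel: */
    chan->vchan.desc_free = foo_dma_desc_free;
    vchan_init(&chan->vchan, &od->dma_dev);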
pxa_dma.c
100 struct pxad_chan *vchan; member
151 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
159 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
166 dev_vdbg(&phy->vchan->vc.chan.dev->device, \
390 if (!phy->vchan) { in lookup_phy()
391 phy->vchan = pchan; in lookup_phy()
425 chan->phy->vchan = NULL; in pxad_free_phy()
455 if (!phy->vchan) in phy_enable()
458 dev_dbg(&phy->vchan->vc.chan.dev->device, in phy_enable()
462 pdev = to_pxad_dev(phy->vchan->vc.chan.device); in phy_enable()
[all …]
mmp_pdma.c
120 struct mmp_pdma_chan *vchan; member
156 if (!phy->vchan) in enable_chan()
159 reg = DRCMR(phy->vchan->drcmr); in enable_chan()
163 if (phy->vchan->byte_align) in enable_chan()
196 if ((dcsr & DCSR_BUSERR) && (phy->vchan)) in clear_chan_irq()
197 dev_warn(phy->vchan->dev, "DCSR_BUSERR\n"); in clear_chan_irq()
209 tasklet_schedule(&phy->vchan->tasklet); in mmp_pdma_chan_handler()
261 if (!phy->vchan) { in lookup_phy()
262 phy->vchan = pchan; in lookup_phy()
288 pchan->phy->vchan = NULL; in mmp_pdma_free_phy()
idma64.h
128 struct virt_dma_chan vchan; member
143 return container_of(chan, struct idma64_chan, vchan.chan); in to_idma64_chan()
/Linux-v5.10/drivers/dma/hsu/
hsu.c
113 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
160 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
162 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
214 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
227 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
284 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
292 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
293 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
295 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
328 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_tx_status()
[all …]
hsu.h
83 struct virt_dma_chan vchan; member
96 return container_of(chan, struct hsu_dma_chan, vchan.chan); in to_hsu_dma_chan()
/Linux-v5.10/drivers/dma/sf-pdma/
sf-pdma.c
44 return container_of(dchan, struct sf_pdma_chan, vchan.chan); in to_sf_pdma_chan()
110 desc->async_tx = vchan_tx_prep(&chan->vchan, &desc->vdesc, flags); in sf_pdma_prep_dma_memcpy()
112 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_prep_dma_memcpy()
115 spin_unlock_irqrestore(&chan->vchan.lock, flags); in sf_pdma_prep_dma_memcpy()
154 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
158 vchan_get_all_descriptors(&chan->vchan, &head); in sf_pdma_free_chan_resources()
160 spin_unlock_irqrestore(&chan->vchan.lock, flags); in sf_pdma_free_chan_resources()
161 vchan_dma_desc_free_list(&chan->vchan, &head); in sf_pdma_free_chan_resources()
174 spin_lock_irqsave(&chan->vchan.lock, flags); in sf_pdma_desc_residue()
183 vd = vchan_find_desc(&chan->vchan, cookie); in sf_pdma_desc_residue()
[all …]
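The sf-pdma residue hits at 174/183 show the tx_status side: after dma_cookie_status(), look the cookie up with vchan_find_desc() under vchan.lock; if the descriptor is still on a list it has not started, so its full length remains. A hedged sketch with the same invented types:

    static enum dma_status foo_dma_tx_status(struct dma_chan *c,
                                             dma_cookie_t cookie,
                                             struct dma_tx_state *state)
    {
            struct foo_dma_chan *chan = to_foo_dma_chan(c);
            struct virt_dma_desc *vd;
            enum dma_status status;
            unsigned long flags;
            size_t residue = 0;

            status = dma_cookie_status(c, cookie, state);
            if (status == DMA_COMPLETE || !state)
                    return status;

            spin_lock_irqsave(&chan->vchan.lock, flags);
            vd = vchan_find_desc(&chan->vchan, cookie);
            if (vd)
                    residue = to_foo_dma_desc(vd)->len;     /* not started yet */
            spin_unlock_irqrestore(&chan->vchan.lock, flags);

            dma_set_residue(state, residue);
            return status;
    }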
/Linux-v5.10/drivers/staging/ralink-gdma/
ralink-gdma.c
108 struct virt_dma_chan vchan; member
141 return container_of(chan->vchan.chan.device, struct gdma_dma_dev, in gdma_dma_chan_get_dev()
147 return container_of(c, struct gdma_dmaengine_chan, vchan.chan); in to_gdma_dma_chan()
228 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_terminate_all()
231 vchan_get_all_descriptors(&chan->vchan, &head); in gdma_dma_terminate_all()
232 spin_unlock_irqrestore(&chan->vchan.lock, flags); in gdma_dma_terminate_all()
234 vchan_dma_desc_free_list(&chan->vchan, &head); in gdma_dma_terminate_all()
417 vdesc = vchan_next_desc(&chan->vchan); in gdma_next_desc()
436 spin_lock_irqsave(&chan->vchan.lock, flags); in gdma_dma_chan_irq()
460 spin_unlock_irqrestore(&chan->vchan.lock, flags); in gdma_dma_chan_irq()
[all …]
/Linux-v5.10/drivers/staging/mt7621-dma/
mtk-hsdma.c
144 struct virt_dma_chan vchan; member
168 return container_of(chan->vchan.chan.device, struct mtk_hsdam_engine, in mtk_hsdma_chan_get_dev()
174 return container_of(c, struct mtk_hsdma_chan, vchan.chan); in to_mtk_hsdma_chan()
290 spin_lock_bh(&chan->vchan.lock); in mtk_hsdma_terminate_all()
293 vchan_get_all_descriptors(&chan->vchan, &head); in mtk_hsdma_terminate_all()
294 spin_unlock_bh(&chan->vchan.lock); in mtk_hsdma_terminate_all()
296 vchan_dma_desc_free_list(&chan->vchan, &head); in mtk_hsdma_terminate_all()
389 vdesc = vchan_next_desc(&chan->vchan); in gdma_next_desc()
407 spin_lock_bh(&chan->vchan.lock); in mtk_hsdma_chan_done()
421 spin_unlock_bh(&chan->vchan.lock); in mtk_hsdma_chan_done()
[all …]
/Linux-v5.10/drivers/dma/xilinx/
xilinx_dpdma.c
224 struct virt_dma_chan vchan; member
246 container_of(_chan, struct xilinx_dpdma_chan, vchan.chan)
863 vdesc = vchan_next_desc(&chan->vchan); in xilinx_dpdma_chan_queue_transfer()
1167 list_empty(&chan->vchan.desc_issued)) { in xilinx_dpdma_chan_handle_err()
1170 &chan->vchan.desc_issued); in xilinx_dpdma_chan_handle_err()
1204 vchan_tx_prep(&chan->vchan, &desc->vdesc, flags | DMA_CTRL_ACK); in xilinx_dpdma_prep_interleaved_dma()
1246 vchan_free_chan_resources(&chan->vchan); in xilinx_dpdma_free_chan_resources()
1257 spin_lock_irqsave(&chan->vchan.lock, flags); in xilinx_dpdma_issue_pending()
1258 if (vchan_issue_pending(&chan->vchan)) in xilinx_dpdma_issue_pending()
1260 spin_unlock_irqrestore(&chan->vchan.lock, flags); in xilinx_dpdma_issue_pending()
[all …]
/Linux-v5.10/drivers/dma/fsl-dpaa2-qdma/
dpaa2-qdma.c
21 return container_of(chan, struct dpaa2_qdma_chan, vchan.chan); in to_dpaa2_qdma_chan()
71 spin_lock_irqsave(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources()
72 vchan_get_all_descriptors(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources()
73 spin_unlock_irqrestore(&dpaa2_chan->vchan.lock, flags); in dpaa2_qdma_free_chan_resources()
75 vchan_dma_desc_free_list(&dpaa2_chan->vchan, &head); in dpaa2_qdma_free_chan_resources()
267 return vchan_tx_prep(&dpaa2_chan->vchan, &dpaa2_comp->vdesc, flags); in dpaa2_qdma_prep_memcpy()
280 spin_lock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending()
281 if (vchan_issue_pending(&dpaa2_chan->vchan)) { in dpaa2_qdma_issue_pending()
282 vdesc = vchan_next_desc(&dpaa2_chan->vchan); in dpaa2_qdma_issue_pending()
300 spin_unlock(&dpaa2_chan->vchan.lock); in dpaa2_qdma_issue_pending()
[all …]
/Linux-v5.10/drivers/dma/ti/
edma.c
225 struct virt_dma_chan vchan; member
761 return container_of(c, struct edma_chan, vchan.chan); in to_edma_chan()
780 struct device *dev = echan->vchan.chan.device->dev; in edma_execute()
785 vdesc = vchan_next_desc(&echan->vchan); in edma_execute()
874 spin_lock_irqsave(&echan->vchan.lock, flags); in edma_terminate_all()
891 vchan_get_all_descriptors(&echan->vchan, &head); in edma_terminate_all()
892 spin_unlock_irqrestore(&echan->vchan.lock, flags); in edma_terminate_all()
893 vchan_dma_desc_free_list(&echan->vchan, &head); in edma_terminate_all()
902 vchan_synchronize(&echan->vchan); in edma_synchronize()
1159 return vchan_tx_prep(&echan->vchan, &edesc->vdesc, tx_flags); in edma_prep_slave_sg()
[all …]
