| /Linux-v5.4/drivers/dma/ |
| D | dma-jz4780.c |
|    119  struct virt_dma_desc vdesc;  member
|    170  struct virt_dma_desc *vdesc)  in to_jz4780_dma_desc() argument
|    172  return container_of(vdesc, struct jz4780_dma_desc, vdesc);  in to_jz4780_dma_desc()
|    254  static void jz4780_dma_desc_free(struct virt_dma_desc *vdesc)  in jz4780_dma_desc_free() argument
|    256  struct jz4780_dma_desc *desc = to_jz4780_dma_desc(vdesc);  in jz4780_dma_desc_free()
|    257  struct jz4780_dma_chan *jzchan = to_jz4780_dma_chan(vdesc->tx.chan);  in jz4780_dma_desc_free()
|    374  jz4780_dma_desc_free(&jzchan->desc->vdesc);  in jz4780_dma_prep_slave_sg()
|    395  return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);  in jz4780_dma_prep_slave_sg()
|    421  jz4780_dma_desc_free(&jzchan->desc->vdesc);  in jz4780_dma_prep_dma_cyclic()
|    447  return vchan_tx_prep(&jzchan->vchan, &desc->vdesc, flags);  in jz4780_dma_prep_dma_cyclic()
|    [all …]
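The dma-jz4780.c hits above show the descriptor pattern shared by most drivers in this directory: the driver's own descriptor embeds a struct virt_dma_desc, a container_of() helper recovers the outer structure, a desc_free callback releases it, and the prep routines hand it to the core with vchan_tx_prep(). Below is a minimal sketch of that pattern, using hypothetical foo_* names rather than any driver in the tree:

```c
#include <linux/dmaengine.h>
#include <linux/slab.h>

#include "virt-dma.h"	/* drivers/dma/virt-dma.h */

struct foo_dma_desc {
	struct virt_dma_desc vdesc;	/* must be embedded, not pointed to */
	bool cyclic;			/* driver-specific payload */
};

struct foo_dma_chan {
	struct virt_dma_chan vchan;
	struct foo_dma_desc *desc;	/* currently executing descriptor */
};

static inline struct foo_dma_desc *to_foo_dma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct foo_dma_desc, vdesc);
}

/* Invoked by the virt-dma core when the descriptor can be released. */
static void foo_dma_desc_free(struct virt_dma_desc *vdesc)
{
	kfree(to_foo_dma_desc(vdesc));
}

static struct dma_async_tx_descriptor *
foo_dma_prep(struct foo_dma_chan *fchan, unsigned long flags)
{
	struct foo_dma_desc *desc = kzalloc(sizeof(*desc), GFP_NOWAIT);

	if (!desc)
		return NULL;

	/* Queue on the virtual channel; submission happens via tx_submit(). */
	return vchan_tx_prep(&fchan->vchan, &desc->vdesc, flags);
}
```

In a real driver the free callback is wired up at probe time (vchan.desc_free = foo_dma_desc_free; before vchan_init()), which is how the core knows how to dispose of completed or terminated descriptors.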
|
| D | dma-axi-dmac.c |
|    98   struct virt_dma_desc vdesc;  member
|    152  static struct axi_dmac_desc *to_axi_dmac_desc(struct virt_dma_desc *vdesc)  in to_axi_dmac_desc() argument
|    154  return container_of(vdesc, struct axi_dmac_desc, vdesc);  in to_axi_dmac_desc()
|    197  struct virt_dma_desc *vdesc;  in axi_dmac_start_transfer() local
|    210  vdesc = vchan_next_desc(&chan->vchan);  in axi_dmac_start_transfer()
|    211  if (!vdesc)  in axi_dmac_start_transfer()
|    213  list_move_tail(&vdesc->node, &chan->active_descs);  in axi_dmac_start_transfer()
|    214  desc = to_axi_dmac_desc(vdesc);  in axi_dmac_start_transfer()
|    253  if (chan->hw_cyclic && desc->cyclic && !desc->vdesc.tx.callback &&  in axi_dmac_start_transfer()
|    269  struct axi_dmac_desc, vdesc.node);  in axi_dmac_active_desc()
|    [all …]
|
| D | idma64.c |
|    115  struct virt_dma_desc *vdesc;  in idma64_start_transfer() local
|    118  vdesc = vchan_next_desc(&idma64c->vchan);  in idma64_start_transfer()
|    119  if (!vdesc) {  in idma64_start_transfer()
|    124  list_del(&vdesc->node);  in idma64_start_transfer()
|    125  idma64c->desc = to_idma64_desc(vdesc);  in idma64_start_transfer()
|    151  vchan_cookie_complete(&desc->vdesc);  in idma64_chan_irq()
|    222  static void idma64_vdesc_free(struct virt_dma_desc *vdesc)  in idma64_vdesc_free() argument
|    224  struct idma64_chan *idma64c = to_idma64_chan(vdesc->tx.chan);  in idma64_vdesc_free()
|    226  idma64_desc_free(idma64c, to_idma64_desc(vdesc));  in idma64_vdesc_free()
|    326  return vchan_tx_prep(&idma64c->vchan, &desc->vdesc, flags);  in idma64_prep_slave_sg()
|    [all …]
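The idma64.c and hsu.c hits follow the same start-of-transfer idiom: take the next descriptor the client has issued, unlink it, and make it the channel's current descriptor before programming the hardware. A sketch, continuing the hypothetical foo_* driver from above (the locking comment reflects how the virt-dma helpers expect to be called):

```c
/* Called with fchan->vchan.lock held, e.g. from issue_pending or the IRQ path. */
static void foo_dma_start_transfer(struct foo_dma_chan *fchan)
{
	struct virt_dma_desc *vdesc;

	vdesc = vchan_next_desc(&fchan->vchan);	/* head of desc_issued, or NULL */
	if (!vdesc) {
		fchan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);			/* take it off the issued list */
	fchan->desc = to_foo_dma_desc(vdesc);

	/* ... write the descriptor's parameters into the controller here ... */
}
```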
|
| D | fsl-edma-common.c |
|    142  void fsl_edma_free_desc(struct virt_dma_desc *vdesc)  in fsl_edma_free_desc() argument
|    147  fsl_desc = to_fsl_edma_desc(vdesc);  in fsl_edma_free_desc()
|    266  struct virt_dma_desc *vdesc, bool in_progress)  in fsl_edma_desc_residue() argument
|    312  struct virt_dma_desc *vdesc;  in fsl_edma_tx_status() local
|    324  vdesc = vchan_find_desc(&fsl_chan->vchan, cookie);  in fsl_edma_tx_status()
|    325  if (fsl_chan->edesc && cookie == fsl_chan->edesc->vdesc.tx.cookie)  in fsl_edma_tx_status()
|    327  fsl_edma_desc_residue(fsl_chan, vdesc, true);  in fsl_edma_tx_status()
|    328  else if (vdesc)  in fsl_edma_tx_status()
|    330  fsl_edma_desc_residue(fsl_chan, vdesc, false);  in fsl_edma_tx_status()
|    507  return vchan_tx_prep(&fsl_chan->vchan, &fsl_desc->vdesc, flags);  in fsl_edma_prep_dma_cyclic()
|    [all …]
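fsl_edma_tx_status() above illustrates the usual tx_status implementation on top of virt-dma: let dma_cookie_status() classify the cookie, then report a residue either for the in-flight descriptor or for one still waiting on the issued list (found with vchan_find_desc()). A hedged sketch with hypothetical foo_* names; foo_dma_residue() stands in for whatever hardware-specific counter read the driver performs, and the driver-private "dmaengine.h" header (which declares dma_cookie_status()) is assumed to be included:

```c
/* Hardware-specific residue computation; placeholder only. */
static size_t foo_dma_residue(struct foo_dma_chan *fchan,
			      struct virt_dma_desc *vdesc, bool in_progress);

static enum dma_status foo_dma_tx_status(struct dma_chan *chan,
					 dma_cookie_t cookie,
					 struct dma_tx_state *txstate)
{
	struct foo_dma_chan *fchan =
		container_of(chan, struct foo_dma_chan, vchan.chan);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	size_t residue = 0;

	status = dma_cookie_status(chan, cookie, txstate);
	if (status == DMA_COMPLETE || !txstate)
		return status;

	spin_lock_irqsave(&fchan->vchan.lock, flags);

	vdesc = vchan_find_desc(&fchan->vchan, cookie);
	if (fchan->desc && cookie == fchan->desc->vdesc.tx.cookie)
		residue = foo_dma_residue(fchan, &fchan->desc->vdesc, true);
	else if (vdesc)
		residue = foo_dma_residue(fchan, vdesc, false);

	dma_set_residue(txstate, residue);
	spin_unlock_irqrestore(&fchan->vchan.lock, flags);

	return status;
}
```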
|
| D | stm32-dma.c |
|    185  struct virt_dma_desc vdesc;  member
|    226  static struct stm32_dma_desc *to_stm32_dma_desc(struct virt_dma_desc *vdesc)  in to_stm32_dma_desc() argument
|    228  return container_of(vdesc, struct stm32_dma_desc, vdesc);  in to_stm32_dma_desc()
|    533  struct virt_dma_desc *vdesc;  in stm32_dma_start_transfer() local
|    544  vdesc = vchan_next_desc(&chan->vchan);  in stm32_dma_start_transfer()
|    545  if (!vdesc)  in stm32_dma_start_transfer()
|    548  chan->desc = to_stm32_dma_desc(vdesc);  in stm32_dma_start_transfer()
|    619  vchan_cyclic_callback(&chan->desc->vdesc);  in stm32_dma_handle_chan_done()
|    625  list_del(&chan->desc->vdesc.node);  in stm32_dma_handle_chan_done()
|    626  vchan_cookie_complete(&chan->desc->vdesc);  in stm32_dma_handle_chan_done()
|    [all …]
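stm32_dma_handle_chan_done(), the st_fdma IRQ handler and the fsl/mcf eDMA tx handlers below all retire descriptors the same way: a cyclic transfer gets vchan_cyclic_callback() once per period and stays active, while a one-shot transfer is completed with vchan_cookie_complete(). A sketch in the same hypothetical foo_* driver; because foo_dma_start_transfer() above already unlinked the node, the list_del() is not repeated here (drivers such as stm32-dma that leave the node on the issued list do it at completion instead):

```c
/* Called from the interrupt handler with fchan->vchan.lock held. */
static void foo_dma_chan_done(struct foo_dma_chan *fchan)
{
	struct foo_dma_desc *desc = fchan->desc;

	if (!desc)
		return;

	if (desc->cyclic) {
		/* Period elapsed: fire the client callback and keep the
		 * descriptor active for the next period. */
		vchan_cyclic_callback(&desc->vdesc);
	} else {
		/* One-shot transfer finished: complete the cookie and
		 * start whatever the client issued next. */
		vchan_cookie_complete(&desc->vdesc);
		fchan->desc = NULL;
		foo_dma_start_transfer(fchan);
	}
}
```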
|
| D | st_fdma.c |
|    28   return container_of(vd, struct st_fdma_desc, vdesc);  in to_st_fdma_desc()
|    75   struct virt_dma_desc *vdesc;  in st_fdma_xfer_desc() local
|    78   vdesc = vchan_next_desc(&fchan->vchan);  in st_fdma_xfer_desc()
|    79   if (!vdesc)  in st_fdma_xfer_desc()
|    82   fchan->fdesc = to_st_fdma_desc(vdesc);  in st_fdma_xfer_desc()
|    143  list_del(&fchan->fdesc->vdesc.node);  in st_fdma_irq_handler()
|    144  vchan_cookie_complete(&fchan->fdesc->vdesc);  in st_fdma_irq_handler()
|    148  vchan_cyclic_callback(&fchan->fdesc->vdesc);  in st_fdma_irq_handler()
|    224  static void st_fdma_free_desc(struct virt_dma_desc *vdesc)  in st_fdma_free_desc() argument
|    229  fdesc = to_st_fdma_desc(vdesc);  in st_fdma_free_desc()
|    [all …]
|
| D | idma64.h |
|    114  struct virt_dma_desc vdesc;  member
|    122  static inline struct idma64_desc *to_idma64_desc(struct virt_dma_desc *vdesc)  in to_idma64_desc() argument
|    124  return container_of(vdesc, struct idma64_desc, vdesc);  in to_idma64_desc()
|
| D | stm32-mdma.c |
|    251   struct virt_dma_desc vdesc;  member
|    296   static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)  in to_stm32_mdma_desc() argument
|    298   return container_of(vdesc, struct stm32_mdma_desc, vdesc);  in to_stm32_mdma_desc()
|    368   static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)  in stm32_mdma_desc_free() argument
|    370   struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);  in stm32_mdma_desc_free()
|    371   struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);  in stm32_mdma_desc_free()
|    810   return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in stm32_mdma_prep_slave_sg()
|    901   return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in stm32_mdma_prep_dma_cyclic()
|    1087  return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in stm32_mdma_prep_dma_memcpy()
|    1119  struct virt_dma_desc *vdesc;  in stm32_mdma_start_transfer() local
|    [all …]
|
| D | fsl-qdma.c |
|    166  struct virt_dma_desc vdesc;  member
|    193  struct virt_dma_desc vdesc;  member
|    289  return container_of(vd, struct fsl_qdma_comp, vdesc);  in to_fsl_qdma_comp()
|    690  vchan_cookie_complete(&fsl_comp->vdesc);  in fsl_qdma_queue_transfer_complete()
|    934  return vchan_tx_prep(&fsl_chan->vchan, &fsl_comp->vdesc, flags);  in fsl_qdma_prep_memcpy()
|    940  struct virt_dma_desc *vdesc;  in fsl_qdma_enqueue_desc() local
|    948  vdesc = vchan_next_desc(&fsl_chan->vchan);  in fsl_qdma_enqueue_desc()
|    949  if (!vdesc)  in fsl_qdma_enqueue_desc()
|    951  list_del(&vdesc->node);  in fsl_qdma_enqueue_desc()
|    952  fsl_comp = to_fsl_qdma_comp(vdesc);  in fsl_qdma_enqueue_desc()
|    [all …]
|
| D | fsl-edma-common.h |
|    132  struct virt_dma_desc vdesc;  member
|    220  return container_of(vd, struct fsl_edma_desc, vdesc);  in to_fsl_edma_desc()
|    226  void fsl_edma_free_desc(struct virt_dma_desc *vdesc);
|
| D | mcf-edma.c |
|    39  list_del(&mcf_chan->edesc->vdesc.node);  in mcf_edma_tx_handler()
|    40  vchan_cookie_complete(&mcf_chan->edesc->vdesc);  in mcf_edma_tx_handler()
|    45  vchan_cyclic_callback(&mcf_chan->edesc->vdesc);  in mcf_edma_tx_handler()
|
| D | fsl-edma.c |
|    49  list_del(&fsl_chan->edesc->vdesc.node);  in fsl_edma_tx_handler()
|    50  vchan_cookie_complete(&fsl_chan->edesc->vdesc);  in fsl_edma_tx_handler()
|    55  vchan_cyclic_callback(&fsl_chan->edesc->vdesc);  in fsl_edma_tx_handler()
|
| D | st_fdma.h |
|    96  struct virt_dma_desc vdesc;  member
|
| D | img-mdc-dma.c |
|    176  struct virt_dma_desc *vdesc = container_of(t, struct virt_dma_desc, tx);  in to_mdc_desc() local
|    178  return container_of(vdesc, struct mdc_tx_desc, vd);  in to_mdc_desc()
|
| /Linux-v5.4/fs/nilfs2/ |
| D | ioctl.c |
|    556  struct nilfs_vdesc *vdesc,  in nilfs_ioctl_move_inode_block() argument
|    562  if (vdesc->vd_flags == 0)  in nilfs_ioctl_move_inode_block()
|    564  inode, vdesc->vd_offset, vdesc->vd_blocknr,  in nilfs_ioctl_move_inode_block()
|    565  vdesc->vd_vblocknr, &bh);  in nilfs_ioctl_move_inode_block()
|    568  inode, vdesc->vd_blocknr, vdesc->vd_vblocknr, &bh);  in nilfs_ioctl_move_inode_block()
|    574  __func__, vdesc->vd_flags ? "node" : "data",  in nilfs_ioctl_move_inode_block()
|    575  (unsigned long long)vdesc->vd_ino,  in nilfs_ioctl_move_inode_block()
|    576  (unsigned long long)vdesc->vd_cno,  in nilfs_ioctl_move_inode_block()
|    577  (unsigned long long)vdesc->vd_offset,  in nilfs_ioctl_move_inode_block()
|    578  (unsigned long long)vdesc->vd_blocknr,  in nilfs_ioctl_move_inode_block()
|    [all …]
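The ioctl.c hits show the garbage-collection helper nilfs_ioctl_move_inode_block() branching on vd_flags: 0 means the struct nilfs_vdesc (from the nilfs2 UAPI headers) describes a file data block, anything else a B-tree node block, and the matching read into the GC cache is submitted. A rough sketch of that branch; submit_data() and submit_node() are hypothetical placeholders for the real GC-cache read helpers, which are not visible in the hits above:

```c
#include <linux/fs.h>
#include <linux/buffer_head.h>
#include <linux/nilfs2_api.h>	/* struct nilfs_vdesc */

/* Placeholder prototypes standing in for the real GC-cache read helpers. */
static int submit_data(struct inode *inode, __u64 blkoff, __u64 pbn,
		       __u64 vbn, struct buffer_head **bhp);
static int submit_node(struct inode *inode, __u64 pbn, __u64 vbn,
		       struct buffer_head **bhp);

static int move_inode_block_sketch(struct inode *inode,
				   struct nilfs_vdesc *vdesc,
				   struct buffer_head **bhp)
{
	int ret;

	if (vdesc->vd_flags == 0)	/* data block */
		ret = submit_data(inode, vdesc->vd_offset, vdesc->vd_blocknr,
				  vdesc->vd_vblocknr, bhp);
	else				/* node block */
		ret = submit_node(inode, vdesc->vd_blocknr, vdesc->vd_vblocknr,
				  bhp);

	if (ret < 0)
		pr_err("%s: invalid virtual %s block (ino=%llu, cno=%llu)\n",
		       __func__, vdesc->vd_flags ? "node" : "data",
		       (unsigned long long)vdesc->vd_ino,
		       (unsigned long long)vdesc->vd_cno);
	return ret;
}
```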
|
| /Linux-v5.4/drivers/dma/hsu/ |
| D | hsu.c |
|    110  struct virt_dma_desc *vdesc;  in hsu_dma_start_transfer() local
|    113  vdesc = vchan_next_desc(&hsuc->vchan);  in hsu_dma_start_transfer()
|    114  if (!vdesc) {  in hsu_dma_start_transfer()
|    119  list_del(&vdesc->node);  in hsu_dma_start_transfer()
|    120  hsuc->desc = to_hsu_dma_desc(vdesc);  in hsu_dma_start_transfer()
|    222  vchan_cookie_complete(&desc->vdesc);  in hsu_dma_do_irq()
|    250  static void hsu_dma_desc_free(struct virt_dma_desc *vdesc)  in hsu_dma_desc_free() argument
|    252  struct hsu_dma_desc *desc = to_hsu_dma_desc(vdesc);  in hsu_dma_desc_free()
|    284  return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags);  in hsu_dma_prep_slave_sg()
|    319  struct virt_dma_desc *vdesc;  in hsu_dma_tx_status() local
|    [all …]
|
| D | hsu.h |
|    68  struct virt_dma_desc vdesc;  member
|    77  static inline struct hsu_dma_desc *to_hsu_dma_desc(struct virt_dma_desc *vdesc)  in to_hsu_dma_desc() argument
|    79  return container_of(vdesc, struct hsu_dma_desc, vdesc);  in to_hsu_dma_desc()
|
| /Linux-v5.4/drivers/staging/ralink-gdma/ |
| D | ralink-gdma.c |
|    97   struct virt_dma_desc vdesc;  member
|    150  static struct gdma_dma_desc *to_gdma_dma_desc(struct virt_dma_desc *vdesc)  in to_gdma_dma_desc() argument
|    152  return container_of(vdesc, struct gdma_dma_desc, vdesc);  in to_gdma_dma_desc()
|    415  struct virt_dma_desc *vdesc;  in gdma_next_desc() local
|    417  vdesc = vchan_next_desc(&chan->vchan);  in gdma_next_desc()
|    418  if (!vdesc) {  in gdma_next_desc()
|    422  chan->desc = to_gdma_dma_desc(vdesc);  in gdma_next_desc()
|    440  vchan_cyclic_callback(&desc->vdesc);  in gdma_dma_chan_irq()
|    447  list_del(&desc->vdesc.node);  in gdma_dma_chan_irq()
|    448  vchan_cookie_complete(&desc->vdesc);  in gdma_dma_chan_irq()
|    [all …]
|
| /Linux-v5.4/drivers/staging/mt7621-dma/ |
| D | mtk-hsdma.c |
|    138  struct virt_dma_desc vdesc;  member
|    178  struct virt_dma_desc *vdesc)  in to_mtk_hsdma_desc() argument
|    180  return container_of(vdesc, struct mtk_hsdma_desc, vdesc);  in to_mtk_hsdma_desc()
|    388  struct virt_dma_desc *vdesc;  in gdma_next_desc() local
|    390  vdesc = vchan_next_desc(&chan->vchan);  in gdma_next_desc()
|    391  if (!vdesc) {  in gdma_next_desc()
|    395  chan->desc = to_mtk_hsdma_desc(vdesc);  in gdma_next_desc()
|    412  list_del(&desc->vdesc.node);  in mtk_hsdma_chan_done()
|    413  vchan_cookie_complete(&desc->vdesc);  in mtk_hsdma_chan_done()
|    481  return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);  in mtk_hsdma_prep_dma_memcpy()
|    [all …]
|
| /Linux-v5.4/drivers/sh/intc/ |
| D | virq.c |
|    122  struct irq_desc *vdesc = irq_to_desc(entry->irq);  in intc_virq_handler() local
|    124  if (vdesc) {  in intc_virq_handler()
|    125  handle = (unsigned long)irq_desc_get_handler_data(vdesc);  in intc_virq_handler()
|    128  generic_handle_irq_desc(vdesc);  in intc_virq_handler()
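Here vdesc is not a DMA descriptor at all: intc_virq_handler() demultiplexes a parent interrupt into the virtual (nested) IRQs hanging off it. For each candidate it looks up the irq_desc, reads back the per-IRQ cookie stored as handler data, and if that cookie indicates the source is asserted it re-enters the generic IRQ core. A reduced sketch of that lookup-and-dispatch step for a single nested IRQ; the loop over subgroup entries and the register test encoded in the handle are omitted:

```c
#include <linux/irq.h>
#include <linux/irqdesc.h>

static void demux_one_virq(unsigned int virq)
{
	struct irq_desc *vdesc = irq_to_desc(virq);
	unsigned long handle;

	if (!vdesc)
		return;

	/* Cookie attached earlier with irq_set_handler_data(); in virq.c it
	 * describes which nested source this Linux IRQ corresponds to. */
	handle = (unsigned long)irq_desc_get_handler_data(vdesc);

	if (handle)	/* stand-in for the real "is this source pending?" test */
		generic_handle_irq_desc(vdesc);
}
```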
|
| /Linux-v5.4/drivers/dma/ti/ |
| D | edma.c |
|    179  struct virt_dma_desc vdesc;  member
|    753  return container_of(tx, struct edma_desc, vdesc.tx);  in to_edma_desc()
|    756  static void edma_desc_free(struct virt_dma_desc *vdesc)  in edma_desc_free() argument
|    758  kfree(container_of(vdesc, struct edma_desc, vdesc));  in edma_desc_free()
|    765  struct virt_dma_desc *vdesc;  in edma_execute() local
|    772  vdesc = vchan_next_desc(&echan->vchan);  in edma_execute()
|    773  if (!vdesc)  in edma_execute()
|    775  list_del(&vdesc->node);  in edma_execute()
|    776  echan->edesc = to_edma_desc(&vdesc->tx);  in edma_execute()
|    874  vchan_terminate_vdesc(&echan->edesc->vdesc);  in edma_terminate_all()
|    [all …]
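edma_terminate_all() above hands the descriptor that is still on the hardware to vchan_terminate_vdesc(): it cannot be freed synchronously from atomic context, so the virt-dma core parks it and releases it later (typically from vchan_synchronize()). A sketch of a typical terminate_all built from the same helpers, again with hypothetical foo_* names:

```c
static int foo_dma_terminate_all(struct dma_chan *chan)
{
	struct foo_dma_chan *fchan =
		container_of(chan, struct foo_dma_chan, vchan.chan);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&fchan->vchan.lock, flags);

	/* ... halt the hardware channel here ... */

	if (fchan->desc) {
		/* Defer freeing of the in-flight descriptor to the core. */
		vchan_terminate_vdesc(&fchan->desc->vdesc);
		fchan->desc = NULL;
	}

	/* Collect everything still queued and free it outside the lock. */
	vchan_get_all_descriptors(&fchan->vchan, &head);
	spin_unlock_irqrestore(&fchan->vchan.lock, flags);
	vchan_dma_desc_free_list(&fchan->vchan, &head);

	return 0;
}
```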
|
| /Linux-v5.4/drivers/dma/dw-edma/ |
| D | dw-edma-core.c |
|    163  static void vchan_free_desc(struct virt_dma_desc *vdesc)  in vchan_free_desc() argument
|    165  dw_edma_free_desc(vd2dw_edma_desc(vdesc));  in vchan_free_desc()
|
| /Linux-v5.4/drivers/dma/dw-axi-dmac/ |
| D | dw-axi-dmac-platform.c |
|    241  static void vchan_desc_put(struct virt_dma_desc *vdesc)  in vchan_desc_put() argument
|    243  axi_desc_put(vd_to_axi_desc(vdesc));  in vchan_desc_put()
|