Lines matching references to sw_desc in the Xilinx DPDMA driver (drivers/dma/xilinx/xilinx_dpdma.c), grouped by function:

In xilinx_dpdma_sw_desc_set_dma_addrs():
	505	struct xilinx_dpdma_sw_desc *sw_desc,	(argument)
	510	struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw;
	537	prev->hw.next_desc = lower_32_bits(sw_desc->dma_addr);
	541	upper_32_bits(sw_desc->dma_addr));
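
These matches show how a newly allocated descriptor is chained to its predecessor: the descriptor's bus address is split with lower_32_bits()/upper_32_bits(), the low half going into the previous descriptor's hw.next_desc and the high half into an extension field. A minimal sketch of that linking step, assuming an xdev->ext_addr flag and the extension-mask name (both are assumptions, not shown in the matches):

	/* Link sw_desc behind prev; the hardware walks hw.next_desc. */
	if (prev) {
		prev->hw.next_desc = lower_32_bits(sw_desc->dma_addr);
		if (xdev->ext_addr)	/* assumed flag: >32-bit DMA addressing */
			prev->hw.addr_ext |=
				FIELD_PREP(XILINX_DPDMA_DESC_ADDR_EXT_NEXT_ADDR_MASK,
					   upper_32_bits(sw_desc->dma_addr));
	}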

In xilinx_dpdma_chan_alloc_sw_desc():
	555	struct xilinx_dpdma_sw_desc *sw_desc;	(local)
	558	sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr);
	559	if (!sw_desc)
	562	sw_desc->dma_addr = dma_addr;
	564	return sw_desc;

In xilinx_dpdma_chan_free_sw_desc():
	576	struct xilinx_dpdma_sw_desc *sw_desc)	(argument)
	578	dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr);
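
Lines 555-578 outline a matched pair of dma_pool helpers: dma_pool_zalloc() hands back both the CPU pointer and the bus address, and the bus address is stashed in sw_desc->dma_addr so it can be used for chaining and later handed back to dma_pool_free(). A sketch reconstructed from these matches (the dma_addr local and the NULL return are inferred from lines 558-564):

	static struct xilinx_dpdma_sw_desc *
	xilinx_dpdma_chan_alloc_sw_desc(struct xilinx_dpdma_chan *chan)
	{
		struct xilinx_dpdma_sw_desc *sw_desc;
		dma_addr_t dma_addr;

		/* GFP_ATOMIC: usable from atomic prep/submit paths. */
		sw_desc = dma_pool_zalloc(chan->desc_pool, GFP_ATOMIC, &dma_addr);
		if (!sw_desc)
			return NULL;

		/* Keep the bus address for chaining and for the free path. */
		sw_desc->dma_addr = dma_addr;
		return sw_desc;
	}

	static void xilinx_dpdma_chan_free_sw_desc(struct xilinx_dpdma_chan *chan,
						   struct xilinx_dpdma_sw_desc *sw_desc)
	{
		dma_pool_free(chan->desc_pool, sw_desc, sw_desc->dma_addr);
	}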

In xilinx_dpdma_chan_dump_tx_desc():
	591	struct xilinx_dpdma_sw_desc *sw_desc;	(local)
	598	list_for_each_entry(sw_desc, &tx_desc->descriptors, node) {
	599	struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw;
	602	dev_dbg(dev, "descriptor DMA addr: %pad\n", &sw_desc->dma_addr);
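
Lines 591-602 belong to a debug helper that walks every software descriptor on a transaction and prints its bus address; %pad is the printk specifier that takes a pointer to a dma_addr_t, which is why the argument is &sw_desc->dma_addr rather than the value. A hedged sketch of the loop:

	list_for_each_entry(sw_desc, &tx_desc->descriptors, node) {
		struct xilinx_dpdma_hw_desc *hw_desc = &sw_desc->hw;

		dev_dbg(dev, "descriptor DMA addr: %pad\n", &sw_desc->dma_addr);
		/* presumably followed by dumps of individual hw_desc fields */
	}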

In xilinx_dpdma_chan_free_tx_desc():
	656	struct xilinx_dpdma_sw_desc *sw_desc, *next;	(local)
	664	list_for_each_entry_safe(sw_desc, next, &desc->descriptors, node) {
	665	list_del(&sw_desc->node);
	666	xilinx_dpdma_chan_free_sw_desc(desc->chan, sw_desc);
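
The teardown at lines 664-666 uses list_for_each_entry_safe() because each node is unlinked and freed inside the loop body; the plain iterator would advance through memory that dma_pool_free() has already released. The matched lines form the complete pattern:

	struct xilinx_dpdma_sw_desc *sw_desc, *next;

	/* The _safe variant caches the next node before sw_desc is freed. */
	list_for_each_entry_safe(sw_desc, next, &desc->descriptors, node) {
		list_del(&sw_desc->node);
		xilinx_dpdma_chan_free_sw_desc(desc->chan, sw_desc);
	}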

In xilinx_dpdma_chan_prep_interleaved_dma():
	688	struct xilinx_dpdma_sw_desc *sw_desc;	(local)
	704	sw_desc = xilinx_dpdma_chan_alloc_sw_desc(chan);
	705	if (!sw_desc) {
	710	xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc,
	713	hw_desc = &sw_desc->hw;
	725	list_add_tail(&sw_desc->node, &tx_desc->descriptors);
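
Note line 710: sw_desc is passed as both the new descriptor and its predecessor, so the single descriptor's next pointer refers back to itself. For display DMA that yields a one-entry cyclic chain the engine can replay frame after frame. A hedged sketch of the prep sequence (the xt template arguments and the error label are assumptions):

	sw_desc = xilinx_dpdma_chan_alloc_sw_desc(chan);
	if (!sw_desc)
		goto err_free_tx_desc;	/* hypothetical error label */

	/* prev == sw_desc: the descriptor chains to itself (cyclic). */
	xilinx_dpdma_sw_desc_set_dma_addrs(chan->xdev, sw_desc, sw_desc,
					   &xt->src_start, 1);

	hw_desc = &sw_desc->hw;
	/* ... program size/stride from the interleaved template ... */
	list_add_tail(&sw_desc->node, &tx_desc->descriptors);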

In xilinx_dpdma_chan_queue_transfer():
	828	struct xilinx_dpdma_sw_desc *sw_desc;	(local)
	858	list_for_each_entry(sw_desc, &desc->descriptors, node)
	859	sw_desc->hw.desc_id = desc->vdesc.tx.cookie
	862	sw_desc = list_first_entry(&desc->descriptors,
	865	lower_32_bits(sw_desc->dma_addr));
	869	upper_32_bits(sw_desc->dma_addr)));
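
Before a transfer is queued, lines 858-859 stamp every hardware descriptor with an ID derived from the transaction's DMA cookie, and lines 862-869 split the head descriptor's bus address across a 32-bit start-address register and an extension register. The vsync handler later matches retired descriptors against this stamp. A hedged sketch (the register/mask macro names and the dpdma_write() helper are assumptions modeled on the listing):

	/* Tag each descriptor so completions can be matched to cookies. */
	list_for_each_entry(sw_desc, &desc->descriptors, node)
		sw_desc->hw.desc_id = desc->vdesc.tx.cookie
				    & XILINX_DPDMA_CH_DESC_ID_MASK;

	/* Point the channel at the head of the descriptor chain. */
	sw_desc = list_first_entry(&desc->descriptors,
				   struct xilinx_dpdma_sw_desc, node);
	dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDR,
		    lower_32_bits(sw_desc->dma_addr));
	dpdma_write(chan->reg, XILINX_DPDMA_CH_DESC_START_ADDRE,
		    FIELD_PREP(XILINX_DPDMA_CH_DESC_START_ADDRE_MASK,
			       upper_32_bits(sw_desc->dma_addr)));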

In xilinx_dpdma_chan_vsync_irq():
	1073	struct xilinx_dpdma_sw_desc *sw_desc;	(local)
	1087	sw_desc = list_first_entry(&pending->descriptors,
	1089	if (sw_desc->hw.desc_id != desc_id) {
	1092	chan->id, sw_desc->hw.desc_id, desc_id);
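
Lines 1087-1092 are the completion check in the vsync interrupt: the handler compares the desc_id the hardware last retired against the head of the pending list and only completes the transaction on a match; otherwise the frame is still in flight. A hedged sketch of the check (the read-back of desc_id, the device-pointer access, and the early return are assumptions):

	sw_desc = list_first_entry(&pending->descriptors,
				   struct xilinx_dpdma_sw_desc, node);
	if (sw_desc->hw.desc_id != desc_id) {
		dev_dbg(chan->xdev->dev,	/* assumed device pointer */
			"chan%u: desc_id mismatch: %u != %u\n",
			chan->id, sw_desc->hw.desc_id, desc_id);
		return;	/* frame not retired yet; keep it pending */
	}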