Lines matching refs:dmatx: every reference to the dmatx TX-DMA state in the PL011 UART driver (the pl011_dma_* paths), listed with its source line number and enclosing function.
278 struct pl011_dmatx_data dmatx; member
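Every hit below goes through this one per-port state block, declared as a member at line 278. Reconstructed purely from the fields this listing touches (chan, sg, buf, queued), the structure plausibly looks like the following sketch; the field order and comments are assumptions:

	#include <linux/dmaengine.h>
	#include <linux/scatterlist.h>

	struct pl011_dmatx_data {
		struct dma_chan		*chan;	/* TX channel obtained at probe time */
		struct scatterlist	sg;	/* single-entry list for the bounce buffer */
		char			*buf;	/* bounce buffer, PL011_DMA_BUFFER_SIZE bytes */
		bool			queued;	/* a transfer is currently in flight */
	};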
443 uap->dmatx.chan = chan; in pl011_dma_probe()
446 dma_chan_name(uap->dmatx.chan)); in pl011_dma_probe()
534 if (uap->dmatx.chan) in pl011_dma_remove()
535 dma_release_channel(uap->dmatx.chan); in pl011_dma_remove()
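Lines 443-446 and 534-535 bracket the channel's lifetime: probe stores the channel and logs its name, remove releases it. A minimal sketch of that pairing, assuming the channel comes from dma_request_chan() (the driver may instead use a filter-based request); the example_* names here and below are hypothetical:

	#include <linux/device.h>
	#include <linux/dmaengine.h>
	#include <linux/err.h>

	static int example_dma_probe(struct device *dev,
				     struct pl011_dmatx_data *dmatx)
	{
		struct dma_chan *chan = dma_request_chan(dev, "tx");

		if (IS_ERR(chan))
			return PTR_ERR(chan);	/* no channel: stay on PIO */

		dmatx->chan = chan;
		dev_dbg(dev, "DMA channel TX %s\n", dma_chan_name(chan));
		return 0;
	}

	static void example_dma_remove(struct pl011_dmatx_data *dmatx)
	{
		if (dmatx->chan)
			dma_release_channel(dmatx->chan);
	}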
551 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_callback() local
556 if (uap->dmatx.queued) in pl011_dma_tx_callback()
557 dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1, in pl011_dma_tx_callback()
575 uap->dmatx.queued = false; in pl011_dma_tx_callback()
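Lines 551-575 are the completion side: the callback unmaps the scatterlist that was mapped at submit time and drops the in-flight flag. A sketch of that path, with locking and the decision to refill elided (example_dma_tx_callback is a hypothetical name; it is wired up by the refill sketch further down):

	#include <linux/dma-mapping.h>

	static void example_dma_tx_callback(void *data)
	{
		struct pl011_dmatx_data *dmatx = data;

		/* Undo the dma_map_sg() done at submit time, but only if a
		 * transfer was actually queued (a flush may have raced us). */
		if (dmatx->queued)
			dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1,
				     DMA_TO_DEVICE);
		dmatx->queued = false;
	}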
600 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_refill() local
601 struct dma_chan *chan = dmatx->chan; in pl011_dma_tx_refill()
615 uap->dmatx.queued = false; in pl011_dma_tx_refill()
630 memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], count); in pl011_dma_tx_refill()
639 memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], first); in pl011_dma_tx_refill()
641 memcpy(&dmatx->buf[first], &xmit->buf[0], second); in pl011_dma_tx_refill()
644 dmatx->sg.length = count; in pl011_dma_tx_refill()
646 if (dma_map_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE) != 1) { in pl011_dma_tx_refill()
647 uap->dmatx.queued = false; in pl011_dma_tx_refill()
652 desc = dmaengine_prep_slave_sg(chan, &dmatx->sg, 1, DMA_MEM_TO_DEV, in pl011_dma_tx_refill()
655 dma_unmap_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE); in pl011_dma_tx_refill()
656 uap->dmatx.queued = false; in pl011_dma_tx_refill()
677 uap->dmatx.queued = true; in pl011_dma_tx_refill()
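Lines 600-677 are the heart of the TX path: the pending bytes live in a circular buffer, so up to two memcpy() calls flatten them into the bounce buffer before it is mapped and handed to the engine. A compressed sketch under a few assumptions: count is already clamped to PL011_DMA_BUFFER_SIZE by the caller, the descriptor flags follow the usual dmaengine TX pattern, and the submit/issue-pending calls are the standard API rather than lines shown in this listing:

	#include <linux/circ_buf.h>
	#include <linux/dmaengine.h>
	#include <linux/dma-mapping.h>
	#include <linux/serial_core.h>

	static int example_dma_tx_refill(struct pl011_dmatx_data *dmatx,
					 struct circ_buf *xmit,
					 unsigned int count)
	{
		struct dma_chan *chan = dmatx->chan;
		struct device *dma_dev = chan->device->dev;
		struct dma_async_tx_descriptor *desc;

		if (xmit->tail + count <= UART_XMIT_SIZE) {
			/* Pending bytes are contiguous: one copy suffices. */
			memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], count);
		} else {
			/* The circular buffer wraps: copy the tail-to-end
			 * chunk, then the chunk from the start. */
			size_t first = UART_XMIT_SIZE - xmit->tail;
			size_t second = count - first;

			memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], first);
			memcpy(&dmatx->buf[first], &xmit->buf[0], second);
		}

		dmatx->sg.length = count;
		if (dma_map_sg(dma_dev, &dmatx->sg, 1, DMA_TO_DEVICE) != 1)
			return -EBUSY;

		desc = dmaengine_prep_slave_sg(chan, &dmatx->sg, 1,
					       DMA_MEM_TO_DEV,
					       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc) {
			dma_unmap_sg(dma_dev, &dmatx->sg, 1, DMA_TO_DEVICE);
			return -EBUSY;
		}

		desc->callback = example_dma_tx_callback;
		desc->callback_param = dmatx;
		dmaengine_submit(desc);
		dma_async_issue_pending(chan);

		dmatx->queued = true;
		return count;
	}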
710 if (uap->dmatx.queued) { in pl011_dma_tx_irq()
736 if (uap->dmatx.queued) { in pl011_dma_tx_stop()
761 if (!uap->dmatx.queued) { in pl011_dma_tx_start()
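Lines 710, 736 and 761 show that pl011_dma_tx_irq(), pl011_dma_tx_stop() and pl011_dma_tx_start() all pivot on the same queued flag: it records whether the engine currently owns the TX path. A hypothetical helper illustrating that discipline (example_kick_tx is not a function in the driver):

	static void example_kick_tx(struct pl011_dmatx_data *dmatx,
				    struct circ_buf *xmit, unsigned int count)
	{
		/* A transfer is in flight; its completion callback decides
		 * whether to refill, so there is nothing to do here. */
		if (dmatx->queued)
			return;

		/* Otherwise hand the pending bytes to the engine; on failure
		 * the driver would fall back to interrupt-driven FIFO TX. */
		example_dma_tx_refill(dmatx, xmit, count);
	}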
818 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_flush_buffer()
820 if (uap->dmatx.queued) { in pl011_dma_flush_buffer()
821 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_flush_buffer()
823 uap->dmatx.queued = false; in pl011_dma_flush_buffer()
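Lines 818-823: flushing aborts whatever the engine still holds and unmaps the bounce buffer so the next refill starts clean. A sketch of that teardown:

	static void example_dma_flush_buffer(struct pl011_dmatx_data *dmatx)
	{
		/* Abort any transfer the engine still holds on this channel. */
		dmaengine_terminate_all(dmatx->chan);

		/* If a transfer had been mapped, undo the mapping so the
		 * bounce buffer can be refilled from scratch. */
		if (dmatx->queued) {
			dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1,
				     DMA_TO_DEVICE);
			dmatx->queued = false;
		}
	}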
1120 if (!uap->dmatx.chan) in pl011_dma_startup()
1123 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA); in pl011_dma_startup()
1124 if (!uap->dmatx.buf) { in pl011_dma_startup()
1130 sg_init_one(&uap->dmatx.sg, uap->dmatx.buf, PL011_DMA_BUFFER_SIZE); in pl011_dma_startup()
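Lines 1120-1130: startup allocates one fixed bounce buffer per port and describes it with a single-entry scatterlist; the per-transfer length is patched in later by the refill path (line 644). A minimal sketch, keeping the driver's PL011_DMA_BUFFER_SIZE constant and GFP flags:

	#include <linux/scatterlist.h>
	#include <linux/slab.h>

	static int example_dma_startup(struct pl011_dmatx_data *dmatx)
	{
		if (!dmatx->chan)
			return -ENODEV;	/* no channel was probed; stay on PIO */

		/* __GFP_DMA keeps the buffer in a zone the DMA controller
		 * can address on legacy platforms. */
		dmatx->buf = kmalloc(PL011_DMA_BUFFER_SIZE,
				     GFP_KERNEL | __GFP_DMA);
		if (!dmatx->buf)
			return -ENOMEM;

		sg_init_one(&dmatx->sg, dmatx->buf, PL011_DMA_BUFFER_SIZE);
		return 0;
	}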
1205 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_shutdown()
1206 if (uap->dmatx.queued) { in pl011_dma_shutdown()
1207 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_shutdown()
1209 uap->dmatx.queued = false; in pl011_dma_shutdown()
1212 kfree(uap->dmatx.buf); in pl011_dma_shutdown()
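Lines 1205-1212: shutdown repeats the flush teardown and then frees the buffer allocated at startup, closing the kmalloc()/kfree() pairing. Expressed in terms of the hypothetical helpers above:

	static void example_dma_shutdown(struct pl011_dmatx_data *dmatx)
	{
		example_dma_flush_buffer(dmatx);	/* terminate + unmap */
		kfree(dmatx->buf);
		dmatx->buf = NULL;
	}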