Lines matching "+full:0 +full:xd" in the Socionext UniPhier external DMA controller driver (drivers/dma/uniphier-xdmac.c). Only the matching lines are shown, so the fragments below are not contiguous; each group is prefixed with the function (or structure) it was matched in.

#define XDMAC_CH_WIDTH          0x100

#define XDMAC_TFA               0x08
#define XDMAC_TFA_MASK          GENMASK(5, 0)
#define XDMAC_SADM              0x10
#define XDMAC_SADM_SAM_INC      0
#define XDMAC_DADM              0x14
#define XDMAC_EXSAD             0x18
#define XDMAC_EXDAD             0x1c
#define XDMAC_SAD               0x20
#define XDMAC_DAD               0x24
#define XDMAC_ITS               0x28
#define XDMAC_ITS_MASK          GENMASK(25, 0)
#define XDMAC_TNUM              0x2c
#define XDMAC_TNUM_MASK         GENMASK(15, 0)
#define XDMAC_TSS               0x30
#define XDMAC_TSS_REQ           BIT(0)
#define XDMAC_IEN               0x34
#define XDMAC_IEN_ENDIEN        BIT(0)
#define XDMAC_STAT              0x40
#define XDMAC_STAT_TENF         BIT(0)
#define XDMAC_IR                0x44
#define XDMAC_IR_ENDF           BIT(0)
#define XDMAC_ID                0x48
#define XDMAC_ID_ENDIDF         BIT(0)

#define XDMAC_MAX_WORD_SIZE     (XDMAC_ITS_MASK & ~GENMASK(3, 0))
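A minimal sketch of how these definitions fit together, assuming the usual <linux/io.h> MMIO accessors and that each channel's registers occupy one XDMAC_CH_WIDTH (0x100) window after the controller base; the helper names are hypothetical, only the offsets and bits come from the listing above:

#include <linux/io.h>

/* Hypothetical helper: locate channel chan_id's register window,
 * assuming consecutive XDMAC_CH_WIDTH-sized windows per channel.
 */
static void __iomem *xdmac_chan_regs(void __iomem *reg_base, int chan_id)
{
        return reg_base + chan_id * XDMAC_CH_WIDTH;
}

/* Sketch: test whether a transfer is still in flight on one channel,
 * reading XDMAC_STAT_TENF as a "transfer enabled" flag (assumption).
 */
static bool xdmac_chan_busy(void __iomem *reg_ch_base)
{
        return readl(reg_ch_base + XDMAC_STAT) & XDMAC_STAT_TENF;
}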
/* member of the per-channel state, referenced as xc->xd below */
        struct uniphier_xdmac_desc *xd;
/* in uniphier_xdmac_chan_start(), which takes the current descriptor as its
 * "struct uniphier_xdmac_desc *xd" argument
 */
        src_addr = xd->nodes[xd->cur_node].src;
        dst_addr = xd->nodes[xd->cur_node].dst;
        its = xd->nodes[xd->cur_node].burst_size;
        tnum = xd->nodes[xd->cur_node].nr_burst;

        if (xd->dir == DMA_DEV_TO_MEM) {

        if (xd->dir == DMA_MEM_TO_DEV) {
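How those four per-node values presumably reach the channel registers (a hedged reconstruction: the field-to-register mapping is inferred from the register names defined above, and splitting 64-bit addresses across SAD/EXSAD and DAD/EXDAD is an assumption, not something shown in the matches):

        /* Sketch only: program the current node into the channel window. */
        writel(lower_32_bits(src_addr), xc->reg_ch_base + XDMAC_SAD);
        writel(upper_32_bits(src_addr), xc->reg_ch_base + XDMAC_EXSAD);
        writel(lower_32_bits(dst_addr), xc->reg_ch_base + XDMAC_DAD);
        writel(upper_32_bits(dst_addr), xc->reg_ch_base + XDMAC_EXDAD);
        writel(its, xc->reg_ch_base + XDMAC_ITS);       /* bytes per burst */
        writel(tnum, xc->reg_ch_base + XDMAC_TNUM);     /* number of bursts */
        writel(XDMAC_TSS_REQ, xc->reg_ch_base + XDMAC_TSS);     /* software kick */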
/* in uniphier_xdmac_chan_stop() */
        writel(0, xc->reg_ch_base + XDMAC_TSS);
/* in uniphier_xdmac_start() */
        struct uniphier_xdmac_desc *xd;

        xd = uniphier_xdmac_next_desc(xc);
        if (xd)
                uniphier_xdmac_chan_start(xc, xd);

        /* set desc to chan regardless of whether xd is NULL */
        xc->xd = xd;
/* in uniphier_xdmac_chan_irq() */
        } else if ((stat & XDMAC_ID_ENDIDF) && xc->xd) {
                xc->xd->cur_node++;
                if (xc->xd->cur_node >= xc->xd->nr_node) {
                        vchan_cookie_complete(&xc->xd->vd);

                        uniphier_xdmac_chan_start(xc, xc->xd);
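Pulling the interrupt-path matches together, the per-descriptor state machine appears to be: advance cur_node on every end-of-transfer interrupt, complete the virt-dma cookie when the last node finishes, otherwise reprogram the channel with the same descriptor. A compressed sketch (assuming the vchan lock is held by the caller and that the next queued descriptor is started right after completion; those two points are not visible in the matches):

        /* Sketch: end-of-transfer handling for a multi-node descriptor. */
        if ((stat & XDMAC_ID_ENDIDF) && xc->xd) {
                xc->xd->cur_node++;
                if (xc->xd->cur_node >= xc->xd->nr_node) {
                        vchan_cookie_complete(&xc->xd->vd);     /* descriptor done */
                        uniphier_xdmac_start(xc);               /* assumed: fetch next vdesc */
                } else {
                        uniphier_xdmac_chan_start(xc, xc->xd);  /* program the next node */
                }
        }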
/* in uniphier_xdmac_irq_handler() */
        for (i = 0; i < xdev->nr_chans; i++)
/* in uniphier_xdmac_prep_dma_memcpy() */
        struct uniphier_xdmac_desc *xd;

        xd = kzalloc(struct_size(xd, nodes, nr), GFP_NOWAIT);
        if (!xd)

        for (i = 0; i < nr; i++) {
                xd->nodes[i].src = src;
                xd->nodes[i].dst = dst;
                xd->nodes[i].burst_size = burst_size;
                xd->nodes[i].nr_burst = len / burst_size;

        xd->dir = DMA_MEM_TO_MEM;
        xd->nr_node = nr;
        xd->cur_node = 0;

        return vchan_tx_prep(vc, &xd->vd, flags);
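For context, this prep callback is reached through the generic dmaengine client API; a hypothetical client-side sequence that would exercise it (channel request and error handling elided, variable names made up for illustration):

        /* Hypothetical dmaengine client usage of the memcpy path. */
        struct dma_async_tx_descriptor *tx;
        dma_cookie_t cookie;

        tx = dmaengine_prep_dma_memcpy(chan, dst, src, len, DMA_PREP_INTERRUPT);
        if (!tx)
                return -ENOMEM;

        cookie = dmaengine_submit(tx);          /* queues the vdesc on the channel */
        dma_async_issue_pending(chan);          /* ends up in uniphier_xdmac_issue_pending() */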
/* in uniphier_xdmac_prep_slave_sg() */
        struct uniphier_xdmac_desc *xd;

        xd = kzalloc(struct_size(xd, nodes, sg_len), GFP_NOWAIT);
        if (!xd)

                xd->nodes[i].src = (direction == DMA_DEV_TO_MEM)
                xd->nodes[i].dst = (direction == DMA_MEM_TO_DEV)
                xd->nodes[i].burst_size = maxburst * buswidth;
                xd->nodes[i].nr_burst = sg_dma_len(sg) / xd->nodes[i].burst_size;

                if (sg_dma_len(sg) % xd->nodes[i].burst_size) {
                        kfree(xd);

                if (xd->nodes[i].nr_burst > XDMAC_MAX_WORDS) {
                        kfree(xd);

        xd->dir = direction;
        xd->nr_node = sg_len;
        xd->cur_node = 0;

        return vchan_tx_prep(vc, &xd->vd, flags);
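The maxburst and buswidth factors above come from the client's dma_slave_config; a hypothetical memory-to-device configuration that would feed them (the FIFO address is made up for illustration):

        /* Hypothetical peripheral setup producing maxburst/buswidth above. */
        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = 0x5a000010,   /* made-up peripheral FIFO address */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst   = 8,            /* burst_size = 8 * 4 = 32 bytes per node burst */
        };

        dmaengine_slave_config(chan, &cfg);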
/* in uniphier_xdmac_slave_config() */
        return 0;
/* in uniphier_xdmac_terminate_all() */
        int ret = 0;

        if (xc->xd) {
                vchan_terminate_vdesc(&xc->xd->vd);
                xc->xd = NULL;
/* in uniphier_xdmac_issue_pending() */
        if (vchan_issue_pending(vc) && !xc->xd)
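The issue_pending match fits the standard virt-dma pattern: under the vchan lock, move submitted descriptors to the issued list and kick the hardware only if the channel is idle (xc->xd == NULL). A sketch of that pattern, with the locking assumed rather than taken from the matches:

        /* Sketch of the usual virt-dma issue_pending flow. */
        unsigned long flags;

        spin_lock_irqsave(&vc->lock, flags);
        if (vchan_issue_pending(vc) && !xc->xd)
                uniphier_xdmac_start(xc);       /* channel idle: start the new work now */
        spin_unlock_irqrestore(&vc->lock, flags);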
/* in of_dma_uniphier_xlate() */
        int chan_id = dma_spec->args[0];
/* in uniphier_xdmac_probe() */
        xdev->reg_base = devm_platform_ioremap_resource(pdev, 0);

        for (i = 0; i < nr_chans; i++)

        irq = platform_get_irq(pdev, 0);
        if (irq < 0)

        return 0;
/* in uniphier_xdmac_remove() */
        return 0;