Lines Matching +full:0 +full:xc

20 #define XDMAC_CH_WIDTH       0x100
22 #define XDMAC_TFA            0x08
24 #define XDMAC_TFA_MASK       GENMASK(5, 0)
25 #define XDMAC_SADM           0x10
29 #define XDMAC_SADM_SAM_INC   0
30 #define XDMAC_DADM           0x14
35 #define XDMAC_EXSAD          0x18
36 #define XDMAC_EXDAD          0x1c
37 #define XDMAC_SAD            0x20
38 #define XDMAC_DAD            0x24
39 #define XDMAC_ITS            0x28
40 #define XDMAC_ITS_MASK       GENMASK(25, 0)
41 #define XDMAC_TNUM           0x2c
42 #define XDMAC_TNUM_MASK      GENMASK(15, 0)
43 #define XDMAC_TSS            0x30
44 #define XDMAC_TSS_REQ        BIT(0)
45 #define XDMAC_IEN            0x34
47 #define XDMAC_IEN_ENDIEN     BIT(0)
48 #define XDMAC_STAT           0x40
49 #define XDMAC_STAT_TENF      BIT(0)
50 #define XDMAC_IR             0x44
52 #define XDMAC_IR_ENDF        BIT(0)
53 #define XDMAC_ID             0x48
55 #define XDMAC_ID_ENDIDF      BIT(0)
62 #define XDMAC_MAX_WORD_SIZE  (XDMAC_ITS_MASK & ~GENMASK(3, 0))
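The register map above builds its field masks with the GENMASK()/BIT() helpers from linux/bits.h. As a quick sanity check, here is a minimal userspace sketch (GENMASK() is a hypothetical re-implementation, assuming a 64-bit unsigned long) that expands the ITS/TNUM masks and shows how XDMAC_MAX_WORD_SIZE follows from XDMAC_ITS_MASK:

    /* Userspace sketch: expand the mask macros used in the register map.
     * GENMASK() here is a hypothetical re-implementation of the
     * linux/bits.h helper and assumes a 64-bit unsigned long.
     */
    #include <stdio.h>

    #define GENMASK(h, l)        (((~0UL) << (l)) & (~0UL >> (63 - (h))))

    #define XDMAC_ITS_MASK       GENMASK(25, 0)
    #define XDMAC_TNUM_MASK      GENMASK(15, 0)
    #define XDMAC_MAX_WORD_SIZE  (XDMAC_ITS_MASK & ~GENMASK(3, 0))

    int main(void)
    {
        printf("ITS mask:      0x%08lx\n", XDMAC_ITS_MASK);      /* 0x03ffffff */
        printf("TNUM mask:     0x%08lx\n", XDMAC_TNUM_MASK);     /* 0x0000ffff */
        printf("max word size: 0x%08lx\n", XDMAC_MAX_WORD_SIZE); /* 0x03fffff0 */
        return 0;
    }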
115 /* xc->vc.lock must be held by caller */
117 uniphier_xdmac_next_desc(struct uniphier_xdmac_chan *xc) in uniphier_xdmac_next_desc() argument
121 vd = vchan_next_desc(&xc->vc); in uniphier_xdmac_next_desc()
130 /* xc->vc.lock must be held by caller */
131 static void uniphier_xdmac_chan_start(struct uniphier_xdmac_chan *xc, in uniphier_xdmac_chan_start() argument
150 buswidth = xc->sconfig.src_addr_width; in uniphier_xdmac_chan_start()
159 buswidth = xc->sconfig.dst_addr_width; in uniphier_xdmac_chan_start()
168 val |= FIELD_PREP(XDMAC_TFA_MASK, xc->req_factor); in uniphier_xdmac_chan_start()
169 writel(val, xc->reg_ch_base + XDMAC_TFA); in uniphier_xdmac_chan_start()
172 writel(lower_32_bits(src_addr), xc->reg_ch_base + XDMAC_SAD); in uniphier_xdmac_chan_start()
173 writel(upper_32_bits(src_addr), xc->reg_ch_base + XDMAC_EXSAD); in uniphier_xdmac_chan_start()
175 writel(lower_32_bits(dst_addr), xc->reg_ch_base + XDMAC_DAD); in uniphier_xdmac_chan_start()
176 writel(upper_32_bits(dst_addr), xc->reg_ch_base + XDMAC_EXDAD); in uniphier_xdmac_chan_start()
180 writel(src_mode, xc->reg_ch_base + XDMAC_SADM); in uniphier_xdmac_chan_start()
181 writel(dst_mode, xc->reg_ch_base + XDMAC_DADM); in uniphier_xdmac_chan_start()
183 writel(its, xc->reg_ch_base + XDMAC_ITS); in uniphier_xdmac_chan_start()
184 writel(tnum, xc->reg_ch_base + XDMAC_TNUM); in uniphier_xdmac_chan_start()
188 xc->reg_ch_base + XDMAC_IEN); in uniphier_xdmac_chan_start()
191 val = readl(xc->reg_ch_base + XDMAC_TSS); in uniphier_xdmac_chan_start()
193 writel(val, xc->reg_ch_base + XDMAC_TSS); in uniphier_xdmac_chan_start()
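uniphier_xdmac_chan_start() programs each 64-bit source and destination address as a low/high register pair (SAD/EXSAD and DAD/EXDAD). A minimal standalone sketch of that split, with hypothetical userspace stand-ins for the kernel's lower_32_bits()/upper_32_bits() helpers:

    /* Userspace sketch: split a 64-bit DMA address across the SAD/EXSAD
     * (or DAD/EXDAD) register pair. lower_32_bits()/upper_32_bits() are
     * hypothetical stand-ins for the kernel helpers of the same name.
     */
    #include <stdio.h>
    #include <stdint.h>

    static uint32_t lower_32_bits(uint64_t addr) { return (uint32_t)addr; }
    static uint32_t upper_32_bits(uint64_t addr) { return (uint32_t)(addr >> 32); }

    int main(void)
    {
        uint64_t src_addr = 0x123456789abcULL;  /* example address */

        printf("SAD   = 0x%08x\n", (unsigned)lower_32_bits(src_addr)); /* 0x56789abc */
        printf("EXSAD = 0x%08x\n", (unsigned)upper_32_bits(src_addr)); /* 0x00001234 */
        return 0;
    }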
196 /* xc->vc.lock must be held by caller */
197 static int uniphier_xdmac_chan_stop(struct uniphier_xdmac_chan *xc) in uniphier_xdmac_chan_stop() argument
202 val = readl(xc->reg_ch_base + XDMAC_IEN); in uniphier_xdmac_chan_stop()
204 writel(val, xc->reg_ch_base + XDMAC_IEN); in uniphier_xdmac_chan_stop()
207 val = readl(xc->reg_ch_base + XDMAC_TSS); in uniphier_xdmac_chan_stop()
209 writel(0, xc->reg_ch_base + XDMAC_TSS); in uniphier_xdmac_chan_stop()
212 return readl_poll_timeout(xc->reg_ch_base + XDMAC_STAT, val, in uniphier_xdmac_chan_stop()
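The stop path masks the end interrupt, drops the TSS request, then waits for the channel to go idle. Below is a minimal standalone sketch of that wait-for-bit-clear pattern; poll_tenf_clear() is a hypothetical stand-in for the driver's readl_poll_timeout() call, and the delay between polls is omitted:

    /* Userspace sketch of the wait-for-idle pattern in the stop path:
     * poll a status word until XDMAC_STAT_TENF clears or a bounded number
     * of attempts is used up. In the driver this is readl_poll_timeout().
     */
    #include <stdio.h>
    #include <stdint.h>
    #include <errno.h>

    #define XDMAC_STAT_TENF (1u << 0)

    static int poll_tenf_clear(const volatile uint32_t *stat, unsigned int max_tries)
    {
        while (max_tries--) {
            if (!(*stat & XDMAC_STAT_TENF))
                return 0;               /* channel went idle */
        }
        return -ETIMEDOUT;              /* still busy after the last try */
    }

    int main(void)
    {
        volatile uint32_t stat = 0;     /* pretend the channel already stopped */

        printf("poll result: %d\n", poll_tenf_clear(&stat, 100));
        return 0;
    }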
216 /* xc->vc.lock must be held by caller */
217 static void uniphier_xdmac_start(struct uniphier_xdmac_chan *xc) in uniphier_xdmac_start() argument
221 xd = uniphier_xdmac_next_desc(xc); in uniphier_xdmac_start()
223 uniphier_xdmac_chan_start(xc, xd); in uniphier_xdmac_start()
226 xc->xd = xd; in uniphier_xdmac_start()
229 static void uniphier_xdmac_chan_irq(struct uniphier_xdmac_chan *xc) in uniphier_xdmac_chan_irq() argument
234 spin_lock(&xc->vc.lock); in uniphier_xdmac_chan_irq()
236 stat = readl(xc->reg_ch_base + XDMAC_ID); in uniphier_xdmac_chan_irq()
239 ret = uniphier_xdmac_chan_stop(xc); in uniphier_xdmac_chan_irq()
241 dev_err(xc->xdev->ddev.dev, in uniphier_xdmac_chan_irq()
244 dev_err(xc->xdev->ddev.dev, in uniphier_xdmac_chan_irq()
247 } else if ((stat & XDMAC_ID_ENDIDF) && xc->xd) { in uniphier_xdmac_chan_irq()
248 xc->xd->cur_node++; in uniphier_xdmac_chan_irq()
249 if (xc->xd->cur_node >= xc->xd->nr_node) { in uniphier_xdmac_chan_irq()
250 vchan_cookie_complete(&xc->xd->vd); in uniphier_xdmac_chan_irq()
251 uniphier_xdmac_start(xc); in uniphier_xdmac_chan_irq()
253 uniphier_xdmac_chan_start(xc, xc->xd); in uniphier_xdmac_chan_irq()
258 writel(stat, xc->reg_ch_base + XDMAC_IR); in uniphier_xdmac_chan_irq()
260 spin_unlock(&xc->vc.lock); in uniphier_xdmac_chan_irq()
268 for (i = 0; i < xdev->nr_chans; i++) in uniphier_xdmac_irq_handler()
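The interrupt path tracks per-descriptor progress in cur_node: each end-of-transfer interrupt advances it, the descriptor completes once all nr_node nodes have been transferred, and otherwise the next node is programmed. A standalone sketch of just that bookkeeping, using a hypothetical simplified descriptor struct:

    /* Userspace sketch of the completion bookkeeping in the IRQ path:
     * each end-of-transfer interrupt advances cur_node; the descriptor is
     * complete once every node has been transferred, otherwise the next
     * node gets programmed. The struct is a hypothetical simplification.
     */
    #include <stdio.h>
    #include <stdbool.h>

    struct fake_desc {
        unsigned int cur_node;
        unsigned int nr_node;
    };

    /* true once the whole descriptor finished, false if another node remains */
    static bool desc_advance(struct fake_desc *xd)
    {
        xd->cur_node++;
        return xd->cur_node >= xd->nr_node;
    }

    int main(void)
    {
        struct fake_desc xd = { .cur_node = 0, .nr_node = 3 };

        while (!desc_advance(&xd))
            printf("start node %u\n", xd.cur_node);
        printf("descriptor complete after %u nodes\n", xd.nr_node);
        return 0;
    }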
298 for (i = 0; i < nr; i++) { in uniphier_xdmac_prep_dma_memcpy()
312 xd->cur_node = 0; in uniphier_xdmac_prep_dma_memcpy()
324 struct uniphier_xdmac_chan *xc = to_uniphier_xdmac_chan(vc); in uniphier_xdmac_prep_slave_sg() local
335 buswidth = xc->sconfig.src_addr_width; in uniphier_xdmac_prep_slave_sg()
336 maxburst = xc->sconfig.src_maxburst; in uniphier_xdmac_prep_slave_sg()
338 buswidth = xc->sconfig.dst_addr_width; in uniphier_xdmac_prep_slave_sg()
339 maxburst = xc->sconfig.dst_maxburst; in uniphier_xdmac_prep_slave_sg()
344 if (maxburst > xc->xdev->ddev.max_burst) { in uniphier_xdmac_prep_slave_sg()
345 dev_err(xc->xdev->ddev.dev, in uniphier_xdmac_prep_slave_sg()
356 ? xc->sconfig.src_addr : sg_dma_address(sg); in uniphier_xdmac_prep_slave_sg()
358 ? xc->sconfig.dst_addr : sg_dma_address(sg); in uniphier_xdmac_prep_slave_sg()
372 dev_err(xc->xdev->ddev.dev, in uniphier_xdmac_prep_slave_sg()
379 dev_err(xc->xdev->ddev.dev, in uniphier_xdmac_prep_slave_sg()
388 xd->cur_node = 0; in uniphier_xdmac_prep_slave_sg()
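In the slave_sg path the fixed device address comes from the channel's dma_slave_config while the memory side comes from the scatterlist, and which one acts as source or destination depends on the transfer direction. A standalone sketch of that selection, with hypothetical simplified names and example addresses:

    /* Userspace sketch of the direction handling in the slave_sg path:
     * DEV_TO_MEM reads from the fixed device address into the scatterlist
     * entry, MEM_TO_DEV goes the other way. Names and addresses are
     * hypothetical simplifications of the driver's types.
     */
    #include <stdio.h>
    #include <stdint.h>

    enum dma_dir { DEV_TO_MEM, MEM_TO_DEV };

    struct xfer {
        uint64_t src;
        uint64_t dst;
    };

    static struct xfer pick_addresses(enum dma_dir dir, uint64_t dev_addr,
                                      uint64_t sg_addr)
    {
        struct xfer x;

        x.src = (dir == DEV_TO_MEM) ? dev_addr : sg_addr;
        x.dst = (dir == DEV_TO_MEM) ? sg_addr : dev_addr;
        return x;
    }

    int main(void)
    {
        struct xfer rx = pick_addresses(DEV_TO_MEM, 0x1000, 0x80000000);
        struct xfer tx = pick_addresses(MEM_TO_DEV, 0x1000, 0x80000000);

        printf("rx: src=0x%llx dst=0x%llx\n",
               (unsigned long long)rx.src, (unsigned long long)rx.dst);
        printf("tx: src=0x%llx dst=0x%llx\n",
               (unsigned long long)tx.src, (unsigned long long)tx.dst);
        return 0;
    }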
397 struct uniphier_xdmac_chan *xc = to_uniphier_xdmac_chan(vc); in uniphier_xdmac_slave_config() local
399 memcpy(&xc->sconfig, config, sizeof(*config)); in uniphier_xdmac_slave_config()
401 return 0; in uniphier_xdmac_slave_config()
407 struct uniphier_xdmac_chan *xc = to_uniphier_xdmac_chan(vc); in uniphier_xdmac_terminate_all() local
409 int ret = 0; in uniphier_xdmac_terminate_all()
414 if (xc->xd) { in uniphier_xdmac_terminate_all()
415 vchan_terminate_vdesc(&xc->xd->vd); in uniphier_xdmac_terminate_all()
416 xc->xd = NULL; in uniphier_xdmac_terminate_all()
417 ret = uniphier_xdmac_chan_stop(xc); in uniphier_xdmac_terminate_all()
437 struct uniphier_xdmac_chan *xc = to_uniphier_xdmac_chan(vc); in uniphier_xdmac_issue_pending() local
442 if (vchan_issue_pending(vc) && !xc->xd) in uniphier_xdmac_issue_pending()
443 uniphier_xdmac_start(xc); in uniphier_xdmac_issue_pending()
456 struct uniphier_xdmac_chan *xc = &xdev->channels[ch]; in uniphier_xdmac_chan_init() local
458 xc->xdev = xdev; in uniphier_xdmac_chan_init()
459 xc->reg_ch_base = xdev->reg_base + XDMAC_CH_WIDTH * ch; in uniphier_xdmac_chan_init()
460 xc->vc.desc_free = uniphier_xdmac_desc_free; in uniphier_xdmac_chan_init()
462 vchan_init(&xc->vc, &xdev->ddev); in uniphier_xdmac_chan_init()
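Channel register banks sit back to back, XDMAC_CH_WIDTH (0x100) bytes apart, which is why chan_init() computes reg_ch_base as reg_base + XDMAC_CH_WIDTH * ch. A trivial standalone sketch of that layout (the base address below is only an example value, not taken from the driver):

    /* Userspace sketch: channel register banks are XDMAC_CH_WIDTH (0x100)
     * bytes apart, so channel N's registers start at reg_base + 0x100 * N.
     */
    #include <stdio.h>

    #define XDMAC_CH_WIDTH 0x100

    int main(void)
    {
        unsigned long reg_base = 0x10000000;  /* example base address */
        int ch;

        for (ch = 0; ch < 4; ch++)
            printf("channel %d regs at 0x%lx\n", ch, reg_base + XDMAC_CH_WIDTH * ch);
        return 0;
    }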
469 int chan_id = dma_spec->args[0]; in of_dma_uniphier_xlate()
500 xdev->reg_base = devm_platform_ioremap_resource(pdev, 0); in uniphier_xdmac_probe()
525 for (i = 0; i < nr_chans; i++) in uniphier_xdmac_probe()
528 irq = platform_get_irq(pdev, 0); in uniphier_xdmac_probe()
529 if (irq < 0) in uniphier_xdmac_probe()
557 return 0; in uniphier_xdmac_probe()
589 return 0; in uniphier_xdmac_remove()