Lines Matching refs:nbpf
Identifier cross-reference for the nbpf symbol in the Linux nbpfaxi DMA engine driver. Each entry gives the source line number, the matching code, and the enclosing context ("member", "argument", "local", or the containing function).

199 struct nbpf_device *nbpf; member
315 static inline u32 nbpf_read(struct nbpf_device *nbpf, in nbpf_read() argument
318 u32 data = ioread32(nbpf->base + offset); in nbpf_read()
319 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_read()
320 __func__, nbpf->base, offset, data); in nbpf_read()
324 static inline void nbpf_write(struct nbpf_device *nbpf, in nbpf_write() argument
327 iowrite32(data, nbpf->base + offset); in nbpf_write()
328 dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_write()
329 __func__, nbpf->base, offset, data); in nbpf_write()
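These two helpers are the single choke point for register access: nbpf_read() and nbpf_write() wrap ioread32()/iowrite32() on the remapped nbpf->base and trace every access through dev_dbg(). A minimal compilable sketch of the pattern, assuming the abbreviated struct layout below (only the members visible in this listing) and an unsigned int offset parameter:

#include <linux/io.h>
#include <linux/device.h>
#include <linux/dmaengine.h>

struct nbpf_device {
        struct dma_device dma_dev;
        void __iomem *base;
        /* ... remaining members elided ... */
};

static inline u32 nbpf_read(struct nbpf_device *nbpf, unsigned int offset)
{
        u32 data = ioread32(nbpf->base + offset);

        /* Compiled away unless dynamic debug enables it */
        dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
                __func__, nbpf->base, offset, data);
        return data;
}

static inline void nbpf_write(struct nbpf_device *nbpf,
                              unsigned int offset, u32 data)
{
        iowrite32(data, nbpf->base + offset);
        dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n",
                __func__, nbpf->base, offset, data);
}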
339 u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END); in nbpf_status_get()
341 return status & BIT(chan - chan->nbpf->chan); in nbpf_status_get()
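nbpf_status_get() packs two idioms into one return: pointer subtraction against the embedded chan[] array recovers the channel index, and BIT() turns that index into a mask for the end-of-transfer status register NBPF_DSTAT_END. A sketch, assuming the back-pointer set up in nbpf_chan_probe() (line 1244):

struct nbpf_channel {
        struct nbpf_device *nbpf;       /* back-pointer, see line 1244 */
        /* ... */
};

static u32 nbpf_status_get(struct nbpf_channel *chan)
{
        u32 status = nbpf_read(chan->nbpf, NBPF_DSTAT_END);

        /*
         * chan points into nbpf->chan[], so the subtraction is the
         * channel index; each channel owns one bit of NBPF_DSTAT_END.
         */
        return status & BIT(chan - chan->nbpf->chan);
}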
349 static u32 nbpf_error_get(struct nbpf_device *nbpf) in nbpf_error_get() argument
351 return nbpf_read(nbpf, NBPF_DSTAT_ER); in nbpf_error_get()
354 static struct nbpf_channel *nbpf_error_get_channel(struct nbpf_device *nbpf, u32 error) in nbpf_error_get_channel() argument
356 return nbpf->chan + __ffs(error); in nbpf_error_get_channel()
394 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): next 0x%x, cur 0x%x\n", __func__, in nbpf_start()
427 static u32 nbpf_xfer_ds(struct nbpf_device *nbpf, size_t size, in nbpf_xfer_ds() argument
430 int max_burst = nbpf->config->buffer_size * 8; in nbpf_xfer_ds()
432 if (nbpf->max_burst_mem_read || nbpf->max_burst_mem_write) { in nbpf_xfer_ds()
435 max_burst = min_not_zero(nbpf->max_burst_mem_read, in nbpf_xfer_ds()
436 nbpf->max_burst_mem_write); in nbpf_xfer_ds()
439 if (nbpf->max_burst_mem_read) in nbpf_xfer_ds()
440 max_burst = nbpf->max_burst_mem_read; in nbpf_xfer_ds()
443 if (nbpf->max_burst_mem_write) in nbpf_xfer_ds()
444 max_burst = nbpf->max_burst_mem_write; in nbpf_xfer_ds()
456 static size_t nbpf_xfer_size(struct nbpf_device *nbpf, in nbpf_xfer_size() argument
484 return nbpf_xfer_ds(nbpf, size, DMA_TRANS_NONE); in nbpf_xfer_size()
533 mem_xfer = nbpf_xfer_ds(chan->nbpf, size, direction); in nbpf_prep_one()
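nbpf_xfer_ds() picks the burst ceiling before encoding the transfer size: the default is config->buffer_size * 8, and the optional device-tree caps override it per direction, with DMA_MEM_TO_MEM taking the stricter of the two via min_not_zero() since both ends touch memory; nbpf_xfer_size() reuses the same path with DMA_TRANS_NONE when no direction applies (line 484). The selection can be isolated in a hypothetical helper like this (the helper name and the GNU ?: shorthand are mine; the cases follow lines 430-444):

/* Hypothetical helper isolating the burst-cap selection of lines 430-444 */
static int nbpf_max_burst(struct nbpf_device *nbpf,
                          enum dma_transfer_direction direction)
{
        int max_burst = nbpf->config->buffer_size * 8;  /* hardware default */

        if (!nbpf->max_burst_mem_read && !nbpf->max_burst_mem_write)
                return max_burst;

        switch (direction) {
        case DMA_MEM_TO_MEM:
                /* Both ends hit memory: honour the stricter non-zero cap */
                return min_not_zero(nbpf->max_burst_mem_read,
                                    nbpf->max_burst_mem_write);
        case DMA_MEM_TO_DEV:
                /* Source is memory: the read cap applies, if given */
                return nbpf->max_burst_mem_read ?: max_burst;
        case DMA_DEV_TO_MEM:
                /* Destination is memory: the write cap applies, if given */
                return nbpf->max_burst_mem_write ?: max_burst;
        default:        /* includes DMA_TRANS_NONE from nbpf_xfer_size() */
                return max_burst;
        }
}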
587 static void nbpf_configure(struct nbpf_device *nbpf) in nbpf_configure() argument
589 nbpf_write(nbpf, NBPF_CTRL, NBPF_CTRL_LVINT); in nbpf_configure()
850 dev_dbg(chan->nbpf->dma_dev.dev, "%s(): force-free desc %p cookie %d\n", in nbpf_chan_idle()
898 chan->slave_dst_width = nbpf_xfer_size(chan->nbpf, in nbpf_config()
900 chan->slave_dst_burst = nbpf_xfer_size(chan->nbpf, in nbpf_config()
904 chan->slave_src_width = nbpf_xfer_size(chan->nbpf, in nbpf_config()
906 chan->slave_src_burst = nbpf_xfer_size(chan->nbpf, in nbpf_config()
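nbpf_config() does not store the raw dma_slave_config: each width and burst is first run through nbpf_xfer_size() so the channel caches device-encoded values ready to be programmed later. A sketch, assuming an nbpf_to_chan() container_of helper and slave_* cache fields on the channel, and assuming the second/third argument pattern (width with a burst of 1, then width with maxburst), none of which is visible in this listing:

static int nbpf_config(struct dma_chan *dchan,
                       struct dma_slave_config *config)
{
        struct nbpf_channel *chan = nbpf_to_chan(dchan); /* assumed helper */

        /* Cache device-encoded values; applied when a transfer is prepared */
        chan->slave_dst_width = nbpf_xfer_size(chan->nbpf,
                                               config->dst_addr_width, 1);
        chan->slave_dst_burst = nbpf_xfer_size(chan->nbpf,
                                               config->dst_addr_width,
                                               config->dst_maxburst);
        chan->slave_src_width = nbpf_xfer_size(chan->nbpf,
                                               config->src_addr_width, 1);
        chan->slave_src_burst = nbpf_xfer_size(chan->nbpf,
                                               config->src_addr_width,
                                               config->src_maxburst);

        return 0;
}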
1084 struct nbpf_device *nbpf = ofdma->of_dma_data; in nbpf_of_xlate() local
1091 dchan = dma_get_any_slave_channel(&nbpf->dma_dev); in nbpf_of_xlate()
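nbpf_of_xlate() is the #dma-cells translator that probe registers with of_dma_controller_register() (line 1441): it recovers the nbpf_device from ofdma->of_dma_data and hands out any free channel via dma_get_any_slave_channel(). A sketch with the argument decoding reduced to a comment, since those lines are not part of this listing:

#include <linux/of_dma.h>

static struct dma_chan *nbpf_of_xlate(struct of_phandle_args *dma_spec,
                                      struct of_dma *ofdma)
{
        struct nbpf_device *nbpf = ofdma->of_dma_data; /* set at registration */
        struct dma_chan *dchan;

        dchan = dma_get_any_slave_channel(&nbpf->dma_dev);
        if (!dchan)
                return NULL;

        /* ... decode dma_spec->args[] into the channel's terminal (not shown) ... */

        return dchan;
}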
1219 struct nbpf_device *nbpf = dev; in nbpf_err_irq() local
1220 u32 error = nbpf_error_get(nbpf); in nbpf_err_irq()
1222 dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq); in nbpf_err_irq()
1228 struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error); in nbpf_err_irq()
1232 error = nbpf_error_get(nbpf); in nbpf_err_irq()
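The error interrupt handler ties the earlier helpers together: it warns, bails out with IRQ_NONE when no error bit is set (the line is requested IRQF_SHARED, line 1385), then loops, resolving each set bit of NBPF_DSTAT_ER to its channel and re-reading until the register is clear. A sketch; nbpf_error_clear() is implied by the loop but not visible here, so treat it as an assumption, while nbpf_chan_idle() appears at line 850:

#include <linux/interrupt.h>

static irqreturn_t nbpf_err_irq(int irq, void *dev)
{
        struct nbpf_device *nbpf = dev; /* cookie from devm_request_irq() */
        u32 error = nbpf_error_get(nbpf);

        dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq);

        if (!error)
                return IRQ_NONE;        /* shared line, interrupt is not ours */

        do {
                struct nbpf_channel *chan = nbpf_error_get_channel(nbpf, error);

                nbpf_error_clear(chan); /* assumed: ack the channel's error bit */
                nbpf_chan_idle(chan);   /* force-free queued descriptors, line 850 */
                error = nbpf_error_get(nbpf);   /* more channels may have erred */
        } while (error);

        return IRQ_HANDLED;
}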
1238 static int nbpf_chan_probe(struct nbpf_device *nbpf, int n) in nbpf_chan_probe() argument
1240 struct dma_device *dma_dev = &nbpf->dma_dev; in nbpf_chan_probe()
1241 struct nbpf_channel *chan = nbpf->chan + n; in nbpf_chan_probe()
1244 chan->nbpf = nbpf; in nbpf_chan_probe()
1245 chan->base = nbpf->base + NBPF_REG_CHAN_OFFSET + NBPF_REG_CHAN_SIZE * n; in nbpf_chan_probe()
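nbpf_chan_probe() wires channel n to its slice of the register file: every channel gets a window at a fixed offset plus a fixed stride from the device base. Sketch, with the dmaengine bookkeeping elided:

static int nbpf_chan_probe(struct nbpf_device *nbpf, int n)
{
        struct nbpf_channel *chan = nbpf->chan + n;

        chan->nbpf = nbpf;      /* back-pointer used by all per-channel helpers */
        /* Channel registers: fixed header, then one fixed-size block per channel */
        chan->base = nbpf->base + NBPF_REG_CHAN_OFFSET + NBPF_REG_CHAN_SIZE * n;

        /* ... IRQ request, tasklet and dmaengine channel registration elided ... */
        return 0;
}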
1288 struct nbpf_device *nbpf; in nbpf_probe() local
1306 nbpf = devm_kzalloc(dev, struct_size(nbpf, chan, num_channels), in nbpf_probe()
1308 if (!nbpf) in nbpf_probe()
1311 dma_dev = &nbpf->dma_dev; in nbpf_probe()
1315 nbpf->base = devm_ioremap_resource(dev, iomem); in nbpf_probe()
1316 if (IS_ERR(nbpf->base)) in nbpf_probe()
1317 return PTR_ERR(nbpf->base); in nbpf_probe()
1319 nbpf->clk = devm_clk_get(dev, NULL); in nbpf_probe()
1320 if (IS_ERR(nbpf->clk)) in nbpf_probe()
1321 return PTR_ERR(nbpf->clk); in nbpf_probe()
1324 &nbpf->max_burst_mem_read); in nbpf_probe()
1326 &nbpf->max_burst_mem_write); in nbpf_probe()
1328 nbpf->config = cfg; in nbpf_probe()
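Early probe is fully devm-managed: one struct_size() allocation covers the device header plus the flexible chan[] array, devm_ioremap_resource() maps the register bank, devm_clk_get() takes the functional clock, and two optional burst caps are read from the device tree before the matched configuration is stored. A condensed sketch; the match-data lookup, the config type name and the DT property names are assumptions, since only the destination fields appear in this listing:

#include <linux/clk.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

static int nbpf_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        const struct nbpf_config *cfg = of_device_get_match_data(dev); /* assumed */
        int num_channels = cfg->num_channels;   /* field per line 1462 */
        struct nbpf_device *nbpf;
        struct resource *iomem;

        /* One allocation: header plus the flexible chan[] array */
        nbpf = devm_kzalloc(dev, struct_size(nbpf, chan, num_channels),
                            GFP_KERNEL);
        if (!nbpf)
                return -ENOMEM;

        iomem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        nbpf->base = devm_ioremap_resource(dev, iomem);
        if (IS_ERR(nbpf->base))
                return PTR_ERR(nbpf->base);

        nbpf->clk = devm_clk_get(dev, NULL);
        if (IS_ERR(nbpf->clk))
                return PTR_ERR(nbpf->clk);

        /* Optional caps: a missing property leaves the field at zero */
        of_property_read_u32(dev->of_node, "max-burst-mem-read",
                             &nbpf->max_burst_mem_read);
        of_property_read_u32(dev->of_node, "max-burst-mem-write",
                             &nbpf->max_burst_mem_write);

        nbpf->config = cfg;
        /* ... IRQ wiring, channel probes and registration continue below ... */
        return 0;
}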
1353 nbpf->chan[i].irq = irqbuf[0]; in nbpf_probe()
1362 for (i = 0, chan = nbpf->chan; i <= num_channels; in nbpf_probe()
1370 if (chan != nbpf->chan + num_channels) in nbpf_probe()
1380 nbpf->chan[i].irq = irq; in nbpf_probe()
1385 IRQF_SHARED, "dma error", nbpf); in nbpf_probe()
1388 nbpf->eirq = eirq; in nbpf_probe()
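Lines 1353-1388 distribute the platform IRQs three ways: a single line shared by the error handler and every channel, one line per channel plus a dedicated error line, or two lines where the non-error one is shared by all channels; in every case the error line is requested with IRQF_SHARED. A hypothetical helper condensing that decision (the skip loop and the byname lookup are reconstructions, anchored by the consistency check on line 1370):

/* Hypothetical helper condensing the IRQ wiring of lines 1353-1388 */
static int nbpf_wire_irqs(struct nbpf_device *nbpf, struct platform_device *pdev,
                          const int *irqbuf, int irqs, int num_channels)
{
        int i, irq, eirq, ret;

        if (irqs == 1) {
                /* One line for everything: error handler and all channels */
                eirq = irqbuf[0];
                for (i = 0; i < num_channels; i++)
                        nbpf->chan[i].irq = irqbuf[0];
        } else {
                eirq = platform_get_irq_byname(pdev, "error"); /* assumed lookup */
                if (eirq < 0)
                        return eirq;

                if (irqs == num_channels + 1) {
                        /* Dedicated line per channel: hand out every entry
                         * of irqbuf[] except the error line itself */
                        struct nbpf_channel *chan = nbpf->chan;

                        for (i = 0; i < irqs; i++) {
                                if (irqbuf[i] == eirq)
                                        continue;
                                (chan++)->irq = irqbuf[i];
                        }
                        /* Sanity check, cf. line 1370 */
                        if (chan != nbpf->chan + num_channels)
                                return -EINVAL;
                } else {
                        /* Two lines: the non-error one is shared by all channels */
                        irq = irqbuf[0] == eirq ? irqbuf[1] : irqbuf[0];
                        for (i = 0; i < num_channels; i++)
                                nbpf->chan[i].irq = irq;
                }
        }

        /* The error line may be shared, hence IRQF_SHARED */
        ret = devm_request_irq(&pdev->dev, eirq, nbpf_err_irq,
                               IRQF_SHARED, "dma error", nbpf);
        if (ret < 0)
                return ret;
        nbpf->eirq = eirq;

        return 0;
}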
1394 ret = nbpf_chan_probe(nbpf, i); in nbpf_probe()
1429 platform_set_drvdata(pdev, nbpf); in nbpf_probe()
1431 ret = clk_prepare_enable(nbpf->clk); in nbpf_probe()
1435 nbpf_configure(nbpf); in nbpf_probe()
1441 ret = of_dma_controller_register(np, nbpf_of_xlate, nbpf); in nbpf_probe()
1450 clk_disable_unprepare(nbpf->clk); in nbpf_probe()
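The tail of probe fixes the teardown order: set drvdata, enable the clock, configure the controller, register the dmaengine device, then hook up the OF translator; the clock enable is the one non-devm step, so both failure labels funnel into clk_disable_unprepare() (line 1450). A fragment-level sketch of that sequence, with np = dev->of_node, dma_dev = &nbpf->dma_dev (line 1311), and assumed label names:

        platform_set_drvdata(pdev, nbpf);

        ret = clk_prepare_enable(nbpf->clk);
        if (ret < 0)
                return ret;

        /* Global setup: switch the controller to level interrupts (line 589) */
        nbpf_configure(nbpf);

        ret = dma_async_device_register(dma_dev);
        if (ret < 0)
                goto e_clk_off;

        ret = of_dma_controller_register(np, nbpf_of_xlate, nbpf);
        if (ret)
                goto e_dma_dev_unreg;

        return 0;

e_dma_dev_unreg:
        dma_async_device_unregister(dma_dev);
e_clk_off:
        clk_disable_unprepare(nbpf->clk);       /* the lone non-devm resource */
        return ret;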
1457 struct nbpf_device *nbpf = platform_get_drvdata(pdev); in nbpf_remove() local
1460 devm_free_irq(&pdev->dev, nbpf->eirq, nbpf); in nbpf_remove()
1462 for (i = 0; i < nbpf->config->num_channels; i++) { in nbpf_remove()
1463 struct nbpf_channel *chan = nbpf->chan + i; in nbpf_remove()
1471 dma_async_device_unregister(&nbpf->dma_dev); in nbpf_remove()
1472 clk_disable_unprepare(nbpf->clk); in nbpf_remove()
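nbpf_remove() unwinds in reverse: the shared error IRQ and the per-channel IRQs are freed explicitly (rather than left to devm) so that no handler can run into the teardown below, then the dmaengine device is unregistered and the clock released. Sketch; the per-channel tasklet and the of_dma_controller_free() call are assumptions that mirror probe:

static int nbpf_remove(struct platform_device *pdev)
{
        struct nbpf_device *nbpf = platform_get_drvdata(pdev);
        int i;

        /* Free IRQs by hand so no handler races the teardown below */
        devm_free_irq(&pdev->dev, nbpf->eirq, nbpf);

        for (i = 0; i < nbpf->config->num_channels; i++) {
                struct nbpf_channel *chan = nbpf->chan + i;

                devm_free_irq(&pdev->dev, chan->irq, chan);
                tasklet_kill(&chan->tasklet);   /* assumed per-channel tasklet */
        }

        of_dma_controller_free(pdev->dev.of_node);      /* mirrors registration */
        dma_async_device_unregister(&nbpf->dma_dev);
        clk_disable_unprepare(nbpf->clk);

        return 0;
}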
1494 struct nbpf_device *nbpf = dev_get_drvdata(dev); in nbpf_runtime_suspend() local
1495 clk_disable_unprepare(nbpf->clk); in nbpf_runtime_suspend()
1501 struct nbpf_device *nbpf = dev_get_drvdata(dev); in nbpf_runtime_resume() local
1502 return clk_prepare_enable(nbpf->clk); in nbpf_runtime_resume()
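Runtime PM here is pure clock gating: suspend unprepares the functional clock and resume re-enables it, with dev_get_drvdata() recovering the device set by platform_set_drvdata() (line 1429). Sketch, including the assumed dev_pm_ops glue:

#include <linux/pm.h>

static int nbpf_runtime_suspend(struct device *dev)
{
        struct nbpf_device *nbpf = dev_get_drvdata(dev);

        clk_disable_unprepare(nbpf->clk);       /* gate the functional clock */
        return 0;
}

static int nbpf_runtime_resume(struct device *dev)
{
        struct nbpf_device *nbpf = dev_get_drvdata(dev);

        return clk_prepare_enable(nbpf->clk);
}

/* Assumed glue hooking the callbacks into the platform driver */
static const struct dev_pm_ops nbpf_pm_ops = {
        SET_RUNTIME_PM_OPS(nbpf_runtime_suspend, nbpf_runtime_resume, NULL)
};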