Lines Matching refs: pch  (drivers/dma/pl330.c)

1547 	struct dma_pl330_chan *pch;  in dma_pl330_rqcb()  local
1553 pch = desc->pchan; in dma_pl330_rqcb()
1556 if (!pch) in dma_pl330_rqcb()
1559 spin_lock_irqsave(&pch->lock, flags); in dma_pl330_rqcb()
1563 spin_unlock_irqrestore(&pch->lock, flags); in dma_pl330_rqcb()
1565 tasklet_schedule(&pch->task); in dma_pl330_rqcb()
2030 static inline void fill_queue(struct dma_pl330_chan *pch) in fill_queue() argument
2035 list_for_each_entry(desc, &pch->work_list, node) { in fill_queue()
2041 ret = pl330_submit_req(pch->thread, desc); in fill_queue()
2050 dev_err(pch->dmac->ddma.dev, "%s:%d Bad Desc(%d)\n", in fill_queue()
2052 tasklet_schedule(&pch->task); in fill_queue()
2059 struct dma_pl330_chan *pch = (struct dma_pl330_chan *)data; in pl330_tasklet() local
2064 spin_lock_irqsave(&pch->lock, flags); in pl330_tasklet()
2067 list_for_each_entry_safe(desc, _dt, &pch->work_list, node) in pl330_tasklet()
2069 if (!pch->cyclic) in pl330_tasklet()
2071 list_move_tail(&desc->node, &pch->completed_list); in pl330_tasklet()
2075 fill_queue(pch); in pl330_tasklet()
2077 if (list_empty(&pch->work_list)) { in pl330_tasklet()
2078 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
2079 _stop(pch->thread); in pl330_tasklet()
2080 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
2082 pch->active = false; in pl330_tasklet()
2085 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
2086 _start(pch->thread); in pl330_tasklet()
2087 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
2090 while (!list_empty(&pch->completed_list)) { in pl330_tasklet()
2093 desc = list_first_entry(&pch->completed_list, in pl330_tasklet()
2098 if (pch->cyclic) { in pl330_tasklet()
2100 list_move_tail(&desc->node, &pch->work_list); in pl330_tasklet()
2102 pch->active = true; in pl330_tasklet()
2103 spin_lock(&pch->thread->dmac->lock); in pl330_tasklet()
2104 _start(pch->thread); in pl330_tasklet()
2105 spin_unlock(&pch->thread->dmac->lock); in pl330_tasklet()
2110 list_move_tail(&desc->node, &pch->dmac->desc_pool); in pl330_tasklet()
2116 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tasklet()
2118 spin_lock_irqsave(&pch->lock, flags); in pl330_tasklet()
2121 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tasklet()
2125 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_tasklet()
2126 pm_runtime_put_autosuspend(pch->dmac->ddma.dev); in pl330_tasklet()
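The pl330_tasklet() references above show the driver's lock nesting: the per-channel pch->lock is taken IRQ-safe first, and the controller-wide pch->thread->dmac->lock is nested inside it only around the short _start()/_stop() of the hardware thread. A minimal sketch of that nesting, with both locks and the start helper as stand-ins rather than the driver's own symbols:

#include <linux/spinlock.h>

/* Lock-nesting sketch: chan_lock stands in for pch->lock, dmac_lock for
 * pch->thread->dmac->lock, start_hw() for _start(pch->thread). */
static void start_thread_sketch(spinlock_t *chan_lock, spinlock_t *dmac_lock,
                                void (*start_hw)(void))
{
        unsigned long flags;

        spin_lock_irqsave(chan_lock, flags);    /* outer, per-channel */
        spin_lock(dmac_lock);                   /* inner, controller-wide */
        start_hw();
        spin_unlock(dmac_lock);
        spin_unlock_irqrestore(chan_lock, flags);
}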
2152 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_alloc_chan_resources() local
2153 struct pl330_dmac *pl330 = pch->dmac; in pl330_alloc_chan_resources()
2159 pch->cyclic = false; in pl330_alloc_chan_resources()
2161 pch->thread = pl330_request_channel(pl330); in pl330_alloc_chan_resources()
2162 if (!pch->thread) { in pl330_alloc_chan_resources()
2167 tasklet_init(&pch->task, pl330_tasklet, (unsigned long) pch); in pl330_alloc_chan_resources()
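pl330_alloc_chan_resources() wires the channel's tasklet with the old-style tasklet_init() call shown above, passing the channel pointer through the unsigned long data argument that pl330_tasklet() casts back. A hedged reproduction of that pattern with placeholder names:

#include <linux/interrupt.h>
#include <linux/list.h>

/* Placeholder channel type; only the tasklet plumbing is shown. */
struct chan_stub {
        struct tasklet_struct task;
        struct list_head completed_list;
};

static void chan_stub_tasklet(unsigned long data)
{
        struct chan_stub *ch = (struct chan_stub *)data;

        /* drain ch->completed_list, refill the hardware queue, ... */
        (void)ch;
}

static void chan_stub_init(struct chan_stub *ch)
{
        INIT_LIST_HEAD(&ch->completed_list);
        tasklet_init(&ch->task, chan_stub_tasklet, (unsigned long)ch);
}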
2193 static void pl330_unprep_slave_fifo(struct dma_pl330_chan *pch) in pl330_unprep_slave_fifo() argument
2195 if (pch->dir != DMA_NONE) in pl330_unprep_slave_fifo()
2196 dma_unmap_resource(pch->chan.device->dev, pch->fifo_dma, in pl330_unprep_slave_fifo()
2197 1 << pch->burst_sz, pch->dir, 0); in pl330_unprep_slave_fifo()
2198 pch->dir = DMA_NONE; in pl330_unprep_slave_fifo()
2202 static bool pl330_prep_slave_fifo(struct dma_pl330_chan *pch, in pl330_prep_slave_fifo() argument
2205 struct device *dev = pch->chan.device->dev; in pl330_prep_slave_fifo()
2209 if (pch->dir == dma_dir) in pl330_prep_slave_fifo()
2212 pl330_unprep_slave_fifo(pch); in pl330_prep_slave_fifo()
2213 pch->fifo_dma = dma_map_resource(dev, pch->fifo_addr, in pl330_prep_slave_fifo()
2214 1 << pch->burst_sz, dma_dir, 0); in pl330_prep_slave_fifo()
2215 if (dma_mapping_error(dev, pch->fifo_dma)) in pl330_prep_slave_fifo()
2218 pch->dir = dma_dir; in pl330_prep_slave_fifo()
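pl330_prep_slave_fifo() above maps the peripheral FIFO's physical address for DMA, sized to a single beat (1 << pch->burst_sz), and caches the direction so a repeated prep in the same direction is a no-op; pl330_unprep_slave_fifo() is its inverse via dma_unmap_resource(). A hedged sketch of the dma_map_resource()/dma_mapping_error() step, with device, address and size as placeholders:

#include <linux/dma-mapping.h>

/* Map a device FIFO register window for slave DMA. Returns 0 on success
 * and stores the bus address in *fifo_dma. All parameters are placeholders. */
static int map_fifo_sketch(struct device *dev, phys_addr_t fifo_phys,
                           size_t beat_bytes, enum dma_data_direction dir,
                           dma_addr_t *fifo_dma)
{
        *fifo_dma = dma_map_resource(dev, fifo_phys, beat_bytes, dir, 0);
        if (dma_mapping_error(dev, *fifo_dma))
                return -ENOMEM;         /* prep must bail out, as the driver does */
        return 0;
}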
2238 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_config_write() local
2240 pl330_unprep_slave_fifo(pch); in pl330_config_write()
2243 pch->fifo_addr = slave_config->dst_addr; in pl330_config_write()
2245 pch->burst_sz = __ffs(slave_config->dst_addr_width); in pl330_config_write()
2246 pch->burst_len = fixup_burst_len(slave_config->dst_maxburst, in pl330_config_write()
2247 pch->dmac->quirks); in pl330_config_write()
2250 pch->fifo_addr = slave_config->src_addr; in pl330_config_write()
2252 pch->burst_sz = __ffs(slave_config->src_addr_width); in pl330_config_write()
2253 pch->burst_len = fixup_burst_len(slave_config->src_maxburst, in pl330_config_write()
2254 pch->dmac->quirks); in pl330_config_write()
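The __ffs() calls above turn the configured bus width into a log2 burst size. A worked example, assuming the standard dmaengine constant: DMA_SLAVE_BUSWIDTH_4_BYTES has the numeric value 4, so burst_sz = __ffs(4) = 2 and one beat is 1 << 2 = 4 bytes, which is also the size used for the FIFO mapping above.

#include <linux/bitops.h>
#include <linux/dmaengine.h>

static unsigned int beat_bytes_example(void)
{
        unsigned int burst_sz = __ffs(DMA_SLAVE_BUSWIDTH_4_BYTES);      /* = 2 */

        return 1U << burst_sz;                                          /* = 4 bytes */
}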
2263 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_config() local
2265 memcpy(&pch->slave_config, slave_config, sizeof(*slave_config)); in pl330_config()
2272 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_terminate_all() local
2275 struct pl330_dmac *pl330 = pch->dmac; in pl330_terminate_all()
2279 spin_lock_irqsave(&pch->lock, flags); in pl330_terminate_all()
2282 _stop(pch->thread); in pl330_terminate_all()
2283 pch->thread->req[0].desc = NULL; in pl330_terminate_all()
2284 pch->thread->req[1].desc = NULL; in pl330_terminate_all()
2285 pch->thread->req_running = -1; in pl330_terminate_all()
2288 power_down = pch->active; in pl330_terminate_all()
2289 pch->active = false; in pl330_terminate_all()
2292 list_for_each_entry(desc, &pch->submitted_list, node) { in pl330_terminate_all()
2297 list_for_each_entry(desc, &pch->work_list , node) { in pl330_terminate_all()
2302 list_splice_tail_init(&pch->submitted_list, &pl330->desc_pool); in pl330_terminate_all()
2303 list_splice_tail_init(&pch->work_list, &pl330->desc_pool); in pl330_terminate_all()
2304 list_splice_tail_init(&pch->completed_list, &pl330->desc_pool); in pl330_terminate_all()
2305 spin_unlock_irqrestore(&pch->lock, flags); in pl330_terminate_all()
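pl330_terminate_all() above stops the hardware thread, clears both request slots, marks every queued descriptor free, and then splices all three per-channel lists back into the controller's desc_pool in one step. A minimal sketch of that splice-back, with the lock and lists passed in rather than taken from the driver's structs:

#include <linux/list.h>
#include <linux/spinlock.h>

/* Return every queued entry to a shared pool under the channel lock. */
static void recycle_all_sketch(spinlock_t *chan_lock,
                               struct list_head *submitted,
                               struct list_head *work,
                               struct list_head *completed,
                               struct list_head *desc_pool)
{
        unsigned long flags;

        spin_lock_irqsave(chan_lock, flags);
        list_splice_tail_init(submitted, desc_pool);    /* each source list is */
        list_splice_tail_init(work, desc_pool);         /* left empty and      */
        list_splice_tail_init(completed, desc_pool);    /* reinitialized       */
        spin_unlock_irqrestore(chan_lock, flags);
}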
2323 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_pause() local
2324 struct pl330_dmac *pl330 = pch->dmac; in pl330_pause()
2328 spin_lock_irqsave(&pch->lock, flags); in pl330_pause()
2331 _stop(pch->thread); in pl330_pause()
2334 spin_unlock_irqrestore(&pch->lock, flags); in pl330_pause()
2343 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_free_chan_resources() local
2344 struct pl330_dmac *pl330 = pch->dmac; in pl330_free_chan_resources()
2347 tasklet_kill(&pch->task); in pl330_free_chan_resources()
2349 pm_runtime_get_sync(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2352 pl330_release_channel(pch->thread); in pl330_free_chan_resources()
2353 pch->thread = NULL; in pl330_free_chan_resources()
2355 if (pch->cyclic) in pl330_free_chan_resources()
2356 list_splice_tail_init(&pch->work_list, &pch->dmac->desc_pool); in pl330_free_chan_resources()
2359 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2360 pm_runtime_put_autosuspend(pch->dmac->ddma.dev); in pl330_free_chan_resources()
2361 pl330_unprep_slave_fifo(pch); in pl330_free_chan_resources()
2364 static int pl330_get_current_xferred_count(struct dma_pl330_chan *pch, in pl330_get_current_xferred_count() argument
2367 struct pl330_thread *thrd = pch->thread; in pl330_get_current_xferred_count()
2368 struct pl330_dmac *pl330 = pch->dmac; in pl330_get_current_xferred_count()
2381 pm_runtime_mark_last_busy(pch->dmac->ddma.dev); in pl330_get_current_xferred_count()
2398 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_tx_status() local
2409 spin_lock_irqsave(&pch->lock, flags); in pl330_tx_status()
2410 spin_lock(&pch->thread->dmac->lock); in pl330_tx_status()
2412 if (pch->thread->req_running != -1) in pl330_tx_status()
2413 running = pch->thread->req[pch->thread->req_running].desc; in pl330_tx_status()
2415 last_enq = pch->thread->req[pch->thread->lstenq].desc; in pl330_tx_status()
2418 list_for_each_entry(desc, &pch->work_list, node) { in pl330_tx_status()
2423 pl330_get_current_xferred_count(pch, desc); in pl330_tx_status()
2453 spin_unlock(&pch->thread->dmac->lock); in pl330_tx_status()
2454 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tx_status()
2464 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_issue_pending() local
2467 spin_lock_irqsave(&pch->lock, flags); in pl330_issue_pending()
2468 if (list_empty(&pch->work_list)) { in pl330_issue_pending()
2474 WARN_ON(list_empty(&pch->submitted_list)); in pl330_issue_pending()
2475 pch->active = true; in pl330_issue_pending()
2476 pm_runtime_get_sync(pch->dmac->ddma.dev); in pl330_issue_pending()
2478 list_splice_tail_init(&pch->submitted_list, &pch->work_list); in pl330_issue_pending()
2479 spin_unlock_irqrestore(&pch->lock, flags); in pl330_issue_pending()
2481 pl330_tasklet((unsigned long)pch); in pl330_issue_pending()
2492 struct dma_pl330_chan *pch = to_pchan(tx->chan); in pl330_tx_submit() local
2496 spin_lock_irqsave(&pch->lock, flags); in pl330_tx_submit()
2501 if (pch->cyclic) { in pl330_tx_submit()
2509 list_move_tail(&desc->node, &pch->submitted_list); in pl330_tx_submit()
2514 list_add_tail(&last->node, &pch->submitted_list); in pl330_tx_submit()
2515 spin_unlock_irqrestore(&pch->lock, flags); in pl330_tx_submit()
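pl330_tx_submit() and pl330_issue_pending() above are the provider halves of the standard dmaengine submit path, and pl330_config() is reached through dmaengine_slave_config(). A hedged client-side sketch of how a consumer driver would exercise them; the channel name, addresses and widths are placeholders, not values tied to this driver:

#include <linux/dmaengine.h>
#include <linux/err.h>

static int start_rx_sketch(struct device *dev, dma_addr_t buf, size_t len,
                           phys_addr_t fifo_addr)
{
        struct dma_slave_config cfg = {
                .direction      = DMA_DEV_TO_MEM,
                .src_addr       = fifo_addr,
                .src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .src_maxburst   = 1,
        };
        struct dma_async_tx_descriptor *desc;
        struct dma_chan *chan;
        int ret;

        chan = dma_request_chan(dev, "rx");             /* "rx" is a placeholder name */
        if (IS_ERR(chan))
                return PTR_ERR(chan);

        ret = dmaengine_slave_config(chan, &cfg);       /* ends up in pl330_config() */
        if (ret)
                goto out;

        desc = dmaengine_prep_slave_single(chan, buf, len, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT);
        if (!desc) {
                ret = -EIO;
                goto out;
        }

        dmaengine_submit(desc);                         /* pl330_tx_submit() */
        dma_async_issue_pending(chan);                  /* pl330_issue_pending() */
        return 0;                                       /* channel kept for later use */
out:
        dma_release_channel(chan);
        return ret;
}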
2577 static struct dma_pl330_desc *pl330_get_desc(struct dma_pl330_chan *pch) in pl330_get_desc() argument
2579 struct pl330_dmac *pl330 = pch->dmac; in pl330_get_desc()
2580 u8 *peri_id = pch->chan.private; in pl330_get_desc()
2599 desc->pchan = pch; in pl330_get_desc()
2603 desc->peri = peri_id ? pch->chan.chan_id : 0; in pl330_get_desc()
2604 desc->rqcfg.pcfg = &pch->dmac->pcfg; in pl330_get_desc()
2606 dma_async_tx_descriptor_init(&desc->txd, &pch->chan); in pl330_get_desc()
2620 __pl330_prep_dma_memcpy(struct dma_pl330_chan *pch, dma_addr_t dst, in __pl330_prep_dma_memcpy() argument
2623 struct dma_pl330_desc *desc = pl330_get_desc(pch); in __pl330_prep_dma_memcpy()
2626 dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n", in __pl330_prep_dma_memcpy()
2649 struct dma_pl330_chan *pch = desc->pchan; in get_burst_len() local
2650 struct pl330_dmac *pl330 = pch->dmac; in get_burst_len()
2670 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_dma_cyclic() local
2671 struct pl330_dmac *pl330 = pch->dmac; in pl330_prep_dma_cyclic()
2680 dev_err(pch->dmac->ddma.dev, "%s:%d Invalid dma direction\n", in pl330_prep_dma_cyclic()
2685 pl330_config_write(chan, &pch->slave_config, direction); in pl330_prep_dma_cyclic()
2687 if (!pl330_prep_slave_fifo(pch, direction)) in pl330_prep_dma_cyclic()
2691 desc = pl330_get_desc(pch); in pl330_prep_dma_cyclic()
2693 dev_err(pch->dmac->ddma.dev, "%s:%d Unable to fetch desc\n", in pl330_prep_dma_cyclic()
2719 dst = pch->fifo_dma; in pl330_prep_dma_cyclic()
2724 src = pch->fifo_dma; in pl330_prep_dma_cyclic()
2732 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_dma_cyclic()
2733 desc->rqcfg.brst_len = pch->burst_len; in pl330_prep_dma_cyclic()
2748 pch->cyclic = true; in pl330_prep_dma_cyclic()
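pl330_prep_dma_cyclic() above validates the direction, applies the cached slave_config, maps the FIFO, and marks the channel cyclic so completed periods are recycled onto work_list by the tasklet. On the consumer side this path is reached through the generic cyclic prep call; a hedged fragment where all names are placeholders:

#include <linux/dmaengine.h>

/* Kick off a cyclic (ring-buffer) transfer; period_done() is a
 * hypothetical per-period callback supplied by the caller. */
static void start_cyclic_sketch(struct dma_chan *chan, dma_addr_t ring,
                                size_t ring_len, size_t period_len,
                                dma_async_tx_callback period_done, void *arg)
{
        struct dma_async_tx_descriptor *d;

        d = dmaengine_prep_dma_cyclic(chan, ring, ring_len, period_len,
                                      DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
        if (!d)
                return;

        d->callback = period_done;      /* invoked once per completed period */
        d->callback_param = arg;
        dmaengine_submit(d);
        dma_async_issue_pending(chan);
}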
2759 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_dma_memcpy() local
2763 if (unlikely(!pch || !len)) in pl330_prep_dma_memcpy()
2766 pl330 = pch->dmac; in pl330_prep_dma_memcpy()
2768 desc = __pl330_prep_dma_memcpy(pch, dst, src, len); in pl330_prep_dma_memcpy()
2834 struct dma_pl330_chan *pch = to_pchan(chan); in pl330_prep_slave_sg() local
2838 if (unlikely(!pch || !sgl || !sg_len)) in pl330_prep_slave_sg()
2841 pl330_config_write(chan, &pch->slave_config, direction); in pl330_prep_slave_sg()
2843 if (!pl330_prep_slave_fifo(pch, direction)) in pl330_prep_slave_sg()
2850 desc = pl330_get_desc(pch); in pl330_prep_slave_sg()
2852 struct pl330_dmac *pl330 = pch->dmac; in pl330_prep_slave_sg()
2854 dev_err(pch->dmac->ddma.dev, in pl330_prep_slave_sg()
2870 fill_px(&desc->px, pch->fifo_dma, sg_dma_address(sg), in pl330_prep_slave_sg()
2875 fill_px(&desc->px, sg_dma_address(sg), pch->fifo_dma, in pl330_prep_slave_sg()
2879 desc->rqcfg.brst_size = pch->burst_sz; in pl330_prep_slave_sg()
2880 desc->rqcfg.brst_len = pch->burst_len; in pl330_prep_slave_sg()
2922 struct dma_pl330_chan *pch = &pl330->peripherals[pr]; in pl330_debugfs_show() local
2924 if (!pch->thread || thrd->id != pch->thread->id) in pl330_debugfs_show()
2999 struct dma_pl330_chan *pch, *_p; in pl330_probe() local
3093 pl330->peripherals = kcalloc(num_chan, sizeof(*pch), GFP_KERNEL); in pl330_probe()
3100 pch = &pl330->peripherals[i]; in pl330_probe()
3102 pch->chan.private = adev->dev.of_node; in pl330_probe()
3103 INIT_LIST_HEAD(&pch->submitted_list); in pl330_probe()
3104 INIT_LIST_HEAD(&pch->work_list); in pl330_probe()
3105 INIT_LIST_HEAD(&pch->completed_list); in pl330_probe()
3106 spin_lock_init(&pch->lock); in pl330_probe()
3107 pch->thread = NULL; in pl330_probe()
3108 pch->chan.device = pd; in pl330_probe()
3109 pch->dmac = pl330; in pl330_probe()
3110 pch->dir = DMA_NONE; in pl330_probe()
3113 list_add_tail(&pch->chan.device_node, &pd->channels); in pl330_probe()
3183 list_for_each_entry_safe(pch, _p, &pl330->ddma.channels, in pl330_probe()
3187 list_del(&pch->chan.device_node); in pl330_probe()
3190 if (pch->thread) { in pl330_probe()
3191 pl330_terminate_all(&pch->chan); in pl330_probe()
3192 pl330_free_chan_resources(&pch->chan); in pl330_probe()
3209 struct dma_pl330_chan *pch, *_p; in pl330_remove() local
3226 list_for_each_entry_safe(pch, _p, &pl330->ddma.channels, in pl330_remove()
3230 list_del(&pch->chan.device_node); in pl330_remove()
3233 if (pch->thread) { in pl330_remove()
3234 pl330_terminate_all(&pch->chan); in pl330_remove()
3235 pl330_free_chan_resources(&pch->chan); in pl330_remove()
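
Taken together, the references in this listing outline the shape of struct dma_pl330_chan. The reconstruction below is derived from this listing alone; field order, exact types, and any members not referenced here are guesses, not the driver's actual definition:

#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/list.h>
#include <linux/spinlock.h>

struct pl330_dmac;              /* opaque here: controller state */
struct pl330_thread;            /* opaque here: hardware DMA thread */

struct dma_pl330_chan_sketch {
        struct dma_chan          chan;            /* embedded dmaengine channel */
        struct dma_slave_config  slave_config;    /* cached by pl330_config() */
        struct pl330_dmac       *dmac;            /* owning controller */
        struct pl330_thread     *thread;          /* NULL until a channel is requested */
        struct tasklet_struct    task;            /* runs pl330_tasklet() */
        spinlock_t               lock;            /* guards the three lists below */
        struct list_head         submitted_list;  /* filled by pl330_tx_submit() */
        struct list_head         work_list;       /* filled by pl330_issue_pending() */
        struct list_head         completed_list;  /* drained by the tasklet */
        phys_addr_t              fifo_addr;       /* from the slave config */
        dma_addr_t               fifo_dma;        /* mapped by pl330_prep_slave_fifo() */
        enum dma_data_direction  dir;             /* DMA_NONE until the first prep */
        unsigned int             burst_sz;        /* log2(bytes per beat) */
        unsigned int             burst_len;
        bool                     cyclic;
        bool                     active;
};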