Lines Matching +full:short +full:- +full:descriptor
1 // SPDX-License-Identifier: GPL-2.0-only
21 #include <linux/dma-mapping.h>
46 if (hsuc->direction == DMA_MEM_TO_DEV) in hsu_chan_enable()
48 else if (hsuc->direction == DMA_DEV_TO_MEM) in hsu_chan_enable()
56 struct dma_slave_config *config = &hsuc->config; in hsu_dma_chan_start()
57 struct hsu_dma_desc *desc = hsuc->desc; in hsu_dma_chan_start()
62 if (hsuc->direction == DMA_MEM_TO_DEV) { in hsu_dma_chan_start()
63 bsr = config->dst_maxburst; in hsu_dma_chan_start()
64 mtsr = config->dst_addr_width; in hsu_dma_chan_start()
65 } else if (hsuc->direction == DMA_DEV_TO_MEM) { in hsu_dma_chan_start()
66 bsr = config->src_maxburst; in hsu_dma_chan_start()
67 mtsr = config->src_addr_width; in hsu_dma_chan_start()
77 count = desc->nents - desc->active; in hsu_dma_chan_start()
79 hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr); in hsu_dma_chan_start()
80 hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len); in hsu_dma_chan_start()
86 desc->active++; in hsu_dma_chan_start()
88 /* Only for the last descriptor in the chain */ in hsu_dma_chan_start()
89 dcr |= HSU_CH_DCR_CHSOD(count - 1); in hsu_dma_chan_start()
90 dcr |= HSU_CH_DCR_CHDI(count - 1); in hsu_dma_chan_start()
112 /* Get the next descriptor */ in hsu_dma_start_transfer()
113 vdesc = vchan_next_desc(&hsuc->vchan); in hsu_dma_start_transfer()
115 hsuc->desc = NULL; in hsu_dma_start_transfer()
119 list_del(&vdesc->node); in hsu_dma_start_transfer()
120 hsuc->desc = to_hsu_dma_desc(vdesc); in hsu_dma_start_transfer()
122 /* Start the channel with a new descriptor */ in hsu_dma_start_transfer()
127 * hsu_dma_get_status() - get DMA channel status
143 int hsu_dma_get_status(struct hsu_dma_chip *chip, unsigned short nr, in hsu_dma_get_status()
151 if (nr >= chip->hsu->nr_channels) in hsu_dma_get_status()
152 return -EINVAL; in hsu_dma_get_status()
154 hsuc = &chip->hsu->chan[nr]; in hsu_dma_get_status()
160 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
162 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_get_status()
167 return -EIO; in hsu_dma_get_status()
174 * At this point, at least one of Descriptor Time Out, Channel Error in hsu_dma_get_status()
175 * or Descriptor Done bits must be set. Clear the Descriptor Time Out in hsu_dma_get_status()
176 * bits and if sr is still non-zero, it must be channel error or in hsu_dma_get_status()
177 * descriptor done which are higher priority than timeout and handled in hsu_dma_get_status()
189 * hsu_dma_do_irq() - DMA interrupt handler
195 * This function handles Channel Error and Descriptor Done interrupts.
202 int hsu_dma_do_irq(struct hsu_dma_chip *chip, unsigned short nr, u32 status) in hsu_dma_do_irq()
210 if (nr >= chip->hsu->nr_channels) in hsu_dma_do_irq()
213 hsuc = &chip->hsu->chan[nr]; in hsu_dma_do_irq()
214 stat = this_cpu_ptr(hsuc->vchan.chan.local); in hsu_dma_do_irq()
216 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
217 desc = hsuc->desc; in hsu_dma_do_irq()
220 desc->status = DMA_ERROR; in hsu_dma_do_irq()
221 } else if (desc->active < desc->nents) { in hsu_dma_do_irq()
224 vchan_cookie_complete(&desc->vdesc); in hsu_dma_do_irq()
225 desc->status = DMA_COMPLETE; in hsu_dma_do_irq()
226 stat->bytes_transferred += desc->length; in hsu_dma_do_irq()
230 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_do_irq()
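
The two exported helpers above are meant to be called back to back from the UART driver that owns the shared interrupt line. Below is a minimal sketch of that pattern, loosely modeled on the 8250 Intel MID glue; it assumes the documented return convention of hsu_dma_get_status() (positive = descriptor timeout to be handled on the UART side, 0 = a status word to pass to hsu_dma_do_irq(), negative = error), and the handler name, chip pointer and channel index are illustrative.

#include <linux/interrupt.h>
#include <linux/dma/hsu.h>	/* struct hsu_dma_chip, hsu_dma_get_status(), hsu_dma_do_irq() */

static irqreturn_t my_uart_dma_irq(int irq, void *data)
{
	struct hsu_dma_chip *chip = data;	/* illustrative per-device glue */
	unsigned short nr = 1;			/* illustrative: odd channels are DEV_TO_MEM */
	u32 status;
	int ret, handled = 0;

	ret = hsu_dma_get_status(chip, nr, &status);
	if (ret < 0)
		return IRQ_NONE;		/* invalid channel number */
	if (ret > 0) {
		/* descriptor timeout: let the UART side drain the FIFO */
		handled = 1;
	} else {
		/* Channel Error or Descriptor Done: advance/complete the descriptor */
		handled = hsu_dma_do_irq(chip, nr, status);
	}

	return IRQ_RETVAL(handled);
}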
244 desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT); in hsu_dma_alloc_desc()
245 if (!desc->sg) { in hsu_dma_alloc_desc()
257 kfree(desc->sg); in hsu_dma_desc_free()
276 desc->sg[i].addr = sg_dma_address(sg); in hsu_dma_prep_slave_sg()
277 desc->sg[i].len = sg_dma_len(sg); in hsu_dma_prep_slave_sg()
279 desc->length += sg_dma_len(sg); in hsu_dma_prep_slave_sg()
282 desc->nents = sg_len; in hsu_dma_prep_slave_sg()
283 desc->direction = direction; in hsu_dma_prep_slave_sg()
284 /* desc->active = 0 by kzalloc */ in hsu_dma_prep_slave_sg()
285 desc->status = DMA_IN_PROGRESS; in hsu_dma_prep_slave_sg()
287 return vchan_tx_prep(&hsuc->vchan, &desc->vdesc, flags); in hsu_dma_prep_slave_sg()
295 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
296 if (vchan_issue_pending(&hsuc->vchan) && !hsuc->desc) in hsu_dma_issue_pending()
298 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_issue_pending()
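
hsu_dma_prep_slave_sg() and hsu_dma_issue_pending() are not called directly; they are reached through the generic dmaengine client API. The following is a hedged sketch of a client driving that path for a DEV_TO_MEM transfer; the function name, FIFO address, bus width and burst size are illustrative values, not taken from this driver.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Illustrative: launch an RX transfer on an already-requested channel */
static dma_cookie_t my_start_rx(struct dma_chan *chan, struct scatterlist *sgl,
				unsigned int sg_len, dma_addr_t fifo_addr)
{
	struct dma_slave_config cfg = {
		.direction = DMA_DEV_TO_MEM,
		.src_addr = fifo_addr,				/* illustrative FIFO address */
		.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,	/* becomes mtsr above */
		.src_maxburst = 8,				/* becomes bsr above */
	};
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	if (dmaengine_slave_config(chan, &cfg))
		return -EINVAL;

	/* ends up in hsu_dma_prep_slave_sg() via chan->device->device_prep_slave_sg */
	tx = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_DEV_TO_MEM,
				     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx)
		return -ENOMEM;

	cookie = dmaengine_submit(tx);		/* queue on the virtual channel */
	dma_async_issue_pending(chan);		/* kicks hsu_dma_issue_pending() */
	return cookie;
}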
303 struct hsu_dma_desc *desc = hsuc->desc; in hsu_dma_active_desc_size()
307 for (i = desc->active; i < desc->nents; i++) in hsu_dma_active_desc_size()
308 bytes += desc->sg[i].len; in hsu_dma_active_desc_size()
310 i = HSU_DMA_CHAN_NR_DESC - 1; in hsu_dma_active_desc_size()
313 } while (--i >= 0); in hsu_dma_active_desc_size()
331 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_tx_status()
332 vdesc = vchan_find_desc(&hsuc->vchan, cookie); in hsu_dma_tx_status()
333 if (hsuc->desc && cookie == hsuc->desc->vdesc.tx.cookie) { in hsu_dma_tx_status()
336 status = hsuc->desc->status; in hsu_dma_tx_status()
338 bytes = to_hsu_dma_desc(vdesc)->length; in hsu_dma_tx_status()
341 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_tx_status()
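
hsu_dma_tx_status() backs the client-visible dmaengine_tx_status() call, and the residue it reports is burst-granular, as advertised later in hsu_dma_probe(). A short sketch of a client polling progress; the cookie and channel are assumed to come from a submission like the one above.

/* Illustrative: check how much of a submitted transfer is still outstanding */
static void my_check_progress(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(chan, cookie, &state);
	if (status == DMA_COMPLETE)
		pr_debug("transfer done\n");
	else
		pr_debug("status %d, %u bytes left\n", status, state.residue);
}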
351 memcpy(&hsuc->config, config, sizeof(hsuc->config)); in hsu_dma_slave_config()
361 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_pause()
362 if (hsuc->desc && hsuc->desc->status == DMA_IN_PROGRESS) { in hsu_dma_pause()
364 hsuc->desc->status = DMA_PAUSED; in hsu_dma_pause()
366 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_pause()
376 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_resume()
377 if (hsuc->desc && hsuc->desc->status == DMA_PAUSED) { in hsu_dma_resume()
378 hsuc->desc->status = DMA_IN_PROGRESS; in hsu_dma_resume()
381 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_resume()
392 spin_lock_irqsave(&hsuc->vchan.lock, flags); in hsu_dma_terminate_all()
395 if (hsuc->desc) { in hsu_dma_terminate_all()
396 hsu_dma_desc_free(&hsuc->desc->vdesc); in hsu_dma_terminate_all()
397 hsuc->desc = NULL; in hsu_dma_terminate_all()
400 vchan_get_all_descriptors(&hsuc->vchan, &head); in hsu_dma_terminate_all()
401 spin_unlock_irqrestore(&hsuc->vchan.lock, flags); in hsu_dma_terminate_all()
402 vchan_dma_desc_free_list(&hsuc->vchan, &head); in hsu_dma_terminate_all()
416 vchan_synchronize(&hsuc->vchan); in hsu_dma_synchronize()
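
device_terminate_all() and device_synchronize() pair up behind the client-side terminate helpers: terminating only unschedules and frees descriptors, while synchronize waits for the virtual channel's completion work to drain. A hedged teardown sketch from the client side; the helper name is illustrative.

/* Illustrative: stop DMA before freeing buffers that callbacks might still touch */
static void my_shutdown_dma(struct dma_chan *chan)
{
	/*
	 * dmaengine_terminate_sync() invokes device_terminate_all()
	 * (hsu_dma_terminate_all) and then device_synchronize()
	 * (hsu_dma_synchronize), so no completion callback can still be
	 * running once it returns.
	 */
	dmaengine_terminate_sync(chan);
}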
422 void __iomem *addr = chip->regs + chip->offset; in hsu_dma_probe()
423 unsigned short i; in hsu_dma_probe()
426 hsu = devm_kzalloc(chip->dev, sizeof(*hsu), GFP_KERNEL); in hsu_dma_probe()
428 return -ENOMEM; in hsu_dma_probe()
430 chip->hsu = hsu; in hsu_dma_probe()
433 hsu->nr_channels = (chip->length - chip->offset) / HSU_DMA_CHAN_LENGTH; in hsu_dma_probe()
435 hsu->chan = devm_kcalloc(chip->dev, hsu->nr_channels, in hsu_dma_probe()
436 sizeof(*hsu->chan), GFP_KERNEL); in hsu_dma_probe()
437 if (!hsu->chan) in hsu_dma_probe()
438 return -ENOMEM; in hsu_dma_probe()
440 INIT_LIST_HEAD(&hsu->dma.channels); in hsu_dma_probe()
441 for (i = 0; i < hsu->nr_channels; i++) { in hsu_dma_probe()
442 struct hsu_dma_chan *hsuc = &hsu->chan[i]; in hsu_dma_probe()
444 hsuc->vchan.desc_free = hsu_dma_desc_free; in hsu_dma_probe()
445 vchan_init(&hsuc->vchan, &hsu->dma); in hsu_dma_probe()
447 hsuc->direction = (i & 0x1) ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV; in hsu_dma_probe()
448 hsuc->reg = addr + i * HSU_DMA_CHAN_LENGTH; in hsu_dma_probe()
451 dma_cap_set(DMA_SLAVE, hsu->dma.cap_mask); in hsu_dma_probe()
452 dma_cap_set(DMA_PRIVATE, hsu->dma.cap_mask); in hsu_dma_probe()
454 hsu->dma.device_free_chan_resources = hsu_dma_free_chan_resources; in hsu_dma_probe()
456 hsu->dma.device_prep_slave_sg = hsu_dma_prep_slave_sg; in hsu_dma_probe()
458 hsu->dma.device_issue_pending = hsu_dma_issue_pending; in hsu_dma_probe()
459 hsu->dma.device_tx_status = hsu_dma_tx_status; in hsu_dma_probe()
461 hsu->dma.device_config = hsu_dma_slave_config; in hsu_dma_probe()
462 hsu->dma.device_pause = hsu_dma_pause; in hsu_dma_probe()
463 hsu->dma.device_resume = hsu_dma_resume; in hsu_dma_probe()
464 hsu->dma.device_terminate_all = hsu_dma_terminate_all; in hsu_dma_probe()
465 hsu->dma.device_synchronize = hsu_dma_synchronize; in hsu_dma_probe()
467 hsu->dma.src_addr_widths = HSU_DMA_BUSWIDTHS; in hsu_dma_probe()
468 hsu->dma.dst_addr_widths = HSU_DMA_BUSWIDTHS; in hsu_dma_probe()
469 hsu->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in hsu_dma_probe()
470 hsu->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in hsu_dma_probe()
472 hsu->dma.dev = chip->dev; in hsu_dma_probe()
474 dma_set_max_seg_size(hsu->dma.dev, HSU_CH_DxTSR_MASK); in hsu_dma_probe()
476 ret = dma_async_device_register(&hsu->dma); in hsu_dma_probe()
480 dev_info(chip->dev, "Found HSU DMA, %d channels\n", hsu->nr_channels); in hsu_dma_probe()
487 struct hsu_dma *hsu = chip->hsu; in hsu_dma_remove()
488 unsigned short i; in hsu_dma_remove()
490 dma_async_device_unregister(&hsu->dma); in hsu_dma_remove()
492 for (i = 0; i < hsu->nr_channels; i++) { in hsu_dma_remove()
493 struct hsu_dma_chan *hsuc = &hsu->chan[i]; in hsu_dma_remove()
495 tasklet_kill(&hsuc->vchan.task); in hsu_dma_remove()
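
hsu_dma_probe() and hsu_dma_remove() are driven by a small glue driver that maps the registers and fills in struct hsu_dma_chip; the fields used above (dev, regs, offset, length) are what the glue must supply before nr_channels can be derived. The sketch below is an assumption-laden platform-device example, not the in-tree glue (which lives in the 8250 serial drivers); the zero channel offset and the resource handling are illustrative.

#include <linux/dma/hsu.h>
#include <linux/io.h>
#include <linux/platform_device.h>

/* Illustrative platform glue; register layout details are assumptions */
static int my_hsu_glue_probe(struct platform_device *pdev)
{
	struct hsu_dma_chip *chip;
	struct resource *res;

	chip = devm_kzalloc(&pdev->dev, sizeof(*chip), GFP_KERNEL);
	if (!chip)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	chip->regs = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(chip->regs))
		return PTR_ERR(chip->regs);

	chip->dev = &pdev->dev;
	chip->offset = 0;			/* assumption: channels start at the base */
	chip->length = resource_size(res);	/* hsu_dma_probe() derives nr_channels from this */

	platform_set_drvdata(pdev, chip);
	return hsu_dma_probe(chip);		/* registers the dmaengine device */
}

static void my_hsu_glue_remove(struct platform_device *pdev)
{
	struct hsu_dma_chip *chip = platform_get_drvdata(pdev);

	hsu_dma_remove(chip);	/* unregisters and kills the per-channel tasklets */
}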