Lines Matching refs:atdma

125 struct at_dma *atdma = to_at_dma(chan->device); in atc_alloc_descriptor() local
128 desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys); in atc_alloc_descriptor()
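
The two atc_alloc_descriptor() hits above are where hardware-visible descriptors enter the driver: each one is carved out of the controller-wide dma_desc_pool. A minimal sketch of that pattern follows; the tx_list/txd initialisation and the dma_async_tx_descriptor_init() call are assumed from the generic dmaengine descriptor idiom rather than taken from the lines above.

/* Minimal sketch of descriptor allocation from the coherent pool. */
static struct at_desc *atc_alloc_descriptor(struct dma_chan *chan,
					    gfp_t gfp_flags)
{
	struct at_dma *atdma = to_at_dma(chan->device);
	struct at_desc *desc;
	dma_addr_t phys;

	/* Zeroed descriptor; phys is the bus address the controller
	 * follows when chaining descriptors. */
	desc = dma_pool_zalloc(atdma->dma_desc_pool, gfp_flags, &phys);
	if (!desc)
		return NULL;

	INIT_LIST_HEAD(&desc->tx_list);		/* assumed field */
	dma_async_tx_descriptor_init(&desc->txd, chan);
	desc->txd.phys = phys;

	return desc;
}
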
230 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_dostart() local
261 dma_writel(atdma, CHER, atchan->mask); in atc_dostart()
456 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_chain_complete() local
487 dma_pool_free(atdma->memset_pool, desc->memset_vaddr, in atc_chain_complete()
606 struct at_dma *atdma = (struct at_dma *)dev_id; in at_dma_interrupt() local
613 imr = dma_readl(atdma, EBCIMR); in at_dma_interrupt()
614 status = dma_readl(atdma, EBCISR); in at_dma_interrupt()
620 dev_vdbg(atdma->dma_common.dev, in at_dma_interrupt()
624 for (i = 0; i < atdma->dma_common.chancnt; i++) { in at_dma_interrupt()
625 atchan = &atdma->chan[i]; in at_dma_interrupt()
629 dma_writel(atdma, CHDR, in at_dma_interrupt()
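
The at_dma_interrupt() hits give the handler's outline: read EBCIMR and EBCISR, combine them into the set of pending sources, and if anything is pending walk every channel, disabling a channel through CHDR on error before kicking its tasklet. A condensed sketch of that loop; the AT_DMA_BTC()/AT_DMA_ERR() bit macros, the per-channel tasklet, and the arguments of the truncated CHDR write are assumptions based on the driver's register header, not on the lines above.

/* Condensed sketch of the ISR pattern referenced above. */
static irqreturn_t at_dma_interrupt(int irq, void *dev_id)
{
	struct at_dma *atdma = dev_id;
	struct at_dma_chan *atchan;
	u32 status, pending, imr;
	irqreturn_t ret = IRQ_NONE;
	int i;

	do {
		imr = dma_readl(atdma, EBCIMR);		/* enabled sources */
		status = dma_readl(atdma, EBCISR);	/* raw status */
		pending = status & imr;
		if (!pending)
			break;

		dev_vdbg(atdma->dma_common.dev,
			 "interrupt: status = 0x%08x, 0x%08x, 0x%08x\n",
			 status, imr, pending);

		for (i = 0; i < atdma->dma_common.chancnt; i++) {
			atchan = &atdma->chan[i];
			if (!(pending & (AT_DMA_BTC(i) | AT_DMA_ERR(i))))
				continue;
			if (pending & AT_DMA_ERR(i))
				/* assumed error path: stop the faulting channel */
				dma_writel(atdma, CHDR,
					   AT_DMA_RES(i) | atchan->mask);
			tasklet_schedule(&atchan->tasklet);
			ret = IRQ_HANDLED;
		}
	} while (pending);

	return ret;
}
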
904 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset() local
924 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr); in atc_prep_dma_memset()
961 dma_pool_free(atdma->memset_pool, vaddr, paddr); in atc_prep_dma_memset()
972 struct at_dma *atdma = to_at_dma(chan->device); in atc_prep_dma_memset_sg() local
989 vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, &paddr); in atc_prep_dma_memset_sg()
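
Both memset prep routines draw a small bounce buffer from memset_pool before building any descriptor: the controller has no native fill mode, so the fill byte is staged in DMA-able memory and read back with a fixed source address; on a later failure the buffer goes back with dma_pool_free(), as line 961 shows. A sketch of just that staging step, under a hypothetical helper name (atc_memset_stage_value() is not the driver's function, and the byte-replication constant is an assumption about how the 32-bit source word is filled):

/* Hypothetical helper illustrating the bounce-buffer staging step. */
static void *atc_memset_stage_value(struct at_dma *atdma, int value,
				    dma_addr_t *paddr)
{
	char *vaddr;

	vaddr = dma_pool_alloc(atdma->memset_pool, GFP_NOWAIT, paddr);
	if (!vaddr)
		return NULL;

	/* dmaengine memset only defines the low byte of 'value';
	 * replicate it across the 32-bit word the controller reads. */
	*(u32 *)vaddr = (u8)value * 0x01010101U;

	return vaddr;
}
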
1368 struct at_dma *atdma = to_at_dma(chan->device); in atc_pause() local
1376 dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id)); in atc_pause()
1387 struct at_dma *atdma = to_at_dma(chan->device); in atc_resume() local
1398 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id)); in atc_resume()
1409 struct at_dma *atdma = to_at_dma(chan->device); in atc_terminate_all() local
1424 dma_writel(atdma, CHDR, AT_DMA_RES(chan_id) | atchan->mask); in atc_terminate_all()
1427 while (dma_readl(atdma, CHSR) & atchan->mask) in atc_terminate_all()
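
The atc_pause()/atc_resume()/atc_terminate_all() hits show that channel suspend and resume are each a single register write, CHER with AT_DMA_SUSP() to pause and CHDR with AT_DMA_RES() to resume, while terminate additionally ORs in atchan->mask and then polls CHSR until the channel bit drops (lines 1424-1427). A stripped-down sketch of the pause/resume pair; to_at_dma_chan() and the chan_id lookup follow the driver's usual conventions but are not shown in the lines above, and locking plus the paused-state bookkeeping are omitted:

/* Stripped-down sketch of channel suspend/resume via CHER/CHDR. */
static int atc_pause(struct dma_chan *chan)
{
	struct at_dma_chan *atchan = to_at_dma_chan(chan);
	struct at_dma *atdma = to_at_dma(chan->device);
	int chan_id = atchan->chan_common.chan_id;

	/* Request suspend through the Channel Handler Enable Register. */
	dma_writel(atdma, CHER, AT_DMA_SUSP(chan_id));
	return 0;
}

static int atc_resume(struct dma_chan *chan)
{
	struct at_dma_chan *atchan = to_at_dma_chan(chan);
	struct at_dma *atdma = to_at_dma(chan->device);
	int chan_id = atchan->chan_common.chan_id;

	/* Clear the suspend request through the disable register. */
	dma_writel(atdma, CHDR, AT_DMA_RES(chan_id));
	return 0;
}
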
1525 struct at_dma *atdma = to_at_dma(chan->device); in atc_alloc_chan_resources() local
1552 BUG_ON(!atslave->dma_dev || atslave->dma_dev != atdma->dma_common.dev); in atc_alloc_chan_resources()
1563 dev_err(atdma->dma_common.dev, in atc_alloc_chan_resources()
1588 struct at_dma *atdma = to_at_dma(chan->device); in atc_free_chan_resources() local
1601 dma_pool_free(atdma->dma_desc_pool, desc, desc->txd.phys); in atc_free_chan_resources()
1757 static void at_dma_off(struct at_dma *atdma) in at_dma_off() argument
1759 dma_writel(atdma, EN, 0); in at_dma_off()
1762 dma_writel(atdma, EBCIDR, -1L); in at_dma_off()
1765 while (dma_readl(atdma, CHSR) & atdma->all_chan_mask) in at_dma_off()
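
at_dma_off() is the global quiesce used by probe, remove and suspend: disable the controller, mask every interrupt source, and wait for the channel status bits to clear. A sketch built from the three register accesses above; only the cpu_relax() in the busy-wait is an addition:

/* Sketch of the global controller quiesce referenced above. */
static void at_dma_off(struct at_dma *atdma)
{
	dma_writel(atdma, EN, 0);

	/* Mask (disable) every buffer-transfer and error interrupt. */
	dma_writel(atdma, EBCIDR, -1L);

	/* Wait for all channels to report idle. */
	while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
		cpu_relax();
}
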
1772 struct at_dma *atdma; in at_dma_probe() local
1803 atdma = kzalloc(size, GFP_KERNEL); in at_dma_probe()
1804 if (!atdma) in at_dma_probe()
1808 atdma->dma_common.cap_mask = plat_dat->cap_mask; in at_dma_probe()
1809 atdma->all_chan_mask = (1 << plat_dat->nr_channels) - 1; in at_dma_probe()
1817 atdma->regs = ioremap(io->start, size); in at_dma_probe()
1818 if (!atdma->regs) { in at_dma_probe()
1823 atdma->clk = clk_get(&pdev->dev, "dma_clk"); in at_dma_probe()
1824 if (IS_ERR(atdma->clk)) { in at_dma_probe()
1825 err = PTR_ERR(atdma->clk); in at_dma_probe()
1828 err = clk_prepare_enable(atdma->clk); in at_dma_probe()
1833 at_dma_off(atdma); in at_dma_probe()
1835 err = request_irq(irq, at_dma_interrupt, 0, "at_hdmac", atdma); in at_dma_probe()
1839 platform_set_drvdata(pdev, atdma); in at_dma_probe()
1842 atdma->dma_desc_pool = dma_pool_create("at_hdmac_desc_pool", in at_dma_probe()
1845 if (!atdma->dma_desc_pool) { in at_dma_probe()
1852 atdma->memset_pool = dma_pool_create("at_hdmac_memset_pool", in at_dma_probe()
1854 if (!atdma->memset_pool) { in at_dma_probe()
1861 while (dma_readl(atdma, EBCISR)) in at_dma_probe()
1865 INIT_LIST_HEAD(&atdma->dma_common.channels); in at_dma_probe()
1867 struct at_dma_chan *atchan = &atdma->chan[i]; in at_dma_probe()
1871 atchan->chan_common.device = &atdma->dma_common; in at_dma_probe()
1874 &atdma->dma_common.channels); in at_dma_probe()
1876 atchan->ch_regs = atdma->regs + ch_regs(i); in at_dma_probe()
1885 atc_enable_chan_irq(atdma, i); in at_dma_probe()
1889 atdma->dma_common.device_alloc_chan_resources = atc_alloc_chan_resources; in at_dma_probe()
1890 atdma->dma_common.device_free_chan_resources = atc_free_chan_resources; in at_dma_probe()
1891 atdma->dma_common.device_tx_status = atc_tx_status; in at_dma_probe()
1892 atdma->dma_common.device_issue_pending = atc_issue_pending; in at_dma_probe()
1893 atdma->dma_common.dev = &pdev->dev; in at_dma_probe()
1896 if (dma_has_cap(DMA_INTERLEAVE, atdma->dma_common.cap_mask)) in at_dma_probe()
1897 atdma->dma_common.device_prep_interleaved_dma = atc_prep_dma_interleaved; in at_dma_probe()
1899 if (dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask)) in at_dma_probe()
1900 atdma->dma_common.device_prep_dma_memcpy = atc_prep_dma_memcpy; in at_dma_probe()
1902 if (dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask)) { in at_dma_probe()
1903 atdma->dma_common.device_prep_dma_memset = atc_prep_dma_memset; in at_dma_probe()
1904 atdma->dma_common.device_prep_dma_memset_sg = atc_prep_dma_memset_sg; in at_dma_probe()
1905 atdma->dma_common.fill_align = DMAENGINE_ALIGN_4_BYTES; in at_dma_probe()
1908 if (dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask)) { in at_dma_probe()
1909 atdma->dma_common.device_prep_slave_sg = atc_prep_slave_sg; in at_dma_probe()
1911 dma_cap_set(DMA_CYCLIC, atdma->dma_common.cap_mask); in at_dma_probe()
1912 atdma->dma_common.device_prep_dma_cyclic = atc_prep_dma_cyclic; in at_dma_probe()
1913 atdma->dma_common.device_config = atc_config; in at_dma_probe()
1914 atdma->dma_common.device_pause = atc_pause; in at_dma_probe()
1915 atdma->dma_common.device_resume = atc_resume; in at_dma_probe()
1916 atdma->dma_common.device_terminate_all = atc_terminate_all; in at_dma_probe()
1917 atdma->dma_common.src_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
1918 atdma->dma_common.dst_addr_widths = ATC_DMA_BUSWIDTHS; in at_dma_probe()
1919 atdma->dma_common.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in at_dma_probe()
1920 atdma->dma_common.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in at_dma_probe()
1923 dma_writel(atdma, EN, AT_DMA_ENABLE); in at_dma_probe()
1926 dma_has_cap(DMA_MEMCPY, atdma->dma_common.cap_mask) ? "cpy " : "", in at_dma_probe()
1927 dma_has_cap(DMA_MEMSET, atdma->dma_common.cap_mask) ? "set " : "", in at_dma_probe()
1928 dma_has_cap(DMA_SLAVE, atdma->dma_common.cap_mask) ? "slave " : "", in at_dma_probe()
1931 err = dma_async_device_register(&atdma->dma_common); in at_dma_probe()
1944 at_dma_xlate, atdma); in at_dma_probe()
1954 dma_async_device_unregister(&atdma->dma_common); in at_dma_probe()
1956 dma_pool_destroy(atdma->memset_pool); in at_dma_probe()
1958 dma_pool_destroy(atdma->dma_desc_pool); in at_dma_probe()
1960 free_irq(platform_get_irq(pdev, 0), atdma); in at_dma_probe()
1962 clk_disable_unprepare(atdma->clk); in at_dma_probe()
1964 clk_put(atdma->clk); in at_dma_probe()
1966 iounmap(atdma->regs); in at_dma_probe()
1967 atdma->regs = NULL; in at_dma_probe()
1971 kfree(atdma); in at_dma_probe()
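
The long at_dma_probe() run encodes the setup order that the error path at the bottom unwinds in reverse: allocate the at_dma state, map the registers, get and enable dma_clk, quiesce the controller, install the interrupt handler, create the descriptor and memset pools, initialise the channels and dmaengine callbacks, enable the controller, then register the device. A compressed sketch of just the early setup/unwind skeleton; the resource-lookup calls and error-label names are illustrative, and the pool creation, channel init and capability wiring are elided:

/* Compressed sketch of the probe setup/teardown ordering shown above. */
static int at_dma_probe(struct platform_device *pdev)
{
	struct resource *io;
	struct at_dma *atdma;
	int irq, err;

	io = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	irq = platform_get_irq(pdev, 0);
	if (!io || irq < 0)
		return -EINVAL;

	/* the real allocation size also covers the per-channel array */
	atdma = kzalloc(sizeof(*atdma), GFP_KERNEL);
	if (!atdma)
		return -ENOMEM;

	atdma->regs = ioremap(io->start, resource_size(io));
	if (!atdma->regs) {
		err = -ENOMEM;
		goto err_free;
	}

	atdma->clk = clk_get(&pdev->dev, "dma_clk");
	if (IS_ERR(atdma->clk)) {
		err = PTR_ERR(atdma->clk);
		goto err_unmap;
	}
	err = clk_prepare_enable(atdma->clk);
	if (err)
		goto err_clk_put;

	at_dma_off(atdma);	/* start from a quiesced controller */

	err = request_irq(irq, at_dma_interrupt, 0, "at_hdmac", atdma);
	if (err)
		goto err_clk_off;

	platform_set_drvdata(pdev, atdma);

	/* ... create dma_desc_pool and memset_pool, init channels,
	 * fill in the dmaengine callbacks, write AT_DMA_ENABLE, then
	 * dma_async_device_register() ... */

	return 0;

err_clk_off:
	clk_disable_unprepare(atdma->clk);
err_clk_put:
	clk_put(atdma->clk);
err_unmap:
	iounmap(atdma->regs);
err_free:
	kfree(atdma);
	return err;
}
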
1977 struct at_dma *atdma = platform_get_drvdata(pdev); in at_dma_remove() local
1981 at_dma_off(atdma); in at_dma_remove()
1984 dma_async_device_unregister(&atdma->dma_common); in at_dma_remove()
1986 dma_pool_destroy(atdma->memset_pool); in at_dma_remove()
1987 dma_pool_destroy(atdma->dma_desc_pool); in at_dma_remove()
1988 free_irq(platform_get_irq(pdev, 0), atdma); in at_dma_remove()
1990 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_remove()
1995 atc_disable_chan_irq(atdma, chan->chan_id); in at_dma_remove()
2001 clk_disable_unprepare(atdma->clk); in at_dma_remove()
2002 clk_put(atdma->clk); in at_dma_remove()
2004 iounmap(atdma->regs); in at_dma_remove()
2005 atdma->regs = NULL; in at_dma_remove()
2010 kfree(atdma); in at_dma_remove()
2017 struct at_dma *atdma = platform_get_drvdata(pdev); in at_dma_shutdown() local
2020 clk_disable_unprepare(atdma->clk); in at_dma_shutdown()
2025 struct at_dma *atdma = dev_get_drvdata(dev); in at_dma_prepare() local
2028 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_prepare()
2059 struct at_dma *atdma = dev_get_drvdata(dev); in at_dma_suspend_noirq() local
2063 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_suspend_noirq()
2071 atdma->save_imr = dma_readl(atdma, EBCIMR); in at_dma_suspend_noirq()
2074 at_dma_off(atdma); in at_dma_suspend_noirq()
2075 clk_disable_unprepare(atdma->clk); in at_dma_suspend_noirq()
2081 struct at_dma *atdma = to_at_dma(atchan->chan_common.device); in atc_resume_cyclic() local
2090 dma_writel(atdma, CHER, atchan->mask); in atc_resume_cyclic()
2100 struct at_dma *atdma = dev_get_drvdata(dev); in at_dma_resume_noirq() local
2104 clk_prepare_enable(atdma->clk); in at_dma_resume_noirq()
2105 dma_writel(atdma, EN, AT_DMA_ENABLE); in at_dma_resume_noirq()
2108 while (dma_readl(atdma, EBCISR)) in at_dma_resume_noirq()
2112 dma_writel(atdma, EBCIER, atdma->save_imr); in at_dma_resume_noirq()
2113 list_for_each_entry_safe(chan, _chan, &atdma->dma_common.channels, in at_dma_resume_noirq()
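
The _noirq pair completes the power-management picture: suspend saves the enabled interrupt mask from EBCIMR, quiesces the controller with at_dma_off() and gates dma_clk; resume re-enables the clock, turns the controller back on, drains stale status from EBCISR, restores the mask through EBCIER, and lets cyclic channels restart via atc_resume_cyclic(). A sketch of the resume half, assuming the save_imr field shown above; the clear-on-read behaviour of EBCISR is inferred from the drain loop, and the per-channel restart walk is elided:

/* Sketch of the resume_noirq flow referenced above. */
static int at_dma_resume_noirq(struct device *dev)
{
	struct at_dma *atdma = dev_get_drvdata(dev);

	/* Bring the controller back up. */
	clk_prepare_enable(atdma->clk);
	dma_writel(atdma, EN, AT_DMA_ENABLE);

	/* Drain status left over from before suspend (read clears it). */
	while (dma_readl(atdma, EBCISR))
		cpu_relax();

	/* Restore the interrupt sources that were enabled at suspend time. */
	dma_writel(atdma, EBCIER, atdma->save_imr);

	/* ... walk dma_common.channels and restart cyclic channels ... */

	return 0;
}
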