/Linux-v5.15/drivers/spi/ |
D | spi-pxa2xx-dma.c |
      95  chan = drv_data->controller->dma_tx;  in pxa2xx_spi_dma_prepare_one()
     124  dmaengine_terminate_async(drv_data->controller->dma_tx);  in pxa2xx_spi_dma_transfer()
     162  dmaengine_terminate_async(drv_data->controller->dma_tx);  in pxa2xx_spi_dma_prepare()
     170  dma_async_issue_pending(drv_data->controller->dma_tx);  in pxa2xx_spi_dma_start()
     179  dmaengine_terminate_sync(drv_data->controller->dma_tx);  in pxa2xx_spi_dma_stop()
     192  controller->dma_tx = dma_request_slave_channel_compat(mask,  in pxa2xx_spi_dma_setup()
     194  if (!controller->dma_tx)  in pxa2xx_spi_dma_setup()
     200  dma_release_channel(controller->dma_tx);  in pxa2xx_spi_dma_setup()
     201  controller->dma_tx = NULL;  in pxa2xx_spi_dma_setup()
     217  if (controller->dma_tx) {  in pxa2xx_spi_dma_release()
     [all …]
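
The hits above trace the usual dmaengine TX-channel lifecycle in a SPI controller driver: request the channel at setup time, prepare and issue a descriptor per transfer, terminate on error or stop, and release on teardown. Below is a minimal sketch of that pattern; the my_spi_* helpers and their exact error handling are illustrative, not taken from spi-pxa2xx-dma.c.

/* Hedged sketch of a TX DMA channel lifecycle for a SPI controller. */
#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

static int my_spi_dma_setup(struct spi_controller *ctlr, struct device *dev)
{
	ctlr->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(ctlr->dma_tx))
		return PTR_ERR(ctlr->dma_tx);
	return 0;
}

static int my_spi_dma_start(struct spi_controller *ctlr, struct spi_transfer *xfer)
{
	struct dma_async_tx_descriptor *desc;

	/* map-and-prepare: the SPI core has already filled xfer->tx_sg */
	desc = dmaengine_prep_slave_sg(ctlr->dma_tx, xfer->tx_sg.sgl,
				       xfer->tx_sg.nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	dmaengine_submit(desc);
	dma_async_issue_pending(ctlr->dma_tx);	/* kick the engine */
	return 0;
}

static void my_spi_dma_stop(struct spi_controller *ctlr)
{
	dmaengine_terminate_sync(ctlr->dma_tx);	/* wait for the abort to finish */
}

static void my_spi_dma_release(struct spi_controller *ctlr)
{
	if (ctlr->dma_tx) {
		dma_release_channel(ctlr->dma_tx);
		ctlr->dma_tx = NULL;
	}
}
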
|
D | spi-at91-usart.c |
     135  ctlr->dma_tx = dma_request_chan(dev, "tx");  in at91_usart_spi_configure_dma()
     136  if (IS_ERR_OR_NULL(ctlr->dma_tx)) {  in at91_usart_spi_configure_dma()
     137  if (IS_ERR(ctlr->dma_tx)) {  in at91_usart_spi_configure_dma()
     138  err = PTR_ERR(ctlr->dma_tx);  in at91_usart_spi_configure_dma()
     178  if (dmaengine_slave_config(ctlr->dma_tx, &slave_config)) {  in at91_usart_spi_configure_dma()
     189  if (!IS_ERR_OR_NULL(ctlr->dma_tx))  in at91_usart_spi_configure_dma()
     190  dma_release_channel(ctlr->dma_tx);  in at91_usart_spi_configure_dma()
     193  ctlr->dma_tx = NULL;  in at91_usart_spi_configure_dma()
     204  if (ctlr->dma_tx)  in at91_usart_spi_release_dma()
     205  dma_release_channel(ctlr->dma_tx);  in at91_usart_spi_release_dma()
     [all …]
|
D | spi-sun6i.c |
     227  dmaengine_slave_config(master->dma_tx, &txconf);  in sun6i_spi_prepare_dma()
     229  txdesc = dmaengine_prep_slave_sg(master->dma_tx,  in sun6i_spi_prepare_dma()
     248  dma_async_issue_pending(master->dma_tx);  in sun6i_spi_prepare_dma()
     442  dmaengine_terminate_sync(master->dma_tx);  in sun6i_spi_transfer_one()
     622  master->dma_tx = dma_request_chan(&pdev->dev, "tx");  in sun6i_spi_probe()
     623  if (IS_ERR(master->dma_tx)) {  in sun6i_spi_probe()
     625  if (PTR_ERR(master->dma_tx) == -EPROBE_DEFER) {  in sun6i_spi_probe()
     630  master->dma_tx = NULL;  in sun6i_spi_probe()
     643  if (master->dma_tx && master->dma_rx) {  in sun6i_spi_probe()
     679  if (master->dma_tx)  in sun6i_spi_probe()
     [all …]
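
Several of the probe() hits in this directory (sun6i, rockchip, imx, pic32, uniphier) share the optional-DMA pattern: -EPROBE_DEFER from dma_request_chan() must be propagated so probing is retried later, while any other error simply clears dma_tx and the driver falls back to PIO. A hedged sketch of that pattern, with my_spi_probe_dma() as a made-up helper:

#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

static int my_spi_probe_dma(struct spi_controller *ctlr, struct device *dev)
{
	ctlr->dma_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(ctlr->dma_tx)) {
		int ret = PTR_ERR(ctlr->dma_tx);

		if (ret == -EPROBE_DEFER)
			return ret;	/* DMA provider not ready yet, retry probe */

		dev_warn(dev, "no TX DMA channel, falling back to PIO\n");
		ctlr->dma_tx = NULL;	/* PIO-only operation */
	}
	return 0;
}
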
|
D | spi-bcm2835.c |
     609  dmaengine_terminate_async(ctlr->dma_tx);  in bcm2835_spi_dma_rx_done()
     680  chan = ctlr->dma_tx;  in bcm2835_spi_prepare_sg()
     801  dma_async_issue_pending(ctlr->dma_tx);  in bcm2835_spi_transfer_one_dma()
     815  dmaengine_terminate_sync(ctlr->dma_tx);  in bcm2835_spi_transfer_one_dma()
     859  if (ctlr->dma_tx) {  in bcm2835_dma_release()
     860  dmaengine_terminate_sync(ctlr->dma_tx);  in bcm2835_dma_release()
     866  dma_unmap_page_attrs(ctlr->dma_tx->device->dev,  in bcm2835_dma_release()
     871  dma_release_channel(ctlr->dma_tx);  in bcm2835_dma_release()
     872  ctlr->dma_tx = NULL;  in bcm2835_dma_release()
     900  ctlr->dma_tx = dma_request_chan(dev, "tx");  in bcm2835_dma_init()
     [all …]
|
D | spi-pic32.c |
     309  if (!master->dma_rx || !master->dma_tx)  in pic32_spi_dma_transfer()
     322  desc_tx = dmaengine_prep_slave_sg(master->dma_tx,  in pic32_spi_dma_transfer()
     347  dma_async_issue_pending(master->dma_tx);  in pic32_spi_dma_transfer()
     375  ret = dmaengine_slave_config(master->dma_tx, &cfg);  in pic32_spi_dma_config()
     555  dmaengine_terminate_all(master->dma_tx);  in pic32_spi_one_transfer()
     626  master->dma_tx = dma_request_chan(dev, "spi-tx");  in pic32_spi_dma_prep()
     627  if (IS_ERR(master->dma_tx)) {  in pic32_spi_dma_prep()
     628  if (PTR_ERR(master->dma_tx) == -EPROBE_DEFER)  in pic32_spi_dma_prep()
     633  master->dma_tx = NULL;  in pic32_spi_dma_prep()
     651  if (master->dma_tx) {  in pic32_spi_dma_prep()
     [all …]
|
D | spi-fsl-lpspi.c |
     357  ret = dmaengine_slave_config(controller->dma_tx, &tx);  in fsl_lpspi_dma_configure()
     571  desc_tx = dmaengine_prep_slave_sg(controller->dma_tx,  in fsl_lpspi_dma_transfer()
     575  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
     583  dma_async_issue_pending(controller->dma_tx);  in fsl_lpspi_dma_transfer()
     596  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
     606  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
     616  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
     626  dmaengine_terminate_all(controller->dma_tx);  in fsl_lpspi_dma_transfer()
     645  if (controller->dma_tx) {  in fsl_lpspi_dma_exit()
     646  dma_release_channel(controller->dma_tx);  in fsl_lpspi_dma_exit()
     [all …]
|
D | spi-uniphier.c |
     356  if ((!master->dma_tx && !master->dma_rx)  in uniphier_spi_can_dma()
     357  || (!master->dma_tx && t->tx_buf)  in uniphier_spi_can_dma()
     442  dmaengine_slave_config(master->dma_tx, &txconf);  in uniphier_spi_transfer_one_dma()
     445  master->dma_tx,  in uniphier_spi_transfer_one_dma()
     458  dma_async_issue_pending(master->dma_tx);  in uniphier_spi_transfer_one_dma()
     590  dmaengine_terminate_async(master->dma_tx);  in uniphier_spi_handle_err()
     716  master->dma_tx = dma_request_chan(&pdev->dev, "tx");  in uniphier_spi_probe()
     717  if (IS_ERR_OR_NULL(master->dma_tx)) {  in uniphier_spi_probe()
     718  if (PTR_ERR(master->dma_tx) == -EPROBE_DEFER) {  in uniphier_spi_probe()
     722  master->dma_tx = NULL;  in uniphier_spi_probe()
     [all …]
|
D | spi-rockchip.c |
     282  dmaengine_terminate_async(ctlr->dma_tx);  in rockchip_spi_handle_err()
     458  dmaengine_slave_config(ctlr->dma_tx, &txconf);  in rockchip_spi_prepare_dma()
     461  ctlr->dma_tx,  in rockchip_spi_prepare_dma()
     486  dma_async_issue_pending(ctlr->dma_tx);  in rockchip_spi_prepare_dma()
     781  ctlr->dma_tx = dma_request_chan(rs->dev, "tx");  in rockchip_spi_probe()
     782  if (IS_ERR(ctlr->dma_tx)) {  in rockchip_spi_probe()
     784  if (PTR_ERR(ctlr->dma_tx) == -EPROBE_DEFER) {  in rockchip_spi_probe()
     789  ctlr->dma_tx = NULL;  in rockchip_spi_probe()
     802  if (ctlr->dma_tx && ctlr->dma_rx) {  in rockchip_spi_probe()
     828  if (ctlr->dma_tx)  in rockchip_spi_probe()
     [all …]
|
D | spi-stm32.c |
     307  struct dma_chan *dma_tx;  member
     659  if (spi->cur_usedma && spi->dma_tx)  in stm32f4_spi_disable()
     660  dmaengine_terminate_all(spi->dma_tx);  in stm32f4_spi_disable()
     698  if (spi->cur_usedma && spi->dma_tx)  in stm32h7_spi_disable()
     699  dmaengine_terminate_all(spi->dma_tx);  in stm32h7_spi_disable()
    1247  if (spi->tx_buf && spi->dma_tx) {  in stm32_spi_transfer_one_dma()
    1249  dmaengine_slave_config(spi->dma_tx, &tx_dma_conf);  in stm32_spi_transfer_one_dma()
    1252  spi->dma_tx, xfer->tx_sg.sgl,  in stm32_spi_transfer_one_dma()
    1258  if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||  in stm32_spi_transfer_one_dma()
    1289  dma_async_issue_pending(spi->dma_tx);  in stm32_spi_transfer_one_dma()
     [all …]
|
D | spi-omap2-mcspi.c |
      92  struct dma_chan *dma_tx;  member
     414  dmaengine_slave_config(mcspi_dma->dma_tx, &cfg);  in omap2_mcspi_tx_dma()
     416  tx = dmaengine_prep_slave_sg(mcspi_dma->dma_tx, xfer->tx_sg.sgl,  in omap2_mcspi_tx_dma()
     427  dma_async_issue_pending(mcspi_dma->dma_tx);  in omap2_mcspi_tx_dma()
     642  dmaengine_terminate_sync(mcspi_dma->dma_tx);  in omap2_mcspi_txrx_dma()
     999  mcspi_dma->dma_tx = dma_request_chan(mcspi->dev,  in omap2_mcspi_request_dma()
    1001  if (IS_ERR(mcspi_dma->dma_tx)) {  in omap2_mcspi_request_dma()
    1002  ret = PTR_ERR(mcspi_dma->dma_tx);  in omap2_mcspi_request_dma()
    1003  mcspi_dma->dma_tx = NULL;  in omap2_mcspi_request_dma()
    1028  if (mcspi_dma->dma_tx) {  in omap2_mcspi_release_dma()
     [all …]
|
D | spi-davinci.c |
     121  struct dma_chan *dma_tx;  member
     396  if (dspi->dma_rx && dspi->dma_tx)  in davinci_spi_of_setup()
     622  dmaengine_slave_config(dspi->dma_tx, &dma_tx_conf);  in davinci_spi_bufs()
     640  txdesc = dmaengine_prep_slave_sg(dspi->dma_tx,  in davinci_spi_bufs()
     658  dma_async_issue_pending(dspi->dma_tx);  in davinci_spi_bufs()
     752  dspi->dma_tx = dma_request_chan(sdev, "tx");  in davinci_spi_request_dma()
     753  if (IS_ERR(dspi->dma_tx)) {  in davinci_spi_request_dma()
     755  return PTR_ERR(dspi->dma_tx);  in davinci_spi_request_dma()
     964  dspi->dma_tx = NULL;  in davinci_spi_probe()
    1002  dma_release_channel(dspi->dma_tx);  in davinci_spi_probe()
     [all …]
|
D | spi-ep93xx.c |
      94  struct dma_chan *dma_tx;  member
     303  chan = espi->dma_tx;  in ep93xx_spi_dma_prepare()
     387  chan = espi->dma_tx;  in ep93xx_spi_dma_finish()
     431  dma_async_issue_pending(espi->dma_tx);  in ep93xx_spi_dma_transfer()
     614  espi->dma_tx = dma_request_channel(mask, ep93xx_spi_dma_filter,  in ep93xx_spi_setup_dma()
     616  if (!espi->dma_tx) {  in ep93xx_spi_setup_dma()
     638  if (espi->dma_tx) {  in ep93xx_spi_release_dma()
     639  dma_release_channel(espi->dma_tx);  in ep93xx_spi_release_dma()
|
D | spi-sh-msiof.c |
     770  dma_sync_single_for_device(p->ctlr->dma_tx->device->dev,  in sh_msiof_dma_once()
     772  desc_tx = dmaengine_prep_slave_single(p->ctlr->dma_tx,  in sh_msiof_dma_once()
     806  dma_async_issue_pending(p->ctlr->dma_tx);  in sh_msiof_dma_once()
     856  dmaengine_terminate_sync(p->ctlr->dma_tx);  in sh_msiof_dma_once()
     932  while (ctlr->dma_tx && len > 15) {  in sh_msiof_transfer_one()
    1195  ctlr->dma_tx = sh_msiof_request_dma_chan(dev, DMA_MEM_TO_DEV,  in sh_msiof_request_dma()
    1197  if (!ctlr->dma_tx)  in sh_msiof_request_dma()
    1213  tx_dev = ctlr->dma_tx->device->dev;  in sh_msiof_request_dma()
    1237  dma_release_channel(ctlr->dma_tx);  in sh_msiof_request_dma()
    1238  ctlr->dma_tx = NULL;  in sh_msiof_request_dma()
     [all …]
|
D | spi-atmel.c |
     472  if (dmaengine_slave_config(master->dma_tx, slave_config)) {  in atmel_spi_dma_slave_config()
     503  master->dma_tx = dma_request_chan(dev, "tx");  in atmel_spi_configure_dma()
     504  if (IS_ERR(master->dma_tx)) {  in atmel_spi_configure_dma()
     505  err = PTR_ERR(master->dma_tx);  in atmel_spi_configure_dma()
     527  dma_chan_name(master->dma_tx),  in atmel_spi_configure_dma()
     534  if (!IS_ERR(master->dma_tx))  in atmel_spi_configure_dma()
     535  dma_release_channel(master->dma_tx);  in atmel_spi_configure_dma()
     537  master->dma_tx = master->dma_rx = NULL;  in atmel_spi_configure_dma()
     545  if (master->dma_tx)  in atmel_spi_stop_dma()
     546  dmaengine_terminate_all(master->dma_tx);  in atmel_spi_stop_dma()
     [all …]
|
D | spi-qup.c |
     400  chan = master->dma_tx;  in spi_qup_prep_sg()
     420  dmaengine_terminate_all(master->dma_tx);  in spi_qup_dma_terminate()
     498  dma_async_issue_pending(master->dma_tx);  in spi_qup_do_dma()
     905  IS_ERR_OR_NULL(master->dma_tx))  in spi_qup_can_dma()
     922  if (!IS_ERR_OR_NULL(master->dma_tx))  in spi_qup_release_dma()
     923  dma_release_channel(master->dma_tx);  in spi_qup_release_dma()
     939  master->dma_tx = dma_request_chan(dev, "tx");  in spi_qup_init_dma()
     940  if (IS_ERR(master->dma_tx)) {  in spi_qup_init_dma()
     941  ret = PTR_ERR(master->dma_tx);  in spi_qup_init_dma()
     962  ret = dmaengine_slave_config(master->dma_tx, tx_conf);  in spi_qup_init_dma()
     [all …]
|
D | spi-imx.c |
    1181  ret = dmaengine_slave_config(master->dma_tx, &tx);  in spi_imx_dma_configure()
    1274  if (master->dma_tx) {  in spi_imx_sdma_exit()
    1275  dma_release_channel(master->dma_tx);  in spi_imx_sdma_exit()
    1276  master->dma_tx = NULL;  in spi_imx_sdma_exit()
    1288  master->dma_tx = dma_request_chan(dev, "tx");  in spi_imx_sdma_init()
    1289  if (IS_ERR(master->dma_tx)) {  in spi_imx_sdma_init()
    1290  ret = PTR_ERR(master->dma_tx);  in spi_imx_sdma_init()
    1292  master->dma_tx = NULL;  in spi_imx_sdma_init()
    1399  desc_tx = dmaengine_prep_slave_sg(master->dma_tx,  in spi_imx_dma_transfer()
    1403  dmaengine_terminate_all(master->dma_tx);  in spi_imx_dma_transfer()
     [all …]
|
/Linux-v5.15/arch/powerpc/platforms/512x/ |
D | mpc512x_lpbfifo.c |
     169  struct dma_async_tx_descriptor *dma_tx = NULL;  in mpc512x_lpbfifo_kick() local
     259  dma_tx = dmaengine_prep_slave_sg(lpbfifo.chan, &sg,  in mpc512x_lpbfifo_kick()
     261  if (!dma_tx) {  in mpc512x_lpbfifo_kick()
     265  dma_tx->callback = mpc512x_lpbfifo_callback;  in mpc512x_lpbfifo_kick()
     266  dma_tx->callback_param = NULL;  in mpc512x_lpbfifo_kick()
     315  cookie = dma_tx->tx_submit(dma_tx);  in mpc512x_lpbfifo_kick()
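
Here dma_tx is a local descriptor, not a channel: mpc512x_lpbfifo_kick() prepares a slave scatter-gather descriptor, attaches a completion callback, then submits it by calling the descriptor's tx_submit() hook directly. The sketch below shows the same flow using the usual dmaengine_submit() wrapper; the helper name and error codes are illustrative, not the driver's own.

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int my_lpbfifo_kick(struct dma_chan *chan, struct scatterlist *sg,
			   enum dma_transfer_direction dir,
			   dma_async_tx_callback done)
{
	struct dma_async_tx_descriptor *dma_tx;
	dma_cookie_t cookie;

	dma_tx = dmaengine_prep_slave_sg(chan, sg, 1, dir,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!dma_tx)
		return -ENOSPC;

	dma_tx->callback = done;		/* completion handler */
	dma_tx->callback_param = NULL;

	/* equivalent to the direct dma_tx->tx_submit(dma_tx) call above */
	cookie = dmaengine_submit(dma_tx);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	return 0;
}
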
|
/Linux-v5.15/drivers/i2c/busses/ |
D | i2c-sh_mobile.c |
     135  struct dma_chan *dma_tx;  member
     448  ? pd->dma_rx : pd->dma_tx;  in sh_mobile_i2c_dma_unmap()
     463  dmaengine_terminate_sync(pd->dma_tx);  in sh_mobile_i2c_cleanup_dma()
     519  struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;  in sh_mobile_i2c_xfer_dma()
     529  chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,  in sh_mobile_i2c_xfer_dma()
     828  if (!IS_ERR(pd->dma_tx)) {  in sh_mobile_i2c_release_dma()
     829  dma_release_channel(pd->dma_tx);  in sh_mobile_i2c_release_dma()
     830  pd->dma_tx = ERR_PTR(-EPROBE_DEFER);  in sh_mobile_i2c_release_dma()
     919  pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);  in sh_mobile_i2c_probe()
|
D | i2c-rcar.c |
     138  struct dma_chan *dma_tx;  member
     373  ? priv->dma_rx : priv->dma_tx;  in rcar_i2c_dma_unmap()
     396  dmaengine_terminate_all(priv->dma_tx);  in rcar_i2c_cleanup_dma()
     416  struct dma_chan *chan = read ? priv->dma_rx : priv->dma_tx;  in rcar_i2c_dma()
     766  chan = read ? priv->dma_rx : priv->dma_tx;  in rcar_i2c_request_dma()
     776  priv->dma_tx = chan;  in rcar_i2c_request_dma()
     781  if (!IS_ERR(priv->dma_tx)) {  in rcar_i2c_release_dma()
     782  dma_release_channel(priv->dma_tx);  in rcar_i2c_release_dma()
     783  priv->dma_tx = ERR_PTR(-EPROBE_DEFER);  in rcar_i2c_release_dma()
    1067  priv->dma_rx = priv->dma_tx = ERR_PTR(-EPROBE_DEFER);  in rcar_i2c_probe()
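
Both i2c-sh_mobile.c and i2c-rcar.c keep dma_tx initialised to the sentinel ERR_PTR(-EPROBE_DEFER), request the channel lazily on the first DMA-capable transfer, and reset the sentinel on release so a later transfer can retry. A hedged sketch of that idiom under an assumed priv layout (my_i2c_priv and the helpers are illustrative):

#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/device.h>

struct my_i2c_priv {
	struct device *dev;
	struct dma_chan *dma_tx;	/* ERR_PTR(-EPROBE_DEFER) until first use */
};

static struct dma_chan *my_i2c_get_tx_chan(struct my_i2c_priv *priv)
{
	/* request lazily; any earlier hard failure is remembered and skipped */
	if (PTR_ERR(priv->dma_tx) == -EPROBE_DEFER)
		priv->dma_tx = dma_request_chan(priv->dev, "tx");

	return IS_ERR(priv->dma_tx) ? NULL : priv->dma_tx;	/* NULL => use PIO */
}

static void my_i2c_release_tx_chan(struct my_i2c_priv *priv)
{
	if (!IS_ERR(priv->dma_tx)) {
		dma_release_channel(priv->dma_tx);
		priv->dma_tx = ERR_PTR(-EPROBE_DEFER);	/* allow a later retry */
	}
}
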
|
/Linux-v5.15/drivers/net/ethernet/samsung/sxgbe/ |
D | sxgbe_dma.c |
      44  int fix_burst, int pbl, dma_addr_t dma_tx,  in sxgbe_dma_channel_init() argument
      66  writel(upper_32_bits(dma_tx),  in sxgbe_dma_channel_init()
      68  writel(lower_32_bits(dma_tx),  in sxgbe_dma_channel_init()
      80  dma_addr = dma_tx + ((t_rsize - 1) * SXGBE_DESC_SIZE_BYTES);  in sxgbe_dma_channel_init()
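
In this driver dma_tx is not a channel but the bus address of the TX descriptor ring: it is split into high/low 32-bit register writes, and the address of the last descriptor is derived from the ring size. A sketch of that idea with placeholder register offsets and descriptor size (not the real SXGBE layout):

#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/types.h>

#define MY_DESC_SIZE_BYTES	16	/* assumed descriptor size */

static void my_dma_channel_init(void __iomem *ioaddr, dma_addr_t dma_tx,
				int t_rsize)
{
	dma_addr_t last_desc;

	writel(upper_32_bits(dma_tx), ioaddr + 0x10);	/* TX ring base, high word */
	writel(lower_32_bits(dma_tx), ioaddr + 0x14);	/* TX ring base, low word */

	/* bus address of the final descriptor in the ring */
	last_desc = dma_tx + ((t_rsize - 1) * MY_DESC_SIZE_BYTES);
	writel(lower_32_bits(last_desc), ioaddr + 0x18);
}
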
|
/Linux-v5.15/drivers/mtd/nand/raw/ |
D | intel-nand-controller.c |
     120  struct dma_chan *dma_tx;  member
     285  dmaengine_terminate_async(ebu_host->dma_tx);  in ebu_dma_tx_callback()
     308  chan = ebu_host->dma_tx;  in ebu_dma_start()
     575  if (ebu_host->dma_tx)  in ebu_dma_cleanup()
     576  dma_release_channel(ebu_host->dma_tx);  in ebu_dma_cleanup()
     633  ebu_host->dma_tx = dma_request_chan(dev, "tx");  in ebu_nand_probe()
     634  if (IS_ERR(ebu_host->dma_tx)) {  in ebu_nand_probe()
     635  ret = dev_err_probe(dev, PTR_ERR(ebu_host->dma_tx),  in ebu_nand_probe()
|
/Linux-v5.15/drivers/net/ethernet/micrel/ |
D | ks8842.c |
     147  #define KS8842_USE_DMA(adapter) (((adapter)->dma_tx.channel != -1) && \
     159  struct ks8842_tx_dma_ctl dma_tx;  member
     422  struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx;  in ks8842_tx_frame_dma()
     845  struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx;  in ks8842_dma_tx_cb()
     861  struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx;  in ks8842_stop_dma()
     883  struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx;  in ks8842_dealloc_dma_bufs()
     915  struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx;  in ks8842_alloc_dma_bufs()
     988  adapter->dma_tx.channel = -1;  in ks8842_open()
    1042  if (adapter->dma_tx.adesc)  in ks8842_xmit_frame()
    1173  adapter->dma_tx.channel = pdata->tx_dma_channel;  in ks8842_probe()
     [all …]
|
/Linux-v5.15/drivers/mmc/host/ |
D | omap.c |
     129  struct dma_chan *dma_tx;  member
     411  c = host->dma_tx;  in mmc_omap_release_dma()
    1001  c = host->dma_tx;  in mmc_omap_prepare_data()
    1077  host->dma_tx : host->dma_rx;  in mmc_omap_start_request()
    1388  host->dma_tx = dma_request_chan(&pdev->dev, "tx");  in mmc_omap_probe()
    1389  if (IS_ERR(host->dma_tx)) {  in mmc_omap_probe()
    1390  ret = PTR_ERR(host->dma_tx);  in mmc_omap_probe()
    1396  host->dma_tx = NULL;  in mmc_omap_probe()
    1404  if (host->dma_tx)  in mmc_omap_probe()
    1405  dma_release_channel(host->dma_tx);  in mmc_omap_probe()
     [all …]
|
D | davinci_mmc.c |
     192  struct dma_chan *dma_tx;  member
     395  sync_dev = host->dma_tx;  in davinci_abort_dma()
     415  chan = host->dma_tx;  in mmc_davinci_send_dma_request()
     416  dmaengine_slave_config(host->dma_tx, &dma_tx_conf);  in mmc_davinci_send_dma_request()
     418  desc = dmaengine_prep_slave_sg(host->dma_tx,  in mmc_davinci_send_dma_request()
     491  dma_release_channel(host->dma_tx);  in davinci_release_dma_channels()
     497  host->dma_tx = dma_request_chan(mmc_dev(host->mmc), "tx");  in davinci_acquire_dma_channels()
     498  if (IS_ERR(host->dma_tx)) {  in davinci_acquire_dma_channels()
     500  return PTR_ERR(host->dma_tx);  in davinci_acquire_dma_channels()
     506  dma_release_channel(host->dma_tx);  in davinci_acquire_dma_channels()
|
/Linux-v5.15/drivers/net/ethernet/calxeda/ |
D | xgmac.c |
     360  struct xgmac_dma_desc *dma_tx;  member
     750  priv->dma_tx = dma_alloc_coherent(priv->device,  in xgmac_dma_desc_rings_init()
     755  if (!priv->dma_tx)  in xgmac_dma_desc_rings_init()
     760  priv->dma_rx, priv->dma_tx,  in xgmac_dma_desc_rings_init()
     771  desc_init_tx_desc(priv->dma_tx, DMA_TX_RING_SZ);  in xgmac_dma_desc_rings_init()
     822  p = priv->dma_tx + i;  in xgmac_free_tx_skbufs()
     843  if (priv->dma_tx) {  in xgmac_free_dma_desc_rings()
     846  priv->dma_tx, priv->dma_tx_phy);  in xgmac_free_dma_desc_rings()
     847  priv->dma_tx = NULL;  in xgmac_free_dma_desc_rings()
     871  struct xgmac_dma_desc *p = priv->dma_tx + entry;  in xgmac_tx_complete()
     [all …]
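
In xgmac.c, dma_tx is the CPU-side pointer to a coherent TX descriptor ring rather than a dmaengine channel, paired with dma_tx_phy as the bus address programmed into the MAC. A hedged sketch of the corresponding alloc/free pair; my_dma_desc, the ring size, and the priv layout stand in for the driver's own definitions.

#include <linux/dma-mapping.h>
#include <linux/device.h>
#include <linux/types.h>

struct my_dma_desc { __le32 flags, buf_size, buf1_addr, buf2_addr; };
#define MY_TX_RING_SZ	256	/* assumed ring size */

struct my_priv {
	struct device *device;
	struct my_dma_desc *dma_tx;	/* CPU address of the ring */
	dma_addr_t dma_tx_phy;		/* bus address programmed into the MAC */
};

static int my_tx_ring_alloc(struct my_priv *priv)
{
	priv->dma_tx = dma_alloc_coherent(priv->device,
					  MY_TX_RING_SZ * sizeof(*priv->dma_tx),
					  &priv->dma_tx_phy, GFP_KERNEL);
	return priv->dma_tx ? 0 : -ENOMEM;
}

static void my_tx_ring_free(struct my_priv *priv)
{
	if (priv->dma_tx) {
		dma_free_coherent(priv->device,
				  MY_TX_RING_SZ * sizeof(*priv->dma_tx),
				  priv->dma_tx, priv->dma_tx_phy);
		priv->dma_tx = NULL;
	}
}
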
|