Searched refs:tx_dma_desc (Results 1 – 11 of 11) sorted by relevance
/Linux-v5.10/drivers/net/ethernet/altera/
altera_msgdma.c
    111  csrwr32(lower_32_bits(buffer->dma_addr), priv->tx_dma_desc,  in msgdma_tx_buffer()
    113  csrwr32(upper_32_bits(buffer->dma_addr), priv->tx_dma_desc,  in msgdma_tx_buffer()
    115  csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(write_addr_lo));  in msgdma_tx_buffer()
    116  csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(write_addr_hi));  in msgdma_tx_buffer()
    117  csrwr32(buffer->len, priv->tx_dma_desc, msgdma_descroffs(len));  in msgdma_tx_buffer()
    118  csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(burst_seq_num));  in msgdma_tx_buffer()
    119  csrwr32(MSGDMA_DESC_TX_STRIDE, priv->tx_dma_desc,  in msgdma_tx_buffer()
    121  csrwr32(MSGDMA_DESC_CTL_TX_SINGLE, priv->tx_dma_desc,  in msgdma_tx_buffer()

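The altera_msgdma.c hits above program a single memory-mapped TX descriptor field by field. A minimal sketch of that MMIO pattern, using plain writel() and hypothetical register offsets instead of the driver's csrwr32()/msgdma_descroffs() helpers:

#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Hypothetical descriptor field offsets; the real driver derives them with
 * msgdma_descroffs() from its extended descriptor layout. */
#define DESC_READ_ADDR_LO	0x00
#define DESC_READ_ADDR_HI	0x04
#define DESC_WRITE_ADDR_LO	0x08
#define DESC_WRITE_ADDR_HI	0x0c
#define DESC_LEN		0x10
#define DESC_CONTROL		0x18

/* Program one TX descriptor in the memory-mapped descriptor window.
 * The control word is written last, which hands the descriptor to the DMA. */
static void prog_tx_desc(void __iomem *desc, dma_addr_t buf, u32 len, u32 ctl)
{
	writel(lower_32_bits(buf), desc + DESC_READ_ADDR_LO);
	writel(upper_32_bits(buf), desc + DESC_READ_ADDR_HI);
	writel(0, desc + DESC_WRITE_ADDR_LO);	/* TX: data is consumed by the MAC */
	writel(0, desc + DESC_WRITE_ADDR_HI);
	writel(len, desc + DESC_LEN);
	writel(ctl, desc + DESC_CONTROL);	/* e.g. a "single packet" control word */
}
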
altera_sgdma.c
     80  (void __force *)priv->tx_dma_desc,  in sgdma_initialize()
     90  memset_io(priv->tx_dma_desc, 0, priv->txdescmem);  in sgdma_initialize()
    119  memset_io(priv->tx_dma_desc, 0, priv->txdescmem);  in sgdma_reset()
    170  (struct sgdma_descrip __iomem *)priv->tx_dma_desc;  in sgdma_tx_buffer()
    205  ((csrrd8(priv->tx_dma_desc, sgdma_descroffs(control))  in sgdma_tx_completions()
    410  uintptr_t offs = (uintptr_t)desc - (uintptr_t)priv->tx_dma_desc;  in sgdma_txphysaddr()

altera_tse.h
    424  void __iomem *tx_dma_desc;  member

altera_tse_main.c
   1385  priv->tx_dma_desc = descmap;  in altera_tse_probe()
   1418  &priv->tx_dma_desc);  in altera_tse_probe()

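As the altera_tse.h and altera_tse_main.c hits show, tx_dma_desc is kept as a void __iomem * and pointed at device descriptor memory during probe. A hedged sketch of that kind of setup, with a hypothetical private struct and resource name rather than the driver's real ones:

#include <linux/err.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/types.h>

/* Hypothetical private struct; the field is __iomem because the
 * descriptors live in device memory, not in system RAM. */
struct xyz_priv {
	void __iomem *tx_dma_desc;
	u32 txdescmem;
};

static int xyz_map_tx_descs(struct platform_device *pdev, struct xyz_priv *priv)
{
	struct resource *res;

	/* "tx_desc" is an illustrative resource name, not the driver's. */
	res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "tx_desc");
	priv->tx_dma_desc = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(priv->tx_dma_desc))
		return PTR_ERR(priv->tx_dma_desc);

	priv->txdescmem = resource_size(res);
	return 0;
}
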
/Linux-v5.10/drivers/dma/
at_xdmac.c
    238  struct dma_async_tx_descriptor tx_dma_desc;  member
    276  return container_of(txd, struct at_xdmac_desc, tx_dma_desc);  in txd_to_at_desc()
    348  reg = AT_XDMAC_CNDA_NDA(first->tx_dma_desc.phys)  in at_xdmac_start_xfer()
    450  dma_async_tx_descriptor_init(&desc->tx_dma_desc, chan);  in at_xdmac_alloc_desc()
    451  desc->tx_dma_desc.tx_submit = at_xdmac_tx_submit;  in at_xdmac_alloc_desc()
    452  desc->tx_dma_desc.phys = phys;  in at_xdmac_alloc_desc()
    491  prev->lld.mbr_nda = desc->tx_dma_desc.phys;  in at_xdmac_queue_desc()
    723  first->tx_dma_desc.flags = flags;  in at_xdmac_prep_slave_sg()
    726  ret = &first->tx_dma_desc;  in at_xdmac_prep_slave_sg()
    777  __func__, desc, &desc->tx_dma_desc.phys);  in at_xdmac_prep_dma_cyclic()
    [all …]

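In at_xdmac.c the generic struct dma_async_tx_descriptor is embedded in the driver's own descriptor, recovered with container_of(), and initialized once at allocation time with its tx_submit hook and physical address. A stripped-down sketch of that pattern with hypothetical type names:

#include <linux/dmaengine.h>
#include <linux/kernel.h>
#include <linux/types.h>

/* Hypothetical driver descriptor wrapping the generic dmaengine one. */
struct xyz_desc {
	u32				hw_lld[4];	/* stand-in for the HW linked-list entry */
	struct dma_async_tx_descriptor	txd;		/* generic descriptor handed to clients */
};

/* Clients only ever see &desc->txd; the driver recovers its own
 * descriptor from the embedded member. */
static inline struct xyz_desc *txd_to_xyz_desc(struct dma_async_tx_descriptor *txd)
{
	return container_of(txd, struct xyz_desc, txd);
}

static void xyz_init_desc(struct xyz_desc *desc, struct dma_chan *chan,
			  dma_addr_t phys,
			  dma_cookie_t (*submit)(struct dma_async_tx_descriptor *))
{
	dma_async_tx_descriptor_init(&desc->txd, chan);
	desc->txd.tx_submit = submit;	/* invoked via dmaengine_submit() */
	desc->txd.phys = phys;		/* bus address of the HW descriptor */
}
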
/Linux-v5.10/drivers/spi/
spi-stm32.c
   1291  struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc;  in stm32_spi_transfer_one_dma()  local
   1312  tx_dma_desc = NULL;  in stm32_spi_transfer_one_dma()
   1317  tx_dma_desc = dmaengine_prep_slave_sg(  in stm32_spi_transfer_one_dma()
   1324  if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||  in stm32_spi_transfer_one_dma()
   1328  if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc))  in stm32_spi_transfer_one_dma()
   1343  if (tx_dma_desc) {  in stm32_spi_transfer_one_dma()
   1346  tx_dma_desc->callback = spi->cfg->dma_tx_cb;  in stm32_spi_transfer_one_dma()
   1347  tx_dma_desc->callback_param = spi;  in stm32_spi_transfer_one_dma()
   1350  if (dma_submit_error(dmaengine_submit(tx_dma_desc))) {  in stm32_spi_transfer_one_dma()

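The spi-stm32.c hits follow the usual dmaengine TX flow: prepare a slave scatter-gather descriptor, attach a completion callback, submit it, check the cookie, then issue pending transfers. A generic, hedged sketch of that sequence with driver-agnostic names and minimal error handling:

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Queue a TX scatterlist on an already-configured slave channel.
 * 'done' is whatever completion callback the caller wants to run. */
static int queue_tx_sg(struct dma_chan *chan, struct scatterlist *sgl,
		       unsigned int nents, dma_async_tx_callback done, void *arg)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;

	desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EIO;		/* prep can fail, e.g. bad channel config */

	desc->callback = done;
	desc->callback_param = arg;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);	/* nothing moves until this is called */
	return 0;
}
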
spi-tegra20-slink.c
    204  struct dma_async_tx_descriptor *tx_dma_desc;  member
    436  tspi->tx_dma_desc = dmaengine_prep_slave_single(tspi->tx_dma_chan,  in tegra_slink_start_tx_dma()
    439  if (!tspi->tx_dma_desc) {  in tegra_slink_start_tx_dma()
    444  tspi->tx_dma_desc->callback = tegra_slink_dma_complete;  in tegra_slink_start_tx_dma()
    445  tspi->tx_dma_desc->callback_param = &tspi->tx_dma_complete;  in tegra_slink_start_tx_dma()
    447  dmaengine_submit(tspi->tx_dma_desc);  in tegra_slink_start_tx_dma()

spi-tegra114.c
    220  struct dma_async_tx_descriptor *tx_dma_desc;  member
    476  tspi->tx_dma_desc = dmaengine_prep_slave_single(tspi->tx_dma_chan,  in tegra_spi_start_tx_dma()
    479  if (!tspi->tx_dma_desc) {  in tegra_spi_start_tx_dma()
    484  tspi->tx_dma_desc->callback = tegra_spi_dma_complete;  in tegra_spi_start_tx_dma()
    485  tspi->tx_dma_desc->callback_param = &tspi->tx_dma_complete;  in tegra_spi_start_tx_dma()
    487  dmaengine_submit(tspi->tx_dma_desc);  in tegra_spi_start_tx_dma()

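Both Tegra SPI drivers use the single-buffer variant: dmaengine_prep_slave_single() on an already DMA-mapped buffer, with a struct completion passed as callback_param so the transfer can be waited on. A hedged sketch of that variant (names are illustrative, not the drivers' own):

#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>

/* Signal the waiter once the TX DMA has completed. */
static void tx_dma_done(void *arg)
{
	complete(arg);
}

/* Start a TX DMA of 'len' bytes from a buffer already mapped at 'buf'. */
static int start_tx_dma(struct dma_chan *chan, dma_addr_t buf, size_t len,
			struct completion *done)
{
	struct dma_async_tx_descriptor *desc;

	reinit_completion(done);
	desc = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EIO;

	desc->callback = tx_dma_done;
	desc->callback_param = done;	/* the completion, as in the Tegra drivers */

	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}
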
/Linux-v5.10/drivers/tty/serial/
serial-tegra.c
    132  struct dma_async_tx_descriptor *tx_dma_desc;  member
    521  async_tx_ack(tup->tx_dma_desc);  in tegra_uart_tx_dma_complete()
    543  tup->tx_dma_desc = dmaengine_prep_slave_single(tup->tx_dma_chan,  in tegra_uart_start_tx_dma()
    546  if (!tup->tx_dma_desc) {  in tegra_uart_start_tx_dma()
    551  tup->tx_dma_desc->callback = tegra_uart_tx_dma_complete;  in tegra_uart_start_tx_dma()
    552  tup->tx_dma_desc->callback_param = tup;  in tegra_uart_start_tx_dma()
    555  tup->tx_cookie = dmaengine_submit(tup->tx_dma_desc);  in tegra_uart_start_tx_dma()
    621  async_tx_ack(tup->tx_dma_desc);  in tegra_uart_stop_tx()

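serial-tegra.c additionally keeps the in-flight descriptor around and calls async_tx_ack() on it, both from the completion callback and when TX is stopped, so the DMA driver is free to recycle it. A minimal hedged sketch of that pattern with a hypothetical port structure:

#include <linux/dmaengine.h>

/* Hypothetical per-port state keeping the in-flight TX descriptor. */
struct xyz_uart_port {
	struct dma_chan			*tx_dma_chan;
	struct dma_async_tx_descriptor	*tx_dma_desc;
	dma_cookie_t			tx_cookie;
};

/* DMA completion: ack the descriptor so the DMA driver can reuse it. */
static void xyz_tx_dma_complete(void *arg)
{
	struct xyz_uart_port *up = arg;

	async_tx_ack(up->tx_dma_desc);
	/* ...advance the circular buffer, restart TX if more data is queued... */
}

/* Stopping TX while DMA is in flight: terminate the channel, then ack. */
static void xyz_stop_tx(struct xyz_uart_port *up)
{
	dmaengine_terminate_all(up->tx_dma_chan);
	async_tx_ack(up->tx_dma_desc);
}
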
sirfsoc_uart.c
    243  sirfport->tx_dma_desc = dmaengine_prep_slave_single(  in sirfsoc_uart_tx_with_dma()
    246  if (!sirfport->tx_dma_desc) {  in sirfsoc_uart_tx_with_dma()
    250  sirfport->tx_dma_desc->callback =  in sirfsoc_uart_tx_with_dma()
    252  sirfport->tx_dma_desc->callback_param = (void *)sirfport;  in sirfsoc_uart_tx_with_dma()
    255  dmaengine_submit(sirfport->tx_dma_desc);  in sirfsoc_uart_tx_with_dma()

sirfsoc_uart.h
    425  struct dma_async_tx_descriptor *tx_dma_desc;  member