
Searched refs: tx_dma_desc (Results 1 – 11 of 11), sorted by relevance

/Linux-v4.19/drivers/net/ethernet/altera/
altera_msgdma.c
122 csrwr32(lower_32_bits(buffer->dma_addr), priv->tx_dma_desc, in msgdma_tx_buffer()
124 csrwr32(upper_32_bits(buffer->dma_addr), priv->tx_dma_desc, in msgdma_tx_buffer()
126 csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(write_addr_lo)); in msgdma_tx_buffer()
127 csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(write_addr_hi)); in msgdma_tx_buffer()
128 csrwr32(buffer->len, priv->tx_dma_desc, msgdma_descroffs(len)); in msgdma_tx_buffer()
129 csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(burst_seq_num)); in msgdma_tx_buffer()
130 csrwr32(MSGDMA_DESC_TX_STRIDE, priv->tx_dma_desc, in msgdma_tx_buffer()
132 csrwr32(MSGDMA_DESC_CTL_TX_SINGLE, priv->tx_dma_desc, in msgdma_tx_buffer()
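The msgdma_tx_buffer() hits above (lines 122 – 132) program one mSGDMA TX descriptor by writing each field through the priv->tx_dma_desc CSR window. A minimal reconstruction of that sequence is sketched below; the field names for the truncated lines (read_addr_lo/hi, stride, control) are inferred from the driver's local headers rather than quoted from the file.

/* Sketch only: relies on the driver-local helpers csrwr32() and
 * msgdma_descroffs() and the MSGDMA_* defines from the Altera TSE
 * headers (altera_tse.h, altera_msgdmahw.h). */
static int msgdma_tx_buffer_sketch(struct altera_tse_private *priv,
                                   struct tse_buffer *buffer)
{
        /* Source (read) side: the TX buffer's bus address. */
        csrwr32(lower_32_bits(buffer->dma_addr), priv->tx_dma_desc,
                msgdma_descroffs(read_addr_lo));
        csrwr32(upper_32_bits(buffer->dma_addr), priv->tx_dma_desc,
                msgdma_descroffs(read_addr_hi));
        /* No write (destination) address for a memory-to-MAC transfer. */
        csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(write_addr_lo));
        csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(write_addr_hi));
        csrwr32(buffer->len, priv->tx_dma_desc, msgdma_descroffs(len));
        csrwr32(0, priv->tx_dma_desc, msgdma_descroffs(burst_seq_num));
        csrwr32(MSGDMA_DESC_TX_STRIDE, priv->tx_dma_desc,
                msgdma_descroffs(stride));
        /* The control word is written last; MSGDMA_DESC_CTL_TX_SINGLE
         * marks the buffer as one complete frame and starts the transfer. */
        csrwr32(MSGDMA_DESC_CTL_TX_SINGLE, priv->tx_dma_desc,
                msgdma_descroffs(control));
        return 0;
}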
altera_sgdma.c
91 (void __force *)priv->tx_dma_desc, in sgdma_initialize()
101 memset_io(priv->tx_dma_desc, 0, priv->txdescmem); in sgdma_initialize()
130 memset_io(priv->tx_dma_desc, 0, priv->txdescmem); in sgdma_reset()
181 (struct sgdma_descrip __iomem *)priv->tx_dma_desc; in sgdma_tx_buffer()
216 ((csrrd8(priv->tx_dma_desc, sgdma_descroffs(control)) in sgdma_tx_completions()
421 uintptr_t offs = (uintptr_t)desc - (uintptr_t)priv->tx_dma_desc; in sgdma_txphysaddr()
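Line 421 shows the SGDMA variant translating a descriptor pointer inside the ioremapped tx_dma_desc window back into a bus address for the hardware: the byte offset of the descriptor within the window is added to the bus address of the descriptor memory. A sketch of that calculation, assuming the bus address lives in the txdescmem_busaddr field declared alongside tx_dma_desc in altera_tse.h:

/* Sketch: recover the bus address the SGDMA engine should be given for
 * a descriptor living in the ioremapped TX descriptor memory. */
static dma_addr_t sgdma_txphysaddr_sketch(struct altera_tse_private *priv,
                                          struct sgdma_descrip __iomem *desc)
{
        /* Offset of this descriptor from the start of the window ... */
        uintptr_t offs = (uintptr_t)desc - (uintptr_t)priv->tx_dma_desc;

        /* ... applied to the bus address of the same memory. */
        return (dma_addr_t)((uintptr_t)priv->txdescmem_busaddr + offs);
}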
altera_tse.h
435 void __iomem *tx_dma_desc; member
altera_tse_main.c
1397 priv->tx_dma_desc = descmap; in altera_tse_probe()
1430 &priv->tx_dma_desc); in altera_tse_probe()
/Linux-v4.19/drivers/dma/
at_xdmac.c
248 struct dma_async_tx_descriptor tx_dma_desc; member
286 return container_of(txd, struct at_xdmac_desc, tx_dma_desc); in txd_to_at_desc()
353 reg = AT_XDMAC_CNDA_NDA(first->tx_dma_desc.phys) in at_xdmac_start_xfer()
449 dma_async_tx_descriptor_init(&desc->tx_dma_desc, chan); in at_xdmac_alloc_desc()
450 desc->tx_dma_desc.tx_submit = at_xdmac_tx_submit; in at_xdmac_alloc_desc()
451 desc->tx_dma_desc.phys = phys; in at_xdmac_alloc_desc()
490 prev->lld.mbr_nda = desc->tx_dma_desc.phys; in at_xdmac_queue_desc()
722 first->tx_dma_desc.flags = flags; in at_xdmac_prep_slave_sg()
725 ret = &first->tx_dma_desc; in at_xdmac_prep_slave_sg()
776 __func__, desc, &desc->tx_dma_desc.phys); in at_xdmac_prep_dma_cyclic()
[all …]
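The at_xdmac.c hits show the standard dmaengine provider layout: the generic struct dma_async_tx_descriptor is embedded in the driver's own descriptor as tx_dma_desc (line 248), container_of() converts back from the generic handle (line 286), and at allocation time the embedded descriptor is initialised against the channel and given its tx_submit hook and bus address (lines 449 – 451). A condensed sketch of that pattern; the surrounding struct is a placeholder, not the real at_xdmac_desc layout:

#include <linux/dmaengine.h>
#include <linux/kernel.h>

/* Hypothetical, trimmed-down provider descriptor. */
struct my_dma_desc {
        struct dma_async_tx_descriptor  tx_dma_desc;   /* generic handle */
        dma_addr_t                      phys;          /* hw descriptor bus address */
        /* ... hardware linked-list item and bookkeeping would live here ... */
};

static inline struct my_dma_desc *
txd_to_my_dma_desc(struct dma_async_tx_descriptor *txd)
{
        return container_of(txd, struct my_dma_desc, tx_dma_desc);
}

/* Mirrors what at_xdmac_alloc_desc() does with the embedded member. */
static void my_dma_desc_init(struct my_dma_desc *desc, struct dma_chan *chan,
                             dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *),
                             dma_addr_t phys)
{
        dma_async_tx_descriptor_init(&desc->tx_dma_desc, chan);
        desc->tx_dma_desc.tx_submit = tx_submit;
        desc->tx_dma_desc.phys = phys;
}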
/Linux-v4.19/drivers/spi/
spi-stm32.c
759 struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc; in stm32_spi_transfer_one_dma() local
780 tx_dma_desc = NULL; in stm32_spi_transfer_one_dma()
785 tx_dma_desc = dmaengine_prep_slave_sg( in stm32_spi_transfer_one_dma()
792 if ((spi->tx_buf && !tx_dma_desc) || in stm32_spi_transfer_one_dma()
808 if (tx_dma_desc) { in stm32_spi_transfer_one_dma()
810 tx_dma_desc->callback = stm32_spi_dma_cb; in stm32_spi_transfer_one_dma()
811 tx_dma_desc->callback_param = spi; in stm32_spi_transfer_one_dma()
814 if (dma_submit_error(dmaengine_submit(tx_dma_desc))) { in stm32_spi_transfer_one_dma()
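stm32_spi_transfer_one_dma() is a textbook dmaengine consumer: prepare a slave descriptor for the TX scatterlist, bail out if preparation fails, attach a completion callback, submit, and check the returned cookie with dma_submit_error(). A self-contained sketch of that sequence follows (helper name, flags and error code are illustrative, not taken from spi-stm32.c); the Tegra SPI, Tegra serial and SiRF UART hits below follow the same steps with dmaengine_prep_slave_single() for a single contiguous buffer.

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: prepare, hook and submit one TX slave transfer. */
static int submit_tx_dma_sketch(struct dma_chan *chan,
                                struct scatterlist *sgl, unsigned int nents,
                                void (*done)(void *arg), void *arg)
{
        struct dma_async_tx_descriptor *tx_dma_desc;

        tx_dma_desc = dmaengine_prep_slave_sg(chan, sgl, nents,
                                              DMA_MEM_TO_DEV,
                                              DMA_PREP_INTERRUPT);
        if (!tx_dma_desc)
                return -EIO;            /* channel could not build a descriptor */

        tx_dma_desc->callback = done;
        tx_dma_desc->callback_param = arg;

        if (dma_submit_error(dmaengine_submit(tx_dma_desc)))
                return -EIO;            /* submission rejected */

        dma_async_issue_pending(chan);  /* actually start the queued work */
        return 0;
}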
spi-tegra20-slink.c
215 struct dma_async_tx_descriptor *tx_dma_desc; member
447 tspi->tx_dma_desc = dmaengine_prep_slave_single(tspi->tx_dma_chan, in tegra_slink_start_tx_dma()
450 if (!tspi->tx_dma_desc) { in tegra_slink_start_tx_dma()
455 tspi->tx_dma_desc->callback = tegra_slink_dma_complete; in tegra_slink_start_tx_dma()
456 tspi->tx_dma_desc->callback_param = &tspi->tx_dma_complete; in tegra_slink_start_tx_dma()
458 dmaengine_submit(tspi->tx_dma_desc); in tegra_slink_start_tx_dma()
spi-tegra114.c
213 struct dma_async_tx_descriptor *tx_dma_desc; member
438 tspi->tx_dma_desc = dmaengine_prep_slave_single(tspi->tx_dma_chan, in tegra_spi_start_tx_dma()
441 if (!tspi->tx_dma_desc) { in tegra_spi_start_tx_dma()
446 tspi->tx_dma_desc->callback = tegra_spi_dma_complete; in tegra_spi_start_tx_dma()
447 tspi->tx_dma_desc->callback_param = &tspi->tx_dma_complete; in tegra_spi_start_tx_dma()
449 dmaengine_submit(tspi->tx_dma_desc); in tegra_spi_start_tx_dma()
/Linux-v4.19/drivers/tty/serial/
serial-tegra.c
119 struct dma_async_tx_descriptor *tx_dma_desc; member
399 async_tx_ack(tup->tx_dma_desc); in tegra_uart_tx_dma_complete()
420 tup->tx_dma_desc = dmaengine_prep_slave_single(tup->tx_dma_chan, in tegra_uart_start_tx_dma()
423 if (!tup->tx_dma_desc) { in tegra_uart_start_tx_dma()
428 tup->tx_dma_desc->callback = tegra_uart_tx_dma_complete; in tegra_uart_start_tx_dma()
429 tup->tx_dma_desc->callback_param = tup; in tegra_uart_start_tx_dma()
432 tup->tx_cookie = dmaengine_submit(tup->tx_dma_desc); in tegra_uart_start_tx_dma()
495 async_tx_ack(tup->tx_dma_desc); in tegra_uart_stop_tx()
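serial-tegra.c adds the two pieces the SPI hits above leave implicit: the cookie returned by dmaengine_submit() is kept (line 432) so the transfer can later be tracked or terminated, and async_tx_ack() releases the descriptor both in the completion handler (line 399) and in the stop path (line 495). A sketch of that lifecycle under the same caveats (hypothetical state struct and helper names):

#include <linux/dmaengine.h>
#include <linux/errno.h>

/* Hypothetical per-port TX DMA state. */
struct uart_tx_dma_sketch {
        struct dma_chan                 *chan;
        struct dma_async_tx_descriptor  *tx_dma_desc;
        dma_cookie_t                    tx_cookie;
};

static int uart_start_tx_dma_sketch(struct uart_tx_dma_sketch *s,
                                    dma_addr_t buf, size_t len,
                                    void (*complete)(void *arg))
{
        s->tx_dma_desc = dmaengine_prep_slave_single(s->chan, buf, len,
                                                     DMA_MEM_TO_DEV,
                                                     DMA_PREP_INTERRUPT);
        if (!s->tx_dma_desc)
                return -EIO;

        s->tx_dma_desc->callback = complete;
        s->tx_dma_desc->callback_param = s;
        /* Keep the cookie so the transfer can be queried or torn down. */
        s->tx_cookie = dmaengine_submit(s->tx_dma_desc);
        dma_async_issue_pending(s->chan);
        return 0;
}

static void uart_stop_tx_dma_sketch(struct uart_tx_dma_sketch *s)
{
        dmaengine_terminate_all(s->chan);
        /* Mark the descriptor acknowledged so the provider may reuse it. */
        async_tx_ack(s->tx_dma_desc);
}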
sirfsoc_uart.c
243 sirfport->tx_dma_desc = dmaengine_prep_slave_single( in sirfsoc_uart_tx_with_dma()
246 if (!sirfport->tx_dma_desc) { in sirfsoc_uart_tx_with_dma()
250 sirfport->tx_dma_desc->callback = in sirfsoc_uart_tx_with_dma()
252 sirfport->tx_dma_desc->callback_param = (void *)sirfport; in sirfsoc_uart_tx_with_dma()
255 dmaengine_submit(sirfport->tx_dma_desc); in sirfsoc_uart_tx_with_dma()
sirfsoc_uart.h
424 struct dma_async_tx_descriptor *tx_dma_desc; member