
Searched refs:dma_tx (Results 1 – 10 of 10) sorted by relevance

/Zephyr-Core-3.5.0/drivers/serial/
uart_xmc4xxx.c
69 struct uart_dma_stream dma_tx; member
462 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_done()
463 .data.tx.len = data->dma_tx.counter}; in async_evt_tx_done()
465 data->dma_tx.buffer = NULL; in async_evt_tx_done()
466 data->dma_tx.buffer_len = 0; in async_evt_tx_done()
467 data->dma_tx.counter = 0; in async_evt_tx_done()
477 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_abort()
478 .data.tx.len = data->dma_tx.counter}; in async_evt_tx_abort()
480 data->dma_tx.buffer = NULL; in async_evt_tx_abort()
481 data->dma_tx.buffer_len = 0; in async_evt_tx_abort()
[all …]
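
The hits above trace the common shape of an async-UART TX-done handler: the completed buffer and transferred byte count are packed into a uart_event, and the dma_tx bookkeeping is cleared before the next transfer can start. Below is a minimal sketch of that pattern; struct my_dma_stream, struct my_uart_data, and the callback fields are illustrative stand-ins, not the xmc4xxx driver's actual internals.

#include <zephyr/drivers/uart.h>

/* Illustrative per-instance TX stream state, mirroring uart_dma_stream. */
struct my_dma_stream {
    const uint8_t *buffer;
    size_t buffer_len;
    size_t counter;          /* bytes actually moved by the DMA */
};

struct my_uart_data {
    struct my_dma_stream dma_tx;
    uart_callback_t async_cb;
    void *async_user_data;
};

/* Report TX completion to the application, then reset the stream state. */
static void sketch_evt_tx_done(const struct device *dev, struct my_uart_data *data)
{
    struct uart_event event = {
        .type = UART_TX_DONE,
        .data.tx.buf = data->dma_tx.buffer,
        .data.tx.len = data->dma_tx.counter,
    };

    data->dma_tx.buffer = NULL;
    data->dma_tx.buffer_len = 0;
    data->dma_tx.counter = 0;

    if (data->async_cb) {
        data->async_cb(dev, &event, data->async_user_data);
    }
}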
uart_stm32.c
1125 LOG_DBG("tx done: %d", data->dma_tx.counter); in async_evt_tx_done()
1129 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_done()
1130 .data.tx.len = data->dma_tx.counter in async_evt_tx_done()
1134 data->dma_tx.buffer_length = 0; in async_evt_tx_done()
1135 data->dma_tx.counter = 0; in async_evt_tx_done()
1142 LOG_DBG("tx abort: %d", data->dma_tx.counter); in async_evt_tx_abort()
1146 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_abort()
1147 .data.tx.len = data->dma_tx.counter in async_evt_tx_abort()
1151 data->dma_tx.buffer_length = 0; in async_evt_tx_abort()
1152 data->dma_tx.counter = 0; in async_evt_tx_abort()
[all …]
uart_stm32.h
101 struct uart_dma_stream dma_tx; member
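
uart_stm32.c follows the same done/abort split; the abort path differs mainly in the event type and in the channel being stopped so that .data.tx.len reports only the bytes moved before cancellation. A sketch of the abort side, reusing the illustrative types from the previous block; in the real driver the dma_stop happens in the abort entry point rather than in the event helper, and folding it in here is a simplification:

#include <zephyr/drivers/dma.h>
#include <zephyr/logging/log.h>

LOG_MODULE_REGISTER(dma_tx_sketch, LOG_LEVEL_DBG);

/* Stop the channel, then report UART_TX_ABORTED with the partial count. */
static void sketch_evt_tx_abort(const struct device *dev, struct my_uart_data *data,
                                const struct device *dma_dev, uint32_t channel)
{
    struct uart_event event = {
        .type = UART_TX_ABORTED,
        .data.tx.buf = data->dma_tx.buffer,
        .data.tx.len = data->dma_tx.counter,
    };

    dma_stop(dma_dev, channel);
    LOG_DBG("tx abort: %d", (int)data->dma_tx.counter);

    data->dma_tx.buffer = NULL;
    data->dma_tx.buffer_len = 0;
    data->dma_tx.counter = 0;

    if (data->async_cb) {
        data->async_cb(dev, &event, data->async_user_data);
    }
}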
/Zephyr-Core-3.5.0/drivers/spi/
spi_xmc4xxx.c
61 struct spi_xmc4xxx_dma_stream dma_tx; member
80 if (dev_dma == data->dma_tx.dev_dma && dma_channel == data->dma_tx.dma_channel) { in spi_xmc4xxx_dma_callback()
353 struct spi_xmc4xxx_dma_stream *dma_tx = &data->dma_tx; in spi_xmc4xxx_transceive_dma() local
428 dma_tx->blk_cfg.source_address = (uint32_t)ctx->tx_buf; in spi_xmc4xxx_transceive_dma()
429 dma_tx->blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in spi_xmc4xxx_transceive_dma()
431 dma_tx->blk_cfg.source_address = (uint32_t)&tx_dummy_data; in spi_xmc4xxx_transceive_dma()
432 dma_tx->blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in spi_xmc4xxx_transceive_dma()
435 dma_tx->blk_cfg.block_size = dma_len; in spi_xmc4xxx_transceive_dma()
437 ret = dma_config(dma_tx->dev_dma, dma_tx->dma_channel, &dma_tx->dma_cfg); in spi_xmc4xxx_transceive_dma()
448 ret = dma_start(dma_tx->dev_dma, dma_tx->dma_channel); in spi_xmc4xxx_transceive_dma()
[all …]
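
Lines 428–437 show a standard SPI idiom: with no TX buffer supplied, the driver clocks out a fixed dummy byte by pointing the DMA source at a static variable and disabling source-address increment. A hedged sketch of that block setup against Zephyr's DMA API; the helper name and parameters are illustrative:

#include <zephyr/drivers/dma.h>

static uint8_t tx_dummy_data; /* fill pattern clocked out for RX-only transfers */

/* Aim the TX block at the user buffer, or at the dummy byte when there is none,
 * then configure and start the channel. */
static int sketch_tx_dma_load(const struct device *dma_dev, uint32_t channel,
                              struct dma_config *dma_cfg, struct dma_block_config *blk_cfg,
                              const uint8_t *tx_buf, size_t dma_len)
{
    int ret;

    if (tx_buf != NULL) {
        blk_cfg->source_address = (uint32_t)tx_buf;
        blk_cfg->source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
    } else {
        blk_cfg->source_address = (uint32_t)&tx_dummy_data;
        blk_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
    }
    blk_cfg->block_size = dma_len;
    dma_cfg->head_block = blk_cfg;

    ret = dma_config(dma_dev, channel, dma_cfg);
    if (ret < 0) {
        return ret;
    }

    return dma_start(dma_dev, channel);
}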
spi_mcux_lpspi.c
62 struct stream dma_tx; member
267 if (channel == data->dma_tx.channel) { in spi_mcux_dma_callback()
312 struct stream *stream = &data->dma_tx; in spi_mcux_dma_tx_load()
342 return dma_config(data->dma_tx.dma_dev, data->dma_tx.channel, in spi_mcux_dma_tx_load()
438 ret = dma_start(lpspi_data->dma_tx.dma_dev, in spi_mcux_dma_rxtx_load()
439 lpspi_data->dma_tx.channel); in spi_mcux_dma_rxtx_load()
574 if (data->dma_rx.dma_dev && data->dma_tx.dma_dev) { in spi_mcux_transceive()
593 if (data->dma_rx.dma_dev && data->dma_tx.dma_dev) { in spi_mcux_transceive_async()
632 if (data->dma_tx.dma_dev && data->dma_rx.dma_dev) { in spi_mcux_init()
633 if (!device_is_ready(data->dma_tx.dma_dev)) { in spi_mcux_init()
[all …]
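
The init hits (lines 632–633) show the usual guard for optional DMA: only when both TX and RX streams were assigned in devicetree does the driver verify the DMA controllers are ready before finishing init. A minimal sketch, assuming a hypothetical data struct with the same two stream members:

#include <errno.h>
#include <zephyr/device.h>

struct my_spi_stream {
    const struct device *dma_dev;
    uint32_t channel;
};

struct my_spi_data {
    struct my_spi_stream dma_tx;
    struct my_spi_stream dma_rx;
};

/* Fail init early if DMA was requested but a controller is not ready. */
static int sketch_spi_init_dma_check(struct my_spi_data *data)
{
    if (data->dma_tx.dma_dev && data->dma_rx.dma_dev) {
        if (!device_is_ready(data->dma_tx.dma_dev) ||
            !device_is_ready(data->dma_rx.dma_dev)) {
            return -ENODEV;
        }
    }
    return 0;
}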
spi_ll_stm32.c
114 if (channel == data->dma_tx.channel) { in dma_callback()
139 struct stream *stream = &data->dma_tx; in spi_stm32_dma_tx_load()
158 if (data->dma_tx.src_addr_increment) { in spi_stm32_dma_tx_load()
167 if (data->dma_tx.dst_addr_increment) { in spi_stm32_dma_tx_load()
174 blk_cfg->fifo_mode_control = data->dma_tx.fifo_threshold; in spi_stm32_dma_tx_load()
181 ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.channel, in spi_stm32_dma_tx_load()
189 return dma_start(data->dma_tx.dma_dev, data->dma_tx.channel); in spi_stm32_dma_tx_load()
264 dma_segment_len = len * data->dma_tx.dma_cfg.source_data_size; in spi_dma_move_buffers()
931 dma_stop(data->dma_tx.dma_dev, data->dma_tx.channel); in transceive_dma()
954 if ((data->dma_tx.dma_dev != NULL) in spi_stm32_transceive()
[all …]
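
spi_stm32_dma_tx_load (lines 139–189) maps per-stream devicetree settings into the block config: source/destination increment flags and the FIFO threshold come from the stream struct, then dma_config and dma_start run back to back. A sketch of that translation with illustrative struct and field names:

#include <string.h>
#include <zephyr/drivers/dma.h>

struct my_stream {
    const struct device *dma_dev;
    uint32_t channel;
    struct dma_config dma_cfg;
    struct dma_block_config blk_cfg;
    bool src_addr_increment;
    bool dst_addr_increment;
    int fifo_threshold;
};

/* Build one TX block from the stream's devicetree-derived settings and arm it. */
static int sketch_stm32_dma_tx_load(struct my_stream *stream, const uint8_t *buf, size_t len)
{
    struct dma_block_config *blk_cfg = &stream->blk_cfg;
    int ret;

    memset(blk_cfg, 0, sizeof(*blk_cfg));
    blk_cfg->block_size = len;
    blk_cfg->source_address = (uint32_t)buf;
    blk_cfg->source_addr_adj = stream->src_addr_increment ?
                               DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;
    blk_cfg->dest_addr_adj = stream->dst_addr_increment ?
                             DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;
    blk_cfg->fifo_mode_control = stream->fifo_threshold;
    stream->dma_cfg.head_block = blk_cfg;

    ret = dma_config(stream->dma_dev, stream->channel, &stream->dma_cfg);
    if (ret != 0) {
        return ret;
    }

    return dma_start(stream->dma_dev, stream->channel);
}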
spi_mcux_flexcomm.c
65 struct stream dma_tx; member
313 if (channel == data->dma_tx.channel) { in spi_mcux_dma_callback()
385 struct stream *stream = &data->dma_tx; in spi_mcux_dma_tx_load()
466 ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.channel, in spi_mcux_dma_tx_load()
493 return dma_start(data->dma_tx.dma_dev, data->dma_tx.channel); in spi_mcux_dma_tx_load()
611 data->dma_tx.dma_cfg.source_data_size = data_size; in transceive_dma()
612 data->dma_tx.dma_cfg.dest_data_size = data_size; in transceive_dma()
781 if (!device_is_ready(data->dma_tx.dma_dev)) { in spi_mcux_init()
782 LOG_ERR("%s device is not ready", data->dma_tx.dma_dev->name); in spi_mcux_init()
829 .dma_tx = { \
spi_ll_stm32.h
64 struct stream dma_tx; member
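
The flexcomm hits at lines 611–612 make a subtler point: the DMA access width has to track the negotiated SPI word size on every transceive, since dma_config is re-run per transfer. A small sketch of deriving that width from the active spi_config (the helper name is illustrative):

#include <zephyr/drivers/dma.h>
#include <zephyr/drivers/spi.h>

/* Re-derive the DMA access width (in bytes) from the current SPI word size. */
static void sketch_update_dma_data_size(struct dma_config *tx_cfg,
                                        const struct spi_config *spi_cfg)
{
    uint8_t data_size = SPI_WORD_SIZE_GET(spi_cfg->operation) > 8 ? 2 : 1;

    tx_cfg->source_data_size = data_size;
    tx_cfg->dest_data_size = data_size;
}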
/Zephyr-Core-3.5.0/subsys/mgmt/ec_host_cmd/backends/
ec_host_cmd_backend_spi_stm32.c
151 struct dma_stream *dma_tx; member
198 static struct dma_stream dma_tx = {SPI_DMA_CHANNEL_INIT(id, tx, TX, MEMORY, PERIPHERAL)}
205 .dma_tx = &dma_tx, \
260 if (channel == hc_spi->dma_tx->channel) { in dma_callback()
318 if ((hc_spi->dma_tx->dma_dev != NULL) && !device_is_ready(hc_spi->dma_tx->dma_dev)) { in spi_init()
319 LOG_ERR("%s device not ready", hc_spi->dma_tx->dma_dev->name); in spi_init()
367 ret = dma_reload(hc_spi->dma_tx->dma_dev, hc_spi->dma_tx->channel, (uint32_t)hc_spi->tx_buf, in reload_dma_tx()
374 ret = dma_start(hc_spi->dma_tx->dma_dev, hc_spi->dma_tx->channel); in reload_dma_tx()
387 struct dma_stream *stream = hc_spi->dma_tx; in spi_config_dma_tx()
403 blk_cfg->fifo_mode_control = hc_spi->dma_tx->fifo_threshold; in spi_config_dma_tx()
[all …]
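
reload_dma_tx (lines 367–374) takes a shortcut the SPI drivers above do not: because the channel is fully configured once up front, each new response only needs dma_reload with a fresh source address and length before dma_start. A sketch of that re-arm step; the peripheral register address is passed in here as an assumption, since the real backend fixes it at config time:

#include <zephyr/drivers/dma.h>

/* Re-arm an already-configured TX channel for a new buffer, then start it. */
static int sketch_reload_dma_tx(const struct device *dma_dev, uint32_t channel,
                                const uint8_t *tx_buf, size_t len, uint32_t periph_reg)
{
    int ret;

    /* Only source, destination, and size change between transfers. */
    ret = dma_reload(dma_dev, channel, (uint32_t)tx_buf, periph_reg, len);
    if (ret != 0) {
        return ret;
    }

    return dma_start(dma_dev, channel);
}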
/Zephyr-Core-3.5.0/drivers/disk/
sdmmc_stm32.c
83 struct sdmmc_dma_stream dma_tx; member
235 err = stm32_sdmmc_configure_dma(&dma_tx_handle, &priv->dma_tx); in stm32_sdmmc_dma_init()
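
The SD/MMC driver pairs each direction's Zephyr-described stream with an STM32 HAL DMA handle at init. The sketch below captures only the structural shape; sketch_configure_dma stands in for stm32_sdmmc_configure_dma, whose real job also binds the channel to the HAL handle, and every type and name here is illustrative:

#include <errno.h>
#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>

struct my_sdmmc_dma_stream {
    const struct device *dev;
    uint32_t channel;
    struct dma_config cfg;
};

/* Stand-in for stm32_sdmmc_configure_dma(): validate and program one stream. */
static int sketch_configure_dma(struct my_sdmmc_dma_stream *stream)
{
    if (!device_is_ready(stream->dev)) {
        return -ENODEV;
    }
    return dma_config(stream->dev, stream->channel, &stream->cfg);
}

/* Init-time flow: configure TX first, then RX, failing on the first error. */
static int sketch_sdmmc_dma_init(struct my_sdmmc_dma_stream *tx,
                                 struct my_sdmmc_dma_stream *rx)
{
    int err = sketch_configure_dma(tx);

    if (err == 0) {
        err = sketch_configure_dma(rx);
    }
    return err;
}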