Searched refs:dma_rx (Results 1 – 10 of 10) sorted by relevance
/Zephyr-Core-3.5.0/drivers/serial/
uart_xmc4xxx.c
      68: struct uart_dma_stream dma_rx;   (member)
     163: if (data->dma_rx.buffer_len) {   (in uart_xmc4xxx_isr())
     171: async_timer_start(&data->dma_rx.timeout_work, data->dma_rx.timeout);   (in uart_xmc4xxx_isr())
     412: if (buffer_type == CURRENT_BUFFER && !data->dma_rx.buffer) {   (in async_evt_rx_release_buffer())
     421: event.data.rx_buf.buf = data->dma_rx.buffer;   (in async_evt_rx_release_buffer())
     422: data->dma_rx.buffer = NULL;   (in async_evt_rx_release_buffer())
     423: data->dma_rx.buffer_len = 0;   (in async_evt_rx_release_buffer())
     435: data->dma_rx.buffer = NULL;   (in async_evt_rx_disabled())
     436: data->dma_rx.buffer_len = 0;   (in async_evt_rx_disabled())
     437: data->dma_rx.offset = 0;   (in async_evt_rx_disabled())
     [all …]
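These uart_xmc4xxx.c hits are the driver side of Zephyr's async UART API: dma_rx.buffer and dma_rx.buffer_len are cleared when a buffer is released or reception is disabled, and each transition surfaces to the application as a uart_event. A minimal application-side sketch of consuming those events; the double-buffer scheme and buffer sizes are assumptions, not taken from the driver:

    #include <zephyr/drivers/uart.h>

    static uint8_t rx_bufs[2][64];   /* assumed double-buffer scheme */

    static void uart_cb(const struct device *dev, struct uart_event *evt,
                        void *user_data)
    {
        switch (evt->type) {
        case UART_RX_BUF_REQUEST:
            /* Driver asks for the next buffer before the current one fills;
             * a real application alternates between its buffers here. */
            uart_rx_buf_rsp(dev, rx_bufs[1], sizeof(rx_bufs[1]));
            break;
        case UART_RX_BUF_RELEASED:
            /* evt->data.rx_buf.buf is the buffer the driver just gave back,
             * matching async_evt_rx_release_buffer() above. */
            break;
        case UART_RX_DISABLED:
            /* Driver has cleared its dma_rx bookkeeping (buffer, len, offset). */
            break;
        default:
            break;
        }
    }

Registration is the usual pair: uart_callback_set(dev, uart_cb, NULL), then uart_rx_enable(dev, rx_bufs[0], sizeof(rx_bufs[0]), timeout_us).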
uart_stm32.c
    1090: LOG_DBG("rx_rdy: (%d %d)", data->dma_rx.offset, data->dma_rx.counter);   (in async_evt_rx_rdy())
    1094: .data.rx.buf = data->dma_rx.buffer,   (in async_evt_rx_rdy())
    1095: .data.rx.len = data->dma_rx.counter - data->dma_rx.offset,   (in async_evt_rx_rdy())
    1096: .data.rx.offset = data->dma_rx.offset   (in async_evt_rx_rdy())
    1100: data->dma_rx.offset = data->dma_rx.counter;   (in async_evt_rx_rdy())
    1115: .data.rx_stop.data.len = data->dma_rx.counter,   (in async_evt_rx_err())
    1117: .data.rx_stop.data.buf = data->dma_rx.buffer   (in async_evt_rx_err())
    1170: .data.rx_buf.buf = data->dma_rx.buffer,   (in async_evt_rx_buf_release())
    1191: if (dma_get_status(data->dma_rx.dma_dev,   (in uart_stm32_dma_rx_flush())
    1192: data->dma_rx.dma_channel, &stat) == 0) {   (in uart_stm32_dma_rx_flush())
    [all …]
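The uart_stm32.c offset/counter pair is the core of the async RX bookkeeping: counter is how far DMA has written into the buffer, offset is how much has already been reported, and each UART_RX_RDY event covers the difference. A condensed sketch of that logic; the struct here is a simplified stand-in for the driver's uart_dma_stream, not its real definition:

    #include <zephyr/drivers/uart.h>

    struct rx_stream {                /* simplified stand-in */
        uint8_t *buffer;
        size_t offset;                /* bytes already reported */
        size_t counter;               /* bytes DMA has written so far */
    };

    static void report_rx_rdy(const struct device *uart, struct rx_stream *rx,
                              uart_callback_t cb, void *user_data)
    {
        if (rx->counter == rx->offset) {
            return;                   /* nothing new to report */
        }

        struct uart_event evt = {
            .type = UART_RX_RDY,
            .data.rx = {
                .buf = rx->buffer,
                .len = rx->counter - rx->offset,
                .offset = rx->offset,
            },
        };

        rx->offset = rx->counter;     /* mirrors line 1100 above */

        if (cb != NULL) {
            cb(uart, &evt, user_data);
        }
    }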
uart_stm32.h
     100: struct uart_dma_stream dma_rx;   (member)
/Zephyr-Core-3.5.0/drivers/spi/
spi_xmc4xxx.c
      60: struct spi_xmc4xxx_dma_stream dma_rx;   (member)
      82: } else if (dev_dma == data->dma_rx.dev_dma &&   (in spi_xmc4xxx_dma_callback())
      83: dma_channel == data->dma_rx.dma_channel) {   (in spi_xmc4xxx_dma_callback())
     354: struct spi_xmc4xxx_dma_stream *dma_rx = &data->dma_rx;   (in spi_xmc4xxx_transceive_dma(), local)
     402: dma_rx->blk_cfg.dest_address = (uint32_t)ctx->rx_buf;   (in spi_xmc4xxx_transceive_dma())
     403: dma_rx->blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;   (in spi_xmc4xxx_transceive_dma())
     404: dma_rx->blk_cfg.block_size = dma_len;   (in spi_xmc4xxx_transceive_dma())
     405: dma_rx->blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;   (in spi_xmc4xxx_transceive_dma())
     407: ret = dma_config(dma_rx->dev_dma, dma_rx->dma_channel, &dma_rx->dma_cfg);   (in spi_xmc4xxx_transceive_dma())
     416: ret = dma_start(dma_rx->dev_dma, dma_rx->dma_channel);   (in spi_xmc4xxx_transceive_dma())
     [all …]
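The spi_xmc4xxx.c snippets follow the standard Zephyr DMA recipe for a peripheral-to-memory RX stream: fixed source address (the peripheral FIFO), incrementing destination, then dma_config() plus dma_start(). A generic sketch of that recipe; the FIFO address, channel number, and byte-wide data sizes are placeholders, not values from this driver:

    #include <zephyr/drivers/dma.h>

    static int start_rx_dma(const struct device *dma_dev, uint32_t channel,
                            uint32_t periph_fifo_addr, uint8_t *dst, size_t len)
    {
        struct dma_block_config blk = {
            .source_address = periph_fifo_addr,
            .dest_address = (uint32_t)dst,
            .block_size = len,
            .source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,  /* FIFO stays put */
            .dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,    /* fill the buffer */
        };
        struct dma_config cfg = {
            .channel_direction = PERIPHERAL_TO_MEMORY,
            .source_data_size = 1,
            .dest_data_size = 1,
            .block_count = 1,
            .head_block = &blk,
        };
        int ret = dma_config(dma_dev, channel, &cfg);

        if (ret < 0) {
            return ret;
        }
        return dma_start(dma_dev, channel);
    }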
spi_mcux_lpspi.c
      61: struct stream dma_rx;   (member)
     271: } else if (channel == data->dma_rx.channel) {   (in spi_mcux_dma_callback())
     355: struct stream *stream = &data->dma_rx;   (in spi_mcux_dma_rx_load())
     384: return dma_config(data->dma_rx.dma_dev, data->dma_rx.channel,   (in spi_mcux_dma_rx_load())
     444: ret = dma_start(lpspi_data->dma_rx.dma_dev,   (in spi_mcux_dma_rxtx_load())
     445: lpspi_data->dma_rx.channel);   (in spi_mcux_dma_rxtx_load())
     574: if (data->dma_rx.dma_dev && data->dma_tx.dma_dev) {   (in spi_mcux_transceive())
     593: if (data->dma_rx.dma_dev && data->dma_tx.dma_dev) {   (in spi_mcux_transceive_async())
     632: if (data->dma_tx.dma_dev && data->dma_rx.dma_dev) {   (in spi_mcux_init())
     638: if (!device_is_ready(data->dma_rx.dma_dev)) {   (in spi_mcux_init())
     [all …]
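The spi_mcux_dma_callback() hit at line 271 shows the usual full-duplex SPI pattern: one DMA callback serves both directions and tells TX from RX by channel number. A sketch of that dispatch; the struct layout and semaphore signaling are assumptions for illustration:

    #include <zephyr/drivers/dma.h>
    #include <zephyr/kernel.h>

    struct spi_dma_data {             /* simplified stand-in */
        uint32_t tx_channel;
        uint32_t rx_channel;
        struct k_sem tx_done;
        struct k_sem rx_done;
    };

    static void spi_dma_callback(const struct device *dma_dev, void *user_data,
                                 uint32_t channel, int status)
    {
        struct spi_dma_data *data = user_data;

        if (status < 0) {
            return;                   /* a real driver records the error */
        }

        if (channel == data->tx_channel) {
            k_sem_give(&data->tx_done);
        } else if (channel == data->rx_channel) {
            k_sem_give(&data->rx_done);
        }
    }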
spi_ll_stm32.c
     117: } else if (channel == data->dma_rx.channel) {   (in dma_callback())
     201: struct stream *stream = &data->dma_rx;   (in spi_stm32_dma_rx_load())
     217: if (data->dma_rx.dst_addr_increment) {   (in spi_stm32_dma_rx_load())
     225: if (data->dma_rx.src_addr_increment) {   (in spi_stm32_dma_rx_load())
     232: blk_cfg->fifo_mode_control = data->dma_rx.fifo_threshold;   (in spi_stm32_dma_rx_load())
     240: ret = dma_config(data->dma_rx.dma_dev, data->dma_rx.channel,   (in spi_stm32_dma_rx_load())
     248: return dma_start(data->dma_rx.dma_dev, data->dma_rx.channel);   (in spi_stm32_dma_rx_load())
     257: dma_segment_len = len * data->dma_rx.dma_cfg.dest_data_size;   (in spi_dma_move_buffers())
     930: dma_stop(data->dma_rx.dma_dev, data->dma_rx.channel);   (in transceive_dma())
     955: && (data->dma_rx.dma_dev != NULL)) {   (in spi_stm32_transceive())
     [all …]
spi_mcux_flexcomm.c
      64: struct stream dma_rx;   (member)
     316: } else if (channel == data->dma_rx.channel) {   (in spi_mcux_dma_callback())
     506: struct stream *stream = &data->dma_rx;   (in spi_mcux_dma_rx_load())
     533: ret = dma_config(data->dma_rx.dma_dev, data->dma_rx.channel,   (in spi_mcux_dma_rx_load())
     541: return dma_start(data->dma_rx.dma_dev, data->dma_rx.channel);   (in spi_mcux_dma_rx_load())
     609: data->dma_rx.dma_cfg.source_data_size = data_size;   (in transceive_dma())
     610: data->dma_rx.dma_cfg.dest_data_size = data_size;   (in transceive_dma())
     786: if (!device_is_ready(data->dma_rx.dma_dev)) {   (in spi_mcux_init())
     787: LOG_ERR("%s device is not ready", data->dma_rx.dma_dev->name);   (in spi_mcux_init())
     839: .dma_rx = { \
spi_ll_stm32.h
      63: struct stream dma_rx;   (member)
/Zephyr-Core-3.5.0/subsys/mgmt/ec_host_cmd/backends/ |
ec_host_cmd_backend_spi_stm32.c
     150: struct dma_stream *dma_rx;   (member)
     197: static struct dma_stream dma_rx = {SPI_DMA_CHANNEL_INIT(id, rx, RX, PERIPHERAL, MEMORY)}; \
     204: .dma_rx = &dma_rx, \
     313: if ((hc_spi->dma_rx->dma_dev != NULL) && !device_is_ready(hc_spi->dma_rx->dma_dev)) {   (in spi_init())
     314: LOG_ERR("%s device not ready", hc_spi->dma_rx->dma_dev->name);   (in spi_init())
     425: ret = dma_reload(hc_spi->dma_rx->dma_dev, hc_spi->dma_rx->channel, dma_source_addr(spi),   (in reload_dma_rx())
     431: ret = dma_start(hc_spi->dma_rx->dma_dev, hc_spi->dma_rx->channel);   (in reload_dma_rx())
     444: struct dma_stream *stream = hc_spi->dma_rx;   (in spi_config_dma_rx())
     461: blk_cfg->fifo_mode_control = hc_spi->dma_rx->fifo_threshold;   (in spi_config_dma_rx())
     467: ret = dma_config(hc_spi->dma_rx->dma_dev, hc_spi->dma_rx->channel, &stream->dma_cfg);   (in spi_config_dma_rx())
     [all …]
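The reload_dma_rx() hits show re-arming an already-configured channel: instead of repeating the full dma_config(), dma_reload() swaps in new addresses and a new size, and dma_start() restarts the transfer. A minimal sketch of that re-arm path; the peripheral address and buffer arguments are placeholders:

    #include <zephyr/drivers/dma.h>

    static int rearm_rx(const struct device *dma_dev, uint32_t channel,
                        uint32_t periph_src_addr, uint8_t *buf, size_t len)
    {
        int ret = dma_reload(dma_dev, channel, periph_src_addr,
                             (uint32_t)buf, len);

        if (ret < 0) {
            return ret;
        }
        return dma_start(dma_dev, channel);
    }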
/Zephyr-Core-3.5.0/drivers/disk/ |
sdmmc_stm32.c
      82: struct sdmmc_dma_stream dma_rx;   (member)
     244: err = stm32_sdmmc_configure_dma(&dma_rx_handle, &priv->dma_rx);   (in stm32_sdmmc_dma_init())