/Zephyr-latest/drivers/serial/
uart_xmc4xxx.c
  68    struct uart_dma_stream dma_rx;  member
  163   if (data->dma_rx.buffer_len) {  in uart_xmc4xxx_isr()
  171   async_timer_start(&data->dma_rx.timeout_work, data->dma_rx.timeout);  in uart_xmc4xxx_isr()
  412   if (buffer_type == CURRENT_BUFFER && !data->dma_rx.buffer) {  in async_evt_rx_release_buffer()
  421   event.data.rx_buf.buf = data->dma_rx.buffer;  in async_evt_rx_release_buffer()
  422   data->dma_rx.buffer = NULL;  in async_evt_rx_release_buffer()
  423   data->dma_rx.buffer_len = 0;  in async_evt_rx_release_buffer()
  438   if (data->dma_rx.buffer_len == 0 || data->async_cb == NULL) {  in async_evt_rx_stopped()
  442   rx->buf = data->dma_rx.buffer;  in async_evt_rx_stopped()
  443   if (dma_get_status(data->dma_rx.dma_dev, data->dma_rx.dma_channel, &stat) == 0) {  in async_evt_rx_stopped()
  [all …]
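The fragments above show the XMC4XXX UART's async RX path handing a DMA buffer back to the application. Below is a minimal, hypothetical sketch of that buffer-release step against Zephyr's async UART event API; the names my_uart_data and my_evt_rx_release_buffer are illustrative, not the driver's own.

    #include <zephyr/drivers/uart.h>

    /* Hypothetical per-instance state, mirroring the dma_rx bookkeeping
     * in the fragments above.
     */
    struct my_uart_data {
        const struct device *dev;
        uart_callback_t async_cb;
        void *async_user_data;
        uint8_t *rx_buffer;
        size_t rx_buffer_len;
    };

    /* Hand the current RX buffer back to the application and clear the
     * slot, which must happen before DMA reception can restart on a new
     * buffer.
     */
    static void my_evt_rx_release_buffer(struct my_uart_data *data)
    {
        struct uart_event event = {
            .type = UART_RX_BUF_RELEASED,
            .data.rx_buf.buf = data->rx_buffer,
        };

        if (data->rx_buffer == NULL || data->async_cb == NULL) {
            return;
        }

        data->rx_buffer = NULL;
        data->rx_buffer_len = 0;
        data->async_cb(data->dev, &event, data->async_user_data);
    }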
uart_stm32.c
  1111  LOG_DBG("rx_rdy: (%d %d)", data->dma_rx.offset, data->dma_rx.counter);  in async_evt_rx_rdy()
  1115  .data.rx.buf = data->dma_rx.buffer,  in async_evt_rx_rdy()
  1116  .data.rx.len = data->dma_rx.counter - data->dma_rx.offset,  in async_evt_rx_rdy()
  1117  .data.rx.offset = data->dma_rx.offset  in async_evt_rx_rdy()
  1121  data->dma_rx.offset = data->dma_rx.counter;  in async_evt_rx_rdy()
  1136  .data.rx_stop.data.len = data->dma_rx.counter,  in async_evt_rx_err()
  1138  .data.rx_stop.data.buf = data->dma_rx.buffer  in async_evt_rx_err()
  1191  .data.rx_buf.buf = data->dma_rx.buffer,  in async_evt_rx_buf_release()
  1212  if (dma_get_status(data->dma_rx.dma_dev,  in uart_stm32_dma_rx_flush()
  1213  data->dma_rx.dma_channel, &stat) == 0) {  in uart_stm32_dma_rx_flush()
  [all …]
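uart_stm32.c derives its rx_rdy events from the DMA channel's remaining transfer count: the bytes written so far are the buffer length minus dma_status.pending_length. A hedged sketch of that flush logic, assuming a bookkeeping struct shaped like the fragments above; all names here are hypothetical.

    #include <zephyr/drivers/dma.h>
    #include <zephyr/drivers/uart.h>

    /* Hypothetical stream state; field names follow the fragments above. */
    struct my_dma_stream {
        const struct device *dma_dev;
        uint32_t dma_channel;
        uint8_t *buffer;
        size_t buffer_length;
        size_t offset;   /* bytes already reported to the application */
        size_t counter;  /* bytes written by the DMA so far */
    };

    /* Derive the DMA write position from the channel's pending transfer
     * count and report any newly received bytes with UART_RX_RDY.
     */
    static void my_dma_rx_flush(const struct device *uart_dev,
                                struct my_dma_stream *rx,
                                uart_callback_t cb, void *user_data)
    {
        struct dma_status stat;

        if (dma_get_status(rx->dma_dev, rx->dma_channel, &stat) == 0) {
            rx->counter = rx->buffer_length - stat.pending_length;
        }

        if (rx->counter > rx->offset && cb != NULL) {
            struct uart_event event = {
                .type = UART_RX_RDY,
                .data.rx.buf = rx->buffer,
                .data.rx.len = rx->counter - rx->offset,
                .data.rx.offset = rx->offset,
            };

            rx->offset = rx->counter;
            cb(uart_dev, &event, user_data);
        }
    }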
uart_stm32.h
  102   struct uart_dma_stream dma_rx;  member
/Zephyr-latest/drivers/spi/
spi_andes_atcspi200.c
  44    struct stream dma_rx;  member
  233   data->dma_rx.dma_blk_cfg.next_block = NULL;  in spi_dma_move_buffers()
  261   dma_stop(data->dma_rx.dma_dev, data->dma_rx.channel);  in dma_rx_callback()
  269   error = dma_start(data->dma_rx.dma_dev, data->dma_rx.channel);  in dma_rx_callback()
  427   memset(&data->dma_rx.dma_blk_cfg, 0, sizeof(struct dma_block_config));  in spi_dma_rx_load()
  430   data->dma_rx.dma_blk_cfg.block_size = data->chunk_len /  in spi_dma_rx_load()
  431   data->dma_rx.dma_cfg.dest_data_size;  in spi_dma_rx_load()
  433   data->dma_rx.dma_blk_cfg.block_size = ctx->current_rx->len /  in spi_dma_rx_load()
  434   data->dma_rx.dma_cfg.dest_data_size;  in spi_dma_rx_load()
  440   data->dma_rx.dma_blk_cfg.dest_address = (uintptr_t)&dummy_rx_tx_buffer;  in spi_dma_rx_load()
  [all …]
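This driver sizes the RX block in units of dest_data_size and, when the caller supplied no RX buffer, drains the peripheral into a dummy word. A minimal sketch of that pattern using Zephyr's generic DMA types; my_spi_dma_rx_load and dummy_rx_sink are illustrative names.

    #include <string.h>
    #include <zephyr/drivers/dma.h>

    static uint32_t dummy_rx_sink; /* scratch word for discarded RX data */

    /* Build the RX block: with no RX buffer, park the destination on a
     * scratch word without incrementing, so the transfer still drains the
     * peripheral FIFO. block_size is counted in dest_data_size units.
     */
    static void my_spi_dma_rx_load(struct dma_config *cfg,
                                   struct dma_block_config *blk,
                                   uint8_t *rx_buf, size_t len)
    {
        memset(blk, 0, sizeof(*blk));
        blk->block_size = len / cfg->dest_data_size;
        blk->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; /* peripheral reg */

        if (rx_buf == NULL) {
            blk->dest_address = (uintptr_t)&dummy_rx_sink;
            blk->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
        } else {
            blk->dest_address = (uintptr_t)rx_buf;
            blk->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
        }

        blk->next_block = NULL;
        cfg->head_block = blk;
        cfg->block_count = 1;
    }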
spi_xmc4xxx.c
  61    struct spi_xmc4xxx_dma_stream dma_rx;  member
  83    } else if (dev_dma == data->dma_rx.dev_dma &&  in spi_xmc4xxx_dma_callback()
  84    dma_channel == data->dma_rx.dma_channel) {  in spi_xmc4xxx_dma_callback()
  356   struct spi_xmc4xxx_dma_stream *dma_rx = &data->dma_rx;  in spi_xmc4xxx_transceive_dma()  local
  404   dma_rx->blk_cfg.dest_address = (uint32_t)ctx->rx_buf;  in spi_xmc4xxx_transceive_dma()
  405   dma_rx->blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;  in spi_xmc4xxx_transceive_dma()
  406   dma_rx->blk_cfg.block_size = dma_len;  in spi_xmc4xxx_transceive_dma()
  407   dma_rx->blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;  in spi_xmc4xxx_transceive_dma()
  409   ret = dma_config(dma_rx->dev_dma, dma_rx->dma_channel, &dma_rx->dma_cfg);  in spi_xmc4xxx_transceive_dma()
  418   ret = dma_start(dma_rx->dev_dma, dma_rx->dma_channel);  in spi_xmc4xxx_transceive_dma()
  [all …]
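The XMC4XXX SPI driver fills the RX side's dma_block_config and then arms the channel with dma_config()/dma_start(). A hypothetical condensed version of that sequence; the function name and parameter layout are illustrative, and cfg->head_block is assumed to already point at blk.

    #include <zephyr/drivers/dma.h>

    /* One RX transfer: point the block at the caller's buffer, configure
     * the channel, then arm it. The source stays pinned on the peripheral
     * FIFO register.
     */
    static int my_spi_transceive_rx_dma(const struct device *dma_dev,
                                        uint32_t channel,
                                        struct dma_config *cfg,
                                        struct dma_block_config *blk,
                                        uint8_t *rx_buf, uint32_t dma_len)
    {
        int ret;

        blk->dest_address = (uint32_t)rx_buf;
        blk->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
        blk->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
        blk->block_size = dma_len;

        ret = dma_config(dma_dev, channel, cfg);
        if (ret < 0) {
            return ret;
        }

        return dma_start(dma_dev, channel);
    }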
spi_mcux_flexcomm.c
  68    struct stream dma_rx;  member
  320   } else if (channel == data->dma_rx.channel) {  in spi_mcux_dma_callback()
  521   struct stream *stream = &data->dma_rx;  in spi_mcux_dma_rx_load()
  546   ret = dma_config(data->dma_rx.dma_dev, data->dma_rx.channel,  in spi_mcux_dma_rx_load()
  554   return dma_start(data->dma_rx.dma_dev, data->dma_rx.channel);  in spi_mcux_dma_rx_load()
  623   data->dma_rx.dma_cfg.source_data_size = data_size;  in transceive_dma()
  624   data->dma_rx.dma_cfg.dest_data_size = data_size;  in transceive_dma()
  810   if (!device_is_ready(data->dma_rx.dma_dev)) {  in spi_mcux_init()
  811   LOG_ERR("%s device is not ready", data->dma_rx.dma_dev->name);  in spi_mcux_init()
  867   .dma_rx = { \
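spi_mcux_flexcomm.c registers one DMA callback for both streams and tells them apart by channel number. A sketch of that dispatch pattern with hypothetical my_spi_data state; a real driver would signal a semaphore or complete the transfer instead of setting flags.

    #include <zephyr/drivers/dma.h>

    /* Hypothetical driver data holding both stream channel numbers. */
    struct my_spi_data {
        uint32_t tx_channel;
        uint32_t rx_channel;
        volatile bool tx_done;
        volatile bool rx_done;
    };

    /* Single callback for both channels: the channel argument identifies
     * which stream finished, matching spi_mcux_dma_callback() above.
     */
    static void my_spi_dma_callback(const struct device *dma_dev,
                                    void *user_data, uint32_t channel,
                                    int status)
    {
        struct my_spi_data *data = user_data;

        if (status < 0) {
            return; /* a real driver would record and signal the error */
        }

        if (channel == data->tx_channel) {
            data->tx_done = true;
        } else if (channel == data->rx_channel) {
            data->rx_done = true;
        }
    }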
spi_ll_stm32.c
  144   } else if (channel == spi_dma_data->dma_rx.channel) {  in dma_callback()
  227   struct stream *stream = &data->dma_rx;  in spi_stm32_dma_rx_load()
  243   if (data->dma_rx.dst_addr_increment) {  in spi_stm32_dma_rx_load()
  251   if (data->dma_rx.src_addr_increment) {  in spi_stm32_dma_rx_load()
  258   blk_cfg->fifo_mode_control = data->dma_rx.fifo_threshold;  in spi_stm32_dma_rx_load()
  266   ret = dma_config(data->dma_rx.dma_dev, data->dma_rx.channel,  in spi_stm32_dma_rx_load()
  274   return dma_start(data->dma_rx.dma_dev, data->dma_rx.channel);  in spi_stm32_dma_rx_load()
  283   dma_segment_len = len * data->dma_rx.dma_cfg.dest_data_size;  in spi_dma_move_buffers()
  1096  err = dma_stop(data->dma_rx.dma_dev, data->dma_rx.channel);  in transceive_dma()
  1129  && (data->dma_rx.dma_dev != NULL)) {  in spi_stm32_transceive()
  [all …]
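spi_ll_stm32.c translates per-stream devicetree flags (address increment, FIFO threshold) into the block config before starting the channel. A hedged sketch, with the stream struct fields assumed from the fragments above and all names hypothetical.

    #include <string.h>
    #include <zephyr/drivers/dma.h>

    /* Hypothetical stream descriptor modelled on the fragments above. */
    struct my_stream {
        const struct device *dma_dev;
        uint32_t channel;
        struct dma_config dma_cfg;
        bool src_addr_increment;
        bool dst_addr_increment;
        int fifo_threshold;
    };

    /* Translate devicetree-derived stream flags into a block config, then
     * configure and start the RX channel.
     */
    static int my_spi_dma_rx_load(struct my_stream *stream,
                                  struct dma_block_config *blk,
                                  uint8_t *buf, size_t len)
    {
        int ret;

        memset(blk, 0, sizeof(*blk));
        blk->block_size = len;
        blk->dest_address = (uint32_t)buf;
        blk->dest_addr_adj = stream->dst_addr_increment ?
            DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;
        blk->source_addr_adj = stream->src_addr_increment ?
            DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;
        blk->fifo_mode_control = stream->fifo_threshold;

        stream->dma_cfg.head_block = blk;
        stream->dma_cfg.block_count = 1;

        ret = dma_config(stream->dma_dev, stream->channel,
                         &stream->dma_cfg);
        if (ret < 0) {
            return ret;
        }

        return dma_start(stream->dma_dev, stream->channel);
    }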
spi_mcux_lpspi.c
  86    struct spi_dma_stream dma_rx;  member
  230   return (data->dma_tx.dma_dev && data->dma_rx.dma_dev);  in lpspi_inst_has_dma()
  250   } else if (channel == data->dma_rx.channel) {  in spi_mcux_dma_callback()
  338   struct spi_dma_stream *stream = &data->dma_rx;  in spi_mcux_dma_rx_load()
  400   ret = dma_start(data->dma_rx.dma_dev, data->dma_rx.channel);  in spi_mcux_dma_rxtx_load()
  753   lpspi_dma_dev_ready(data->dma_rx.dma_dev);  in lpspi_dma_devs_ready()
  819   (.dma_rx = {.dma_dev = DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(n, rx)), \
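spi_mcux_lpspi.c binds its DMA streams from the instance's dmas/dma-names devicetree properties and checks controller readiness at init time. A sketch of both steps; the macro and helper names are illustrative, and the devicetree fragment in the comment is an assumed example.

    #include <errno.h>
    #include <zephyr/device.h>
    #include <zephyr/devicetree.h>

    /* Hypothetical stream struct bound from the instance's `dmas` property. */
    struct my_spi_dma_stream {
        const struct device *dma_dev;
        uint32_t channel;
    };

    /* Pull the RX DMA controller and channel out of devicetree, e.g. from
     *   dmas = <&edma0 0 13>, <&edma0 1 14>;
     *   dma-names = "rx", "tx";
     */
    #define MY_SPI_RX_DMA_INIT(n)                                          \
        {                                                                  \
            .dma_dev = DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(n, rx)),    \
            .channel = DT_INST_DMAS_CELL_BY_NAME(n, rx, channel),          \
        }

    /* DMA controllers initialize independently of the SPI instance, so
     * check readiness before first use, as spi_mcux_init() does above.
     */
    static int my_dma_stream_ready(const struct my_spi_dma_stream *stream)
    {
        return device_is_ready(stream->dma_dev) ? 0 : -ENODEV;
    }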
spi_ll_stm32.h
  68    struct stream dma_rx;  member
/Zephyr-latest/subsys/mgmt/ec_host_cmd/backends/
ec_host_cmd_backend_spi_stm32.c
  170   struct dma_stream *dma_rx;  member
  219   static struct dma_stream dma_rx = {SPI_DMA_CHANNEL_INIT(id, rx, RX, PERIPHERAL, MEMORY)}; \
  226   .dma_rx = &dma_rx, \
  365   if ((hc_spi->dma_rx->dma_dev != NULL) && !device_is_ready(hc_spi->dma_rx->dma_dev)) {  in spi_init()
  366   LOG_ERR("%s device not ready", hc_spi->dma_rx->dma_dev->name);  in spi_init()
  485   ret = dma_reload(hc_spi->dma_rx->dma_dev, hc_spi->dma_rx->channel, dma_source_addr(spi),  in reload_dma_rx()
  491   ret = dma_start(hc_spi->dma_rx->dma_dev, hc_spi->dma_rx->channel);  in reload_dma_rx()
  504   struct dma_stream *stream = hc_spi->dma_rx;  in spi_config_dma_rx()
  521   blk_cfg->fifo_mode_control = hc_spi->dma_rx->fifo_threshold;  in spi_config_dma_rx()
  527   ret = dma_config(hc_spi->dma_rx->dma_dev, hc_spi->dma_rx->channel, &stream->dma_cfg);  in spi_config_dma_rx()
  [all …]
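This backend re-arms its RX channel with dma_reload() rather than a full reconfiguration, since only the addresses and length change between transactions. A minimal sketch of that re-arm step; the periph_reg_addr parameter stands in for whatever the backend's dma_source_addr() helper returns.

    #include <zephyr/drivers/dma.h>

    /* Re-arm a completed RX channel on the same buffer: dma_reload() only
     * updates addresses and length, which is cheaper than dma_config(),
     * then dma_start() enables the channel again.
     */
    static int my_reload_dma_rx(const struct device *dma_dev,
                                uint32_t channel,
                                uint32_t periph_reg_addr,
                                uint8_t *buf, size_t len)
    {
        int ret;

        ret = dma_reload(dma_dev, channel, periph_reg_addr,
                         (uint32_t)buf, len);
        if (ret < 0) {
            return ret;
        }

        return dma_start(dma_dev, channel);
    }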
/Zephyr-latest/drivers/disk/
sdmmc_stm32.c
  90    struct sdmmc_dma_stream dma_rx;  member
  247   err = stm32_sdmmc_configure_dma(&priv->dma_rx_handle, &priv->dma_rx);  in stm32_sdmmc_dma_init()
  262   struct sdmmc_dma_stream *dma_rx = &priv->dma_rx;  in stm32_sdmmc_dma_deinit()  local
  270   ret = dma_stop(dma_rx->dev, dma_rx->channel);  in stm32_sdmmc_dma_deinit()
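The deinit path here stops the channels that init started. A trivial sketch of such a teardown, assuming (hypothetically) one controller serving both directions and propagating the first failure.

    #include <zephyr/drivers/dma.h>

    /* Stop both directions on teardown, returning the first error. */
    static int my_sdmmc_dma_deinit(const struct device *dma_dev,
                                   uint32_t rx_channel, uint32_t tx_channel)
    {
        int ret = dma_stop(dma_dev, rx_channel);

        if (ret < 0) {
            return ret;
        }

        return dma_stop(dma_dev, tx_channel);
    }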
/Zephyr-latest/drivers/flash/
flash_stm32_xspi.h
  109   struct stream dma_rx;  member
flash_stm32_xspi.c
  2187  if (flash_stm32_xspi_dma_init(&hdma_rx, &dev_data->dma_rx) != 0) {  in flash_stm32_xspi_init()
/Zephyr-latest/drivers/i3c/
i3c_stm32.c
  140   struct i3c_stm32_dma_stream dma_rx;  /* RX DMA channel config */  member
  1140  dma_stream = &(data->dma_rx);  in i3c_stm32_dma_msg_config()
  1435  dev, &data->dma_rx, LL_I3C_DMA_GetRegAddr(i3c, LL_I3C_DMA_REG_DATA_RECEIVE_BYTE),  in i3c_stm32_init_dma()
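i3c_stm32.c points the RX channel's source at the controller's receive-data register (obtained above via the vendor LL_I3C_DMA_GetRegAddr() helper). A generic PERIPHERAL_TO_MEMORY configuration sketch with the register address passed in rather than derived from vendor headers; all names are illustrative.

    #include <zephyr/drivers/dma.h>

    /* Pin the source on the peripheral's RX data register; walk the
     * destination through the buffer. Most controllers consume the block
     * list during dma_config(), but check your controller before putting
     * it on the stack like this.
     */
    static int my_i3c_dma_rx_config(const struct device *dma_dev,
                                    uint32_t channel,
                                    uint32_t periph_rx_reg,
                                    uint8_t *buf, size_t len)
    {
        struct dma_block_config blk = {
            .source_address = periph_rx_reg,
            .source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE,
            .dest_address = (uint32_t)buf,
            .dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
            .block_size = len,
        };
        struct dma_config cfg = {
            .channel_direction = PERIPHERAL_TO_MEMORY,
            .source_data_size = 1,
            .dest_data_size = 1,
            .block_count = 1,
            .head_block = &blk,
        };

        return dma_config(dma_dev, channel, &cfg);
    }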