Lines matching refs: dma_tx
1146 LOG_DBG("tx done: %d", data->dma_tx.counter); in async_evt_tx_done()
1150 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_done()
1151 .data.tx.len = data->dma_tx.counter in async_evt_tx_done()
1155 data->dma_tx.buffer_length = 0; in async_evt_tx_done()
1156 data->dma_tx.counter = 0; in async_evt_tx_done()
1163 LOG_DBG("tx abort: %d", data->dma_tx.counter); in async_evt_tx_abort()
1167 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_abort()
1168 .data.tx.len = data->dma_tx.counter in async_evt_tx_abort()
1172 data->dma_tx.buffer_length = 0; in async_evt_tx_abort()
1173 data->dma_tx.counter = 0; in async_evt_tx_abort()
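Taken together, these references come from what appears to be the async (DMA-driven) TX path of Zephyr's STM32 UART serial driver. Every dma_tx.* field touched in the listing fits one per-direction DMA stream kept inside the driver's data struct. Below is a minimal sketch of that assumed shape, plus the TX-done event helper the first group of lines (1146-1156) belongs to; the exact layout of struct uart_stm32_data, the stream struct name, and async_user_callback() are assumptions inferred from the listing, not definitions taken from it.

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/kernel.h>
#include <zephyr/logging/log.h>

LOG_MODULE_REGISTER(uart_stm32_sketch, LOG_LEVEL_DBG);

/* Assumed per-direction DMA stream: every field below appears in the
 * references above; the struct itself is a reconstruction, not the driver's
 * own definition.
 */
struct uart_dma_stream {
	const struct device *dma_dev;         /* DMA controller bound via devicetree */
	uint32_t dma_channel;
	struct dma_config dma_cfg;
	struct dma_block_config blk_cfg;
	uint8_t *buffer;                      /* buffer handed to uart_tx() */
	size_t buffer_length;                 /* 0 while idle, >0 while a TX is in flight */
	size_t counter;                       /* bytes actually transferred */
	int32_t timeout;
	struct k_work_delayable timeout_work;
	bool src_addr_increment;
	bool dst_addr_increment;
	uint8_t fifo_threshold;
};

struct uart_stm32_data {
	const struct device *uart_dev;        /* back-pointer; assumed, used by the timeout handler */
	struct uart_dma_stream dma_tx;        /* member name confirmed by line 1735 */
	/* ... further driver state elided ... */
};

/* Assumed helper that invokes the callback registered with uart_callback_set(). */
void async_user_callback(struct uart_stm32_data *data, struct uart_event *event);

/* Lines 1146-1156: report the finished buffer and byte count, then mark the
 * TX stream idle again before notifying the application.
 */
static inline void async_evt_tx_done(struct uart_stm32_data *data)
{
	LOG_DBG("tx done: %d", data->dma_tx.counter);

	struct uart_event event = {
		.type = UART_TX_DONE,
		.data.tx.buf = data->dma_tx.buffer,
		.data.tx.len = data->dma_tx.counter
	};

	data->dma_tx.buffer_length = 0;
	data->dma_tx.counter = 0;

	async_user_callback(data, &event);
}

Lines 1163-1173 show async_evt_tx_abort() doing the same bookkeeping, presumably with the event type set to UART_TX_ABORTED instead of UART_TX_DONE.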
1462 (void)k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_stm32_dma_tx_cb()
1464 if (!dma_get_status(data->dma_tx.dma_dev, in uart_stm32_dma_tx_cb()
1465 data->dma_tx.dma_channel, &stat)) { in uart_stm32_dma_tx_cb()
1466 data->dma_tx.counter = data->dma_tx.buffer_length - in uart_stm32_dma_tx_cb()
1470 data->dma_tx.buffer_length = 0; in uart_stm32_dma_tx_cb()
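Lines 1462-1470 are the DMA completion callback for the TX channel: cancel the TX timeout work, ask the DMA driver how much data is still pending, derive the transferred byte count from buffer_length, then zero buffer_length so the stream reads as idle. A sketch reusing the assumed types above; the callback follows Zephyr's dma_callback_t signature (user_data is the UART device, see line 1874), and the real driver's IRQ locking and TX-disable steps are omitted here.

static void uart_stm32_dma_tx_cb(const struct device *dma_dev, void *user_data,
				 uint32_t channel, int status)
{
	const struct device *uart_dev = user_data;
	struct uart_stm32_data *data = uart_dev->data;
	struct dma_status stat;

	/* The transfer has ended, so the timeout watchdog is no longer needed. */
	(void)k_work_cancel_delayable(&data->dma_tx.timeout_work);

	/* Bytes sent = bytes requested minus whatever the DMA never drained. */
	if (!dma_get_status(data->dma_tx.dma_dev,
			    data->dma_tx.dma_channel, &stat)) {
		data->dma_tx.counter = data->dma_tx.buffer_length -
				       stat.pending_length;
	}

	/* A zero length frees the stream for the next uart_tx() call. */
	data->dma_tx.buffer_length = 0;
}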
1552 if (data->dma_tx.dma_dev == NULL) { in uart_stm32_async_tx()
1556 if (data->dma_tx.buffer_length != 0) { in uart_stm32_async_tx()
1567 data->dma_tx.buffer = (uint8_t *)tx_data; in uart_stm32_async_tx()
1568 data->dma_tx.buffer_length = buf_size; in uart_stm32_async_tx()
1569 data->dma_tx.timeout = timeout; in uart_stm32_async_tx()
1571 LOG_DBG("tx: l=%d", data->dma_tx.buffer_length); in uart_stm32_async_tx()
1580 data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer; in uart_stm32_async_tx()
1581 data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_length; in uart_stm32_async_tx()
1583 ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel, in uart_stm32_async_tx()
1584 &data->dma_tx.dma_cfg); in uart_stm32_async_tx()
1591 if (dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel)) { in uart_stm32_async_tx()
1597 async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout); in uart_stm32_async_tx()
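Lines 1552-1597 outline the submission path in uart_stm32_async_tx(): reject the call if no TX DMA controller is bound or a transfer is already in flight, record the caller's buffer, point the pre-built block config at it, then configure the channel, start it, and arm the timeout. A condensed sketch; the specific error codes and log strings are assumptions, and the register-level step that enables the USART's DMA TX request is omitted. async_timer_start() is the driver helper visible at line 1597; its exact behavior is assumed.

void async_timer_start(struct k_work_delayable *work, int32_t timeout);

static int uart_stm32_async_tx(const struct device *dev,
			       const uint8_t *tx_data, size_t buf_size,
			       int32_t timeout)
{
	struct uart_stm32_data *data = dev->data;
	int ret;

	if (data->dma_tx.dma_dev == NULL) {
		return -ENODEV;        /* async TX needs a TX DMA channel */
	}

	if (data->dma_tx.buffer_length != 0) {
		return -EBUSY;         /* previous uart_tx() still in progress */
	}

	data->dma_tx.buffer = (uint8_t *)tx_data;
	data->dma_tx.buffer_length = buf_size;
	data->dma_tx.timeout = timeout;

	LOG_DBG("tx: l=%d", data->dma_tx.buffer_length);

	/* Point the memory -> peripheral block at the caller's buffer. */
	data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer;
	data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_length;

	ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel,
			 &data->dma_tx.dma_cfg);
	if (ret != 0) {
		LOG_ERR("dma tx config error");
		return -EINVAL;
	}

	if (dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel)) {
		LOG_ERR("dma tx start error");
		return -EFAULT;
	}

	/* Arm the TX timeout (line 1597). */
	async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout);

	return 0;
}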
1689 size_t tx_buffer_length = data->dma_tx.buffer_length; in uart_stm32_async_tx_abort()
1696 (void)k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_stm32_async_tx_abort()
1697 if (!dma_get_status(data->dma_tx.dma_dev, in uart_stm32_async_tx_abort()
1698 data->dma_tx.dma_channel, &stat)) { in uart_stm32_async_tx_abort()
1699 data->dma_tx.counter = tx_buffer_length - stat.pending_length; in uart_stm32_async_tx_abort()
1703 dma_suspend(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_stm32_async_tx_abort()
1705 dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_stm32_async_tx_abort()
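Lines 1689-1705 show the abort path in uart_stm32_async_tx_abort() mirroring the completion callback: snapshot the in-flight length, cancel the timeout work, query the DMA channel for the pending count to compute how much actually went out, then suspend and stop the channel. A sketch; the early-return guard, the error code, and the final call into the abort event helper (lines 1163-1173) are assumptions, and in the real driver the suspend step may be conditional on the DMA variant.

void async_evt_tx_abort(struct uart_stm32_data *data);   /* lines 1163-1173 */

static int uart_stm32_async_tx_abort(const struct device *dev)
{
	struct uart_stm32_data *data = dev->data;
	size_t tx_buffer_length = data->dma_tx.buffer_length;
	struct dma_status stat;

	if (tx_buffer_length == 0) {
		return -EFAULT;        /* nothing in flight to abort */
	}

	(void)k_work_cancel_delayable(&data->dma_tx.timeout_work);

	/* Work out how far the DMA got before stopping it. */
	if (!dma_get_status(data->dma_tx.dma_dev,
			    data->dma_tx.dma_channel, &stat)) {
		data->dma_tx.counter = tx_buffer_length - stat.pending_length;
	}

	/* Freeze the channel first, then release it. */
	dma_suspend(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
	dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);

	/* Emit UART_TX_ABORTED with the partial count (see lines 1163-1173). */
	async_evt_tx_abort(data);

	return 0;
}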
1735 struct uart_stm32_data, dma_tx); in uart_stm32_async_tx_timeout()
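Line 1735 is the tail of a CONTAINER_OF() chain: the delayable work item embedded in dma_tx is mapped back to its owning uart_stm32_data so the timeout handler can abort the stalled transfer. A sketch of that unwinding; the intermediate stream pointer and the uart_dev back-pointer are assumptions, only the final CONTAINER_OF() step is visible in the listing.

static void uart_stm32_async_tx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct uart_dma_stream *tx_stream = CONTAINER_OF(dwork,
			struct uart_dma_stream, timeout_work);
	struct uart_stm32_data *data = CONTAINER_OF(tx_stream,
			struct uart_stm32_data, dma_tx);
	const struct device *dev = data->uart_dev;

	/* The transfer overran its deadline: abort it and report what was sent. */
	uart_stm32_async_tx_abort(dev);

	LOG_DBG("tx: async timeout");
}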
1788 if (data->dma_tx.dma_dev != NULL) { in uart_stm32_async_init()
1789 if (!device_is_ready(data->dma_tx.dma_dev)) { in uart_stm32_async_init()
1800 k_work_init_delayable(&data->dma_tx.timeout_work, in uart_stm32_async_init()
1843 memset(&data->dma_tx.blk_cfg, 0, sizeof(data->dma_tx.blk_cfg)); in uart_stm32_async_init()
1849 data->dma_tx.blk_cfg.dest_address = in uart_stm32_async_init()
1852 data->dma_tx.blk_cfg.dest_address = in uart_stm32_async_init()
1857 data->dma_tx.blk_cfg.source_address = 0; /* not ready */ in uart_stm32_async_init()
1859 if (data->dma_tx.src_addr_increment) { in uart_stm32_async_init()
1860 data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in uart_stm32_async_init()
1862 data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in uart_stm32_async_init()
1865 if (data->dma_tx.dst_addr_increment) { in uart_stm32_async_init()
1866 data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT; in uart_stm32_async_init()
1868 data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in uart_stm32_async_init()
1871 data->dma_tx.blk_cfg.fifo_mode_control = data->dma_tx.fifo_threshold; in uart_stm32_async_init()
1873 data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg; in uart_stm32_async_init()
1874 data->dma_tx.dma_cfg.user_data = (void *)dev; in uart_stm32_async_init()
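Lines 1788-1874 cover the one-time TX setup in uart_stm32_async_init(): verify the devicetree-bound DMA controller is ready, register the timeout work handler, and pre-fill the constant parts of the block config (destination fixed at the USART data register, source left at 0 until a transfer is submitted, address-increment flags and FIFO threshold taken from the stream's configuration). The two dest_address assignments at lines 1849/1852 are presumably alternative compile-time branches picking the data-register address for different STM32 series; the sketch below covers only the TX half, collapses that branch into a parameter, and its error handling is an assumption.

#include <string.h>

/* tx_dr_addr stands in for the USART transmit data register address that the
 * real driver derives from the peripheral's register block (lines 1849/1852).
 */
static int uart_stm32_async_tx_init(const struct device *dev, uint32_t tx_dr_addr)
{
	struct uart_stm32_data *data = dev->data;

	if (data->dma_tx.dma_dev != NULL) {
		if (!device_is_ready(data->dma_tx.dma_dev)) {
			LOG_ERR("TX DMA controller not ready");
			return -ENODEV;
		}
	}

	k_work_init_delayable(&data->dma_tx.timeout_work,
			      uart_stm32_async_tx_timeout);

	/* Constant part of the memory -> peripheral block configuration. */
	memset(&data->dma_tx.blk_cfg, 0, sizeof(data->dma_tx.blk_cfg));
	data->dma_tx.blk_cfg.dest_address = tx_dr_addr;
	data->dma_tx.blk_cfg.source_address = 0;   /* "not ready": set per uart_tx() */

	data->dma_tx.blk_cfg.source_addr_adj = data->dma_tx.src_addr_increment ?
			DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;
	data->dma_tx.blk_cfg.dest_addr_adj = data->dma_tx.dst_addr_increment ?
			DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;

	data->dma_tx.blk_cfg.fifo_mode_control = data->dma_tx.fifo_threshold;

	/* Single block per transfer; the DMA callback gets the UART device back
	 * (this is the user_data read in uart_stm32_dma_tx_cb() above).
	 */
	data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg;
	data->dma_tx.dma_cfg.user_data = (void *)dev;

	return 0;
}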