Lines matching refs: dma_tx

1125 LOG_DBG("tx done: %d", data->dma_tx.counter); in async_evt_tx_done()
1129 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_done()
1130 .data.tx.len = data->dma_tx.counter in async_evt_tx_done()
1134 data->dma_tx.buffer_length = 0; in async_evt_tx_done()
1135 data->dma_tx.counter = 0; in async_evt_tx_done()
1142 LOG_DBG("tx abort: %d", data->dma_tx.counter); in async_evt_tx_abort()
1146 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_abort()
1147 .data.tx.len = data->dma_tx.counter in async_evt_tx_abort()
1151 data->dma_tx.buffer_length = 0; in async_evt_tx_abort()
1152 data->dma_tx.counter = 0; in async_evt_tx_abort()
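The async_evt_tx_done() and async_evt_tx_abort() matches above follow the same pattern: log the transferred count, build a uart_event from the dma_tx bookkeeping, clear that bookkeeping, and hand the event to the user callback. Below is a minimal sketch of the done path only, assuming the driver's struct uart_stm32_data embeds a dma_tx stream (buffer, buffer_length, counter) and an async_user_callback() helper that forwards the event to the callback registered with uart_callback_set(); the abort path at 1142-1152 differs only in using UART_TX_ABORTED. Headers, types, and helper names here are assumptions, not verbatim driver code.

#include <zephyr/drivers/uart.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/kernel.h>
#include <zephyr/logging/log.h>

static void async_evt_tx_done(struct uart_stm32_data *data)
{
	LOG_DBG("tx done: %d", data->dma_tx.counter);

	struct uart_event event = {
		.type = UART_TX_DONE,
		.data.tx.buf = data->dma_tx.buffer,
		.data.tx.len = data->dma_tx.counter
	};

	/* Clear the TX bookkeeping so the next uart_tx() call is accepted. */
	data->dma_tx.buffer_length = 0;
	data->dma_tx.counter = 0;

	async_user_callback(data, &event);
}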
1401 (void)k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_stm32_dma_tx_cb()
1403 if (!dma_get_status(data->dma_tx.dma_dev, in uart_stm32_dma_tx_cb()
1404 data->dma_tx.dma_channel, &stat)) { in uart_stm32_dma_tx_cb()
1405 data->dma_tx.counter = data->dma_tx.buffer_length - in uart_stm32_dma_tx_cb()
1409 data->dma_tx.buffer_length = 0; in uart_stm32_dma_tx_cb()
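The uart_stm32_dma_tx_cb() matches outline the DMA completion callback: cancel the pending TX timeout, query the channel status to work out how many bytes actually left the buffer, and mark the stream idle. A hedged reconstruction follows, using Zephyr's dma_callback_t signature and the same assumed types and headers as the sketch above; the handling of the status argument and the eventual UART_TX_DONE notification are not visible in these matches and are only summarized in comments.

static void uart_stm32_dma_tx_cb(const struct device *dma_dev, void *user_data,
				 uint32_t channel, int status)
{
	const struct device *uart_dev = user_data;
	struct uart_stm32_data *data = uart_dev->data;
	struct dma_status stat;

	/* The transfer completed (or failed), so the timeout no longer applies. */
	(void)k_work_cancel_delayable(&data->dma_tx.timeout_work);

	if (!dma_get_status(data->dma_tx.dma_dev,
			    data->dma_tx.dma_channel, &stat)) {
		data->dma_tx.counter = data->dma_tx.buffer_length -
				       stat.pending_length;
	}

	/* Mark the TX stream idle; the TX_DONE event itself is raised later,
	 * once the UART signals that the last byte has really gone out.
	 */
	data->dma_tx.buffer_length = 0;
}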
1489 if (data->dma_tx.dma_dev == NULL) { in uart_stm32_async_tx()
1493 if (data->dma_tx.buffer_length != 0) { in uart_stm32_async_tx()
1497 data->dma_tx.buffer = (uint8_t *)tx_data; in uart_stm32_async_tx()
1498 data->dma_tx.buffer_length = buf_size; in uart_stm32_async_tx()
1499 data->dma_tx.timeout = timeout; in uart_stm32_async_tx()
1501 LOG_DBG("tx: l=%d", data->dma_tx.buffer_length); in uart_stm32_async_tx()
1510 data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer; in uart_stm32_async_tx()
1511 data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_length; in uart_stm32_async_tx()
1513 ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel, in uart_stm32_async_tx()
1514 &data->dma_tx.dma_cfg); in uart_stm32_async_tx()
1521 if (dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel)) { in uart_stm32_async_tx()
1527 async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout); in uart_stm32_async_tx()
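Lines 1489-1527 give the skeleton of uart_stm32_async_tx(): bail out if no TX DMA device is configured or a transfer is already in flight, record the buffer and timeout, patch the pre-built block config with the new source address and size, then configure and start the channel and arm the timeout work. A sketch of that flow, with the same assumed headers and types as above; the specific error codes, the enabling of the UART's TX DMA request, and the behaviour of the driver's async_timer_start() helper are inferred rather than taken from the source.

static int uart_stm32_async_tx(const struct device *dev,
			       const uint8_t *tx_data, size_t buf_size,
			       int32_t timeout)
{
	struct uart_stm32_data *data = dev->data;
	int ret;

	if (data->dma_tx.dma_dev == NULL) {
		return -ENODEV;		/* async TX not wired up */
	}

	if (data->dma_tx.buffer_length != 0) {
		return -EBUSY;		/* previous transfer still pending */
	}

	data->dma_tx.buffer = (uint8_t *)tx_data;
	data->dma_tx.buffer_length = buf_size;
	data->dma_tx.timeout = timeout;

	LOG_DBG("tx: l=%d", data->dma_tx.buffer_length);

	/* Only the source side of the block config changes per transfer;
	 * the destination was fixed at init time (see lines 1744/1747).
	 */
	data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer;
	data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_length;

	ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel,
			 &data->dma_tx.dma_cfg);
	if (ret != 0) {
		return ret;
	}

	if (dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel)) {
		return -EFAULT;
	}

	/* Arm the TX timeout so a stalled transfer is eventually aborted. */
	async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout);

	return 0;
}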
1604 size_t tx_buffer_length = data->dma_tx.buffer_length; in uart_stm32_async_tx_abort()
1611 (void)k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_stm32_async_tx_abort()
1612 if (!dma_get_status(data->dma_tx.dma_dev, in uart_stm32_async_tx_abort()
1613 data->dma_tx.dma_channel, &stat)) { in uart_stm32_async_tx_abort()
1614 data->dma_tx.counter = tx_buffer_length - stat.pending_length; in uart_stm32_async_tx_abort()
1618 dma_suspend(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_stm32_async_tx_abort()
1620 dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_stm32_async_tx_abort()
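The uart_stm32_async_tx_abort() matches show the abort sequence: snapshot buffer_length, cancel the timeout work, read the DMA status to compute how much was already sent, then suspend and stop the channel. The sketch below only connects those fragments; the early-exit check, the return codes, and the final call into async_evt_tx_abort() are assumptions.

static int uart_stm32_async_tx_abort(const struct device *dev)
{
	struct uart_stm32_data *data = dev->data;
	size_t tx_buffer_length = data->dma_tx.buffer_length;
	struct dma_status stat;

	if (tx_buffer_length == 0) {
		return -EFAULT;		/* nothing in flight (assumed code) */
	}

	(void)k_work_cancel_delayable(&data->dma_tx.timeout_work);
	if (!dma_get_status(data->dma_tx.dma_dev,
			    data->dma_tx.dma_channel, &stat)) {
		data->dma_tx.counter = tx_buffer_length - stat.pending_length;
	}

	/* Lines 1618 and 1620 suggest the channel is suspended before being
	 * stopped, possibly conditional on the DMA controller in use.
	 */
	dma_suspend(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
	dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);

	async_evt_tx_abort(data);

	return 0;
}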
1650 struct uart_stm32_data, dma_tx); in uart_stm32_async_tx_timeout()
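Line 1650 shows uart_stm32_async_tx_timeout() recovering the driver context with CONTAINER_OF(..., struct uart_stm32_data, dma_tx). The sketch below illustrates that pattern for a k_work_delayable handler; the uart_dma_stream type name, the uart_dev back-pointer, and the call into the abort path are assumptions used to complete the picture.

static void uart_stm32_async_tx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	/* Two hops: from the delayable work item to the TX stream embedding
	 * it, then from that stream to the enclosing per-instance data.
	 */
	struct uart_dma_stream *tx_stream = CONTAINER_OF(dwork,
			struct uart_dma_stream, timeout_work);
	struct uart_stm32_data *data = CONTAINER_OF(tx_stream,
			struct uart_stm32_data, dma_tx);

	/* A transfer that outlives its timeout is simply aborted. */
	uart_stm32_async_tx_abort(data->uart_dev);
}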
1683 if (data->dma_tx.dma_dev != NULL) { in uart_stm32_async_init()
1684 if (!device_is_ready(data->dma_tx.dma_dev)) { in uart_stm32_async_init()
1695 k_work_init_delayable(&data->dma_tx.timeout_work, in uart_stm32_async_init()
1738 memset(&data->dma_tx.blk_cfg, 0, sizeof(data->dma_tx.blk_cfg)); in uart_stm32_async_init()
1744 data->dma_tx.blk_cfg.dest_address = in uart_stm32_async_init()
1747 data->dma_tx.blk_cfg.dest_address = in uart_stm32_async_init()
1752 data->dma_tx.blk_cfg.source_address = 0; /* not ready */ in uart_stm32_async_init()
1754 if (data->dma_tx.src_addr_increment) { in uart_stm32_async_init()
1755 data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in uart_stm32_async_init()
1757 data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in uart_stm32_async_init()
1760 if (data->dma_tx.dst_addr_increment) { in uart_stm32_async_init()
1761 data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT; in uart_stm32_async_init()
1763 data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in uart_stm32_async_init()
1766 data->dma_tx.blk_cfg.fifo_mode_control = data->dma_tx.fifo_threshold; in uart_stm32_async_init()
1768 data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg; in uart_stm32_async_init()
1769 data->dma_tx.dma_cfg.user_data = (void *)dev; in uart_stm32_async_init()
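Finally, the uart_stm32_async_init() matches (1683-1769) cover the one-time TX setup: verify the TX DMA controller is ready, initialize the timeout work item, and pre-build the DMA block config whose destination stays fixed at the UART data register while the source is filled in per transfer. The sketch below gathers just that TX half under a hypothetical uart_stm32_async_init_tx() helper (plus <string.h> for memset); the register-address macros chosen between lines 1744 and 1747 and the RX mirror of this code are omitted.

static int uart_stm32_async_init_tx(const struct device *dev)
{
	struct uart_stm32_data *data = dev->data;

	if (data->dma_tx.dma_dev != NULL &&
	    !device_is_ready(data->dma_tx.dma_dev)) {
		return -ENODEV;
	}

	k_work_init_delayable(&data->dma_tx.timeout_work,
			      uart_stm32_async_tx_timeout);

	/* Build the block config once; uart_stm32_async_tx() only updates
	 * source_address and block_size for each transfer.
	 */
	memset(&data->dma_tx.blk_cfg, 0, sizeof(data->dma_tx.blk_cfg));

	/* Destination: the UART transmit data register. Lines 1744/1747 pick
	 * between two register layouts; the exact macro is left out here.
	 */
	data->dma_tx.blk_cfg.dest_address = 0;
	/* Source is patched in by uart_stm32_async_tx(). */
	data->dma_tx.blk_cfg.source_address = 0; /* not ready */

	data->dma_tx.blk_cfg.source_addr_adj = data->dma_tx.src_addr_increment ?
		DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;
	data->dma_tx.blk_cfg.dest_addr_adj = data->dma_tx.dst_addr_increment ?
		DMA_ADDR_ADJ_INCREMENT : DMA_ADDR_ADJ_NO_CHANGE;

	data->dma_tx.blk_cfg.fifo_mode_control = data->dma_tx.fifo_threshold;

	data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg;
	data->dma_tx.dma_cfg.user_data = (void *)dev;

	return 0;
}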