Lines Matching refs:dma_tx

69 struct uart_dma_stream dma_tx; member
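
Line 69 declares the TX stream bookkeeping inside the driver data. A plausible layout for struct uart_dma_stream, reconstructed only from the fields this listing references (the ordering, and any members not referenced here, are assumptions):

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/kernel.h>

struct uart_dma_stream {
	const struct device *dma_dev;          /* DMA controller (lines 530, 578) */
	uint32_t dma_channel;                  /* channel number (lines 530, 810) */
	struct dma_config dma_cfg;             /* passed to dma_config() (line 647) */
	struct dma_block_config blk_cfg;       /* single block: buffer -> TX register (586-594) */
	uint8_t *buffer;                       /* user buffer handed to uart_tx() (639) */
	size_t buffer_len;                     /* total bytes requested (640) */
	size_t counter;                        /* bytes actually transferred (531, 819) */
	int32_t timeout;                       /* TX timeout passed to uart_tx() (641) */
	struct k_work_delayable timeout_work;  /* abort-on-timeout work item (583) */
};
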
462 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_done()
463 .data.tx.len = data->dma_tx.counter}; in async_evt_tx_done()
465 data->dma_tx.buffer = NULL; in async_evt_tx_done()
466 data->dma_tx.buffer_len = 0; in async_evt_tx_done()
467 data->dma_tx.counter = 0; in async_evt_tx_done()
477 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_abort()
478 .data.tx.len = data->dma_tx.counter}; in async_evt_tx_abort()
480 data->dma_tx.buffer = NULL; in async_evt_tx_abort()
481 data->dma_tx.buffer_len = 0; in async_evt_tx_abort()
482 data->dma_tx.counter = 0; in async_evt_tx_abort()
522 k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_xmc4xxx_async_tx_abort()
523 tx_buffer_len = data->dma_tx.buffer_len; in uart_xmc4xxx_async_tx_abort()
530 if (!dma_get_status(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &stat)) { in uart_xmc4xxx_async_tx_abort()
531 data->dma_tx.counter = tx_buffer_len - stat.pending_length; in uart_xmc4xxx_async_tx_abort()
534 dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_xmc4xxx_async_tx_abort()
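
Lines 522-534 are from the abort path: cancel the timeout, compute how many bytes already went out, and stop the channel. A minimal sketch, assuming standard Zephyr dma_get_status()/dma_stop() semantics (0 return on success, pending_length = bytes not yet transferred), and omitting any locking the real driver may do around these steps:

static int uart_xmc4xxx_async_tx_abort(const struct device *dev)
{
	struct uart_xmc4xxx_data *data = dev->data;
	struct dma_status stat;
	size_t tx_buffer_len;

	k_work_cancel_delayable(&data->dma_tx.timeout_work);

	tx_buffer_len = data->dma_tx.buffer_len;
	if (tx_buffer_len == 0) {
		/* No active transmission; -EFAULT matches the uart_tx_abort() API contract. */
		return -EFAULT;
	}

	/* pending_length tells how much of the block is still queued. */
	if (!dma_get_status(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &stat)) {
		data->dma_tx.counter = tx_buffer_len - stat.pending_length;
	}

	dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
	async_evt_tx_abort(dev); /* assumption: reports UART_TX_ABORTED, see lines 477-482 */

	return 0;
}
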
548 struct uart_xmc4xxx_data *data = CONTAINER_OF(tx_stream, struct uart_xmc4xxx_data, dma_tx); in uart_xmc4xxx_async_tx_timeout()
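
Line 548 recovers the driver data from the expired work item via CONTAINER_OF(). A minimal sketch of the timeout handler, assuming the driver data keeps a back-pointer to its device (the dev field is an assumption) and that a timed-out transfer is handled like an explicit abort:

static void uart_xmc4xxx_async_tx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct uart_dma_stream *tx_stream =
		CONTAINER_OF(dwork, struct uart_dma_stream, timeout_work);
	struct uart_xmc4xxx_data *data =
		CONTAINER_OF(tx_stream, struct uart_xmc4xxx_data, dma_tx);

	/* Treat the timeout as an abort of the in-flight transfer (assumption). */
	uart_xmc4xxx_async_tx_abort(data->dev);
}
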
578 if (data->dma_tx.dma_dev != NULL) { in uart_xmc4xxx_async_init()
579 if (!device_is_ready(data->dma_tx.dma_dev)) { in uart_xmc4xxx_async_init()
583 k_work_init_delayable(&data->dma_tx.timeout_work, uart_xmc4xxx_async_tx_timeout); in uart_xmc4xxx_async_init()
586 data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->IN[0]; in uart_xmc4xxx_async_init()
588 data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->TBUF[0]; in uart_xmc4xxx_async_init()
591 data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in uart_xmc4xxx_async_init()
592 data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in uart_xmc4xxx_async_init()
593 data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg; in uart_xmc4xxx_async_init()
594 data->dma_tx.dma_cfg.user_data = (void *)dev; in uart_xmc4xxx_async_init()
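
Lines 578-594 set up the TX half once at init: verify the DMA controller, arm the timeout work, and pre-build the block config so the transmit path only has to fill in the source. A minimal sketch; the FIFO check (fifo_tx_size) and the error code are assumptions, the rest follows the listing:

static int uart_xmc4xxx_async_init(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	struct uart_xmc4xxx_data *data = dev->data;

	if (data->dma_tx.dma_dev != NULL) {
		if (!device_is_ready(data->dma_tx.dma_dev)) {
			return -ENODEV;
		}
		k_work_init_delayable(&data->dma_tx.timeout_work,
				      uart_xmc4xxx_async_tx_timeout);

		/* Destination is the USIC transmit register: the FIFO input
		 * register IN[0] when a TX FIFO is configured, otherwise
		 * TBUF[0] (choice inferred from lines 586/588).
		 */
		if (config->fifo_tx_size > 0) { /* hypothetical field name */
			data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->IN[0];
		} else {
			data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->TBUF[0];
		}

		/* The memory side walks through the buffer; the peripheral
		 * register address stays fixed.
		 */
		data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg;
		data->dma_tx.dma_cfg.user_data = (void *)dev;
	}

	return 0;
}
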
625 if (data->dma_tx.dma_dev == NULL) { in uart_xmc4xxx_async_tx()
635 if (data->dma_tx.buffer_len != 0) { in uart_xmc4xxx_async_tx()
639 data->dma_tx.buffer = (uint8_t *)tx_data; in uart_xmc4xxx_async_tx()
640 data->dma_tx.buffer_len = buf_size; in uart_xmc4xxx_async_tx()
641 data->dma_tx.timeout = timeout; in uart_xmc4xxx_async_tx()
644 data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer; in uart_xmc4xxx_async_tx()
645 data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_len; in uart_xmc4xxx_async_tx()
647 ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &data->dma_tx.dma_cfg); in uart_xmc4xxx_async_tx()
661 async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout); in uart_xmc4xxx_async_tx()
663 return dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_xmc4xxx_async_tx()
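
Lines 625-663 form the transmit path itself: reject the call if there is no DMA channel or a transfer is already in flight, record the buffer, point the pre-built block config at it, configure and start the channel, and arm the timeout. A minimal sketch that omits the interrupt locking and TX enabling the real driver presumably does around these steps:

static int uart_xmc4xxx_async_tx(const struct device *dev, const uint8_t *tx_data,
				 size_t buf_size, int32_t timeout)
{
	struct uart_xmc4xxx_data *data = dev->data;
	int ret;

	if (data->dma_tx.dma_dev == NULL) {
		return -ENODEV;
	}

	/* Only one transfer at a time: a non-zero buffer_len means TX is busy. */
	if (data->dma_tx.buffer_len != 0) {
		return -EBUSY;
	}

	data->dma_tx.buffer = (uint8_t *)tx_data;
	data->dma_tx.buffer_len = buf_size;
	data->dma_tx.timeout = timeout;

	/* Point the single-block transfer at the caller's buffer. */
	data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer;
	data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_len;

	ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel,
			 &data->dma_tx.dma_cfg);
	if (ret < 0) {
		return ret;
	}

	async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout);

	return dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
}
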
803 size_t tx_buffer_len = data->dma_tx.buffer_len; in uart_xmc4xxx_dma_tx_cb()
810 __ASSERT_NO_MSG(channel == data->dma_tx.dma_channel); in uart_xmc4xxx_dma_tx_cb()
812 k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_xmc4xxx_dma_tx_cb()
818 if (!dma_get_status(data->dma_tx.dma_dev, channel, &stat)) { in uart_xmc4xxx_dma_tx_cb()
819 data->dma_tx.counter = tx_buffer_len - stat.pending_length; in uart_xmc4xxx_dma_tx_cb()
824 if (data->dma_tx.buffer == NULL) { in uart_xmc4xxx_dma_tx_cb()
825 dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_xmc4xxx_dma_tx_cb()
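
Lines 803-825 come from the DMA completion callback. A minimal sketch using Zephyr's dma_callback_t signature; the early status check and the exact point where async_evt_tx_done() is invoked (somewhere between lines 819 and 824) are assumptions:

static void uart_xmc4xxx_dma_tx_cb(const struct device *dma_dev, void *user_data,
				   uint32_t channel, int status)
{
	const struct device *dev = user_data; /* set via dma_cfg.user_data, line 594 */
	struct uart_xmc4xxx_data *data = dev->data;
	size_t tx_buffer_len = data->dma_tx.buffer_len;
	struct dma_status stat;

	ARG_UNUSED(dma_dev);

	if (status != 0) {
		return; /* assumption: DMA errors are handled elsewhere */
	}

	__ASSERT_NO_MSG(channel == data->dma_tx.dma_channel);

	k_work_cancel_delayable(&data->dma_tx.timeout_work);

	if (!dma_get_status(data->dma_tx.dma_dev, channel, &stat)) {
		data->dma_tx.counter = tx_buffer_len - stat.pending_length;
	}

	async_evt_tx_done(dev); /* clears buffer/buffer_len/counter, lines 465-467 */

	/* Once the buffer has been released, the channel can be stopped. */
	if (data->dma_tx.buffer == NULL) {
		dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
	}
}
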