Lines Matching refs:dma_tx

Each hit below gives the source line number in the XMC4xxx UART driver, the matching line, and the enclosing function ("member" marks the field definition itself).

69 	struct uart_dma_stream dma_tx;  member
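The single hit at line 69 is the TX stream member of the driver's per-instance data (struct uart_xmc4xxx_data, as the CONTAINER_OF at line 570 also shows). Collecting the fields that the references below actually touch, the stream type plausibly looks like the following sketch; field order, types not visible in the hits, and anything beyond these members are assumptions, not taken from the source.

/* Sketch of the TX DMA stream state, reconstructed from the fields used in
 * the references below (dma_dev, dma_channel, dma_cfg, blk_cfg, buffer,
 * buffer_len, counter, timeout, timeout_work). The later sketches assume
 * these includes and this layout.
 */
#include <zephyr/kernel.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/drivers/uart.h>

struct uart_dma_stream {
	const struct device *dma_dev;         /* DMA controller servicing the stream */
	uint32_t dma_channel;                 /* channel number on that controller */
	struct dma_config dma_cfg;            /* channel configuration */
	struct dma_block_config blk_cfg;      /* single block: user buffer -> TX register */
	uint8_t *buffer;                      /* buffer handed in by uart_tx() */
	size_t buffer_len;                    /* its length; non-zero means TX in flight */
	size_t counter;                       /* bytes reported back in the TX event */
	int32_t timeout;                      /* timeout argument of uart_tx() */
	struct k_work_delayable timeout_work; /* runs uart_xmc4xxx_async_tx_timeout() */
};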
484 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_done()
485 .data.tx.len = data->dma_tx.counter}; in async_evt_tx_done()
487 data->dma_tx.buffer = NULL; in async_evt_tx_done()
488 data->dma_tx.buffer_len = 0; in async_evt_tx_done()
489 data->dma_tx.counter = 0; in async_evt_tx_done()
499 .data.tx.buf = data->dma_tx.buffer, in async_evt_tx_abort()
500 .data.tx.len = data->dma_tx.counter}; in async_evt_tx_abort()
502 data->dma_tx.buffer = NULL; in async_evt_tx_abort()
503 data->dma_tx.buffer_len = 0; in async_evt_tx_abort()
504 data->dma_tx.counter = 0; in async_evt_tx_abort()
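Lines 484-504 are the two completion helpers. Both publish the buffer pointer and the byte count in a uart_event and then clear the stream bookkeeping; they differ only in the event type (UART_TX_DONE vs UART_TX_ABORTED). A hedged reconstruction follows; async_user_callback() is an assumed driver-internal helper that invokes the application's registered uart_callback_t, and its exact position relative to the clearing is not visible in this listing.

/* Sketch of async_evt_tx_done() and async_evt_tx_abort(), lines 484-504. */
static void async_evt_tx_done(const struct device *dev)
{
	struct uart_xmc4xxx_data *data = dev->data;
	struct uart_event event = {.type = UART_TX_DONE,
				   .data.tx.buf = data->dma_tx.buffer,
				   .data.tx.len = data->dma_tx.counter};

	/* The stream is idle again: forget the finished buffer. */
	data->dma_tx.buffer = NULL;
	data->dma_tx.buffer_len = 0;
	data->dma_tx.counter = 0;

	async_user_callback(dev, &event);
}

static void async_evt_tx_abort(const struct device *dev)
{
	struct uart_xmc4xxx_data *data = dev->data;
	struct uart_event event = {.type = UART_TX_ABORTED,
				   .data.tx.buf = data->dma_tx.buffer,
				   .data.tx.len = data->dma_tx.counter};

	data->dma_tx.buffer = NULL;
	data->dma_tx.buffer_len = 0;
	data->dma_tx.counter = 0;

	async_user_callback(dev, &event);
}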
544 k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_xmc4xxx_async_tx_abort()
545 tx_buffer_len = data->dma_tx.buffer_len; in uart_xmc4xxx_async_tx_abort()
552 if (!dma_get_status(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &stat)) { in uart_xmc4xxx_async_tx_abort()
553 data->dma_tx.counter = tx_buffer_len - stat.pending_length; in uart_xmc4xxx_async_tx_abort()
556 dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_xmc4xxx_async_tx_abort()
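Lines 544-556 are the abort path. The timeout work is cancelled first so it cannot race the abort, the number of bytes already transferred is derived from the DMA controller's pending_length, and only then is the channel stopped. A hedged sketch; the early return for an idle stream, the error code, and the final event call are assumptions based on the surrounding line numbers.

/* Sketch of uart_xmc4xxx_async_tx_abort(), lines 544-556. */
static int uart_xmc4xxx_async_tx_abort(const struct device *dev)
{
	struct uart_xmc4xxx_data *data = dev->data;
	struct dma_status stat;
	size_t tx_buffer_len;

	/* Stop the TX timeout before touching the stream state. */
	k_work_cancel_delayable(&data->dma_tx.timeout_work);
	tx_buffer_len = data->dma_tx.buffer_len;
	if (tx_buffer_len == 0) {
		return -EFAULT;    /* assumed: no transmission in progress */
	}

	/* dma_get_status() returns 0 on success; pending_length is what the
	 * controller has not yet moved, so the difference is the byte count
	 * reported in the UART_TX_ABORTED event.
	 */
	if (!dma_get_status(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &stat)) {
		data->dma_tx.counter = tx_buffer_len - stat.pending_length;
	}

	dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
	async_evt_tx_abort(dev);

	return 0;
}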
570 struct uart_xmc4xxx_data *data = CONTAINER_OF(tx_stream, struct uart_xmc4xxx_data, dma_tx); in uart_xmc4xxx_async_tx_timeout()
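Line 570 shows how the TX timeout handler gets from the work item back to the driver data: the delayable work lives inside dma_tx, which in turn is a member of struct uart_xmc4xxx_data, so two CONTAINER_OF steps recover the data pointer. A sketch of the usual shape of such a handler; everything except the line-570 CONTAINER_OF is an assumption, including the dev back-pointer in the data struct.

/* Sketch of uart_xmc4xxx_async_tx_timeout(), line 570. */
static void uart_xmc4xxx_async_tx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct uart_dma_stream *tx_stream =
		CONTAINER_OF(dwork, struct uart_dma_stream, timeout_work);
	struct uart_xmc4xxx_data *data =
		CONTAINER_OF(tx_stream, struct uart_xmc4xxx_data, dma_tx);

	/* On timeout the pending transfer is aborted on the user's behalf;
	 * data->dev is an assumed back-pointer to the UART device instance.
	 */
	uart_xmc4xxx_async_tx_abort(data->dev);
}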
600 if (data->dma_tx.dma_dev != NULL) { in uart_xmc4xxx_async_init()
601 if (!device_is_ready(data->dma_tx.dma_dev)) { in uart_xmc4xxx_async_init()
605 k_work_init_delayable(&data->dma_tx.timeout_work, uart_xmc4xxx_async_tx_timeout); in uart_xmc4xxx_async_init()
608 data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->IN[0]; in uart_xmc4xxx_async_init()
610 data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->TBUF[0]; in uart_xmc4xxx_async_init()
613 data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT; in uart_xmc4xxx_async_init()
614 data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE; in uart_xmc4xxx_async_init()
615 data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg; in uart_xmc4xxx_async_init()
616 data->dma_tx.dma_cfg.user_data = (void *)dev; in uart_xmc4xxx_async_init()
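Lines 600-616 are the TX half of uart_xmc4xxx_async_init(): the DMA device named in the devicetree is validated, the timeout work is wired to the handler above, and the destination of the single-block transfer is pointed at the USIC TX register, IN[0] in one branch and TBUF[0] in the other (lines 608/610), which reads like a FIFO versus non-FIFO split. A hedged sketch; the branch condition, the error value, and everything outside the TX stream setup are assumptions.

/* Sketch of the TX setup in uart_xmc4xxx_async_init(), lines 600-616. */
static int uart_xmc4xxx_async_init(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	struct uart_xmc4xxx_data *data = dev->data;

	if (data->dma_tx.dma_dev != NULL) {
		if (!device_is_ready(data->dma_tx.dma_dev)) {
			return -ENODEV;    /* assumed error value */
		}
		k_work_init_delayable(&data->dma_tx.timeout_work,
				      uart_xmc4xxx_async_tx_timeout);

		/* Destination is the USIC channel's TX register: the FIFO input
		 * register IN[0] when a TX FIFO is in use (assumed condition),
		 * otherwise the plain transmit buffer TBUF[0].
		 */
		if (config->fifo_tx_size > 0) {    /* assumed field and test */
			data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->IN[0];
		} else {
			data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->TBUF[0];
		}

		/* Memory-to-peripheral: walk the source buffer, keep the
		 * destination register fixed.
		 */
		data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg;
		data->dma_tx.dma_cfg.user_data = (void *)dev;
	}

	return 0;
}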
647 if (data->dma_tx.dma_dev == NULL) { in uart_xmc4xxx_async_tx()
657 if (data->dma_tx.buffer_len != 0) { in uart_xmc4xxx_async_tx()
661 data->dma_tx.buffer = (uint8_t *)tx_data; in uart_xmc4xxx_async_tx()
662 data->dma_tx.buffer_len = buf_size; in uart_xmc4xxx_async_tx()
663 data->dma_tx.timeout = timeout; in uart_xmc4xxx_async_tx()
666 data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer; in uart_xmc4xxx_async_tx()
667 data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_len; in uart_xmc4xxx_async_tx()
669 ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &data->dma_tx.dma_cfg); in uart_xmc4xxx_async_tx()
683 async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout); in uart_xmc4xxx_async_tx()
685 return dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_xmc4xxx_async_tx()
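Lines 647-685 are the uart_tx() implementation on the DMA path: bail out when no TX DMA channel is described, treat a non-zero buffer_len as a transfer already in flight, record the new buffer, point the single DMA block at it, reconfigure the channel, arm the timeout, and start the transfer. A hedged sketch; the locking the real driver likely adds, the exact error codes, and async_timer_start() being a thin k_work_schedule() wrapper that skips "wait forever" timeouts are assumptions.

/* Sketch of uart_xmc4xxx_async_tx(), lines 647-685. */
static int uart_xmc4xxx_async_tx(const struct device *dev, const uint8_t *tx_data,
				 size_t buf_size, int32_t timeout)
{
	struct uart_xmc4xxx_data *data = dev->data;
	int ret;

	if (data->dma_tx.dma_dev == NULL) {
		return -ENOTSUP;    /* assumed: no TX DMA channel in devicetree */
	}

	if (data->dma_tx.buffer_len != 0) {
		return -EBUSY;      /* assumed: previous transfer still running */
	}

	data->dma_tx.buffer = (uint8_t *)tx_data;
	data->dma_tx.buffer_len = buf_size;
	data->dma_tx.timeout = timeout;

	/* Single block: user buffer -> TX register set up in async_init(). */
	data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer;
	data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_len;

	ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel,
			 &data->dma_tx.dma_cfg);
	if (ret < 0) {
		return ret;
	}

	/* async_timer_start() is assumed to schedule timeout_work after
	 * `timeout`, doing nothing for the "wait forever" sentinel values.
	 */
	async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout);

	return dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
}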
831 size_t tx_buffer_len = data->dma_tx.buffer_len; in uart_xmc4xxx_dma_tx_cb()
838 __ASSERT_NO_MSG(channel == data->dma_tx.dma_channel); in uart_xmc4xxx_dma_tx_cb()
840 k_work_cancel_delayable(&data->dma_tx.timeout_work); in uart_xmc4xxx_dma_tx_cb()
846 if (!dma_get_status(data->dma_tx.dma_dev, channel, &stat)) { in uart_xmc4xxx_dma_tx_cb()
847 data->dma_tx.counter = tx_buffer_len - stat.pending_length; in uart_xmc4xxx_dma_tx_cb()
852 if (data->dma_tx.buffer == NULL) { in uart_xmc4xxx_dma_tx_cb()
853 dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel); in uart_xmc4xxx_dma_tx_cb()
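Lines 831-853 are the DMA completion callback registered through dma_cfg (its user_data was set to dev at line 616): it asserts that the callback really belongs to the TX channel, cancels the timeout, computes the final counter the same way the abort path does, and raises UART_TX_DONE; if the stream's buffer is still NULL afterwards, i.e. the user callback did not immediately chain another uart_tx(), the channel is stopped. A hedged sketch; the status check and the exact position of the async_evt_tx_done() call between lines 847 and 852 are assumptions.

/* Sketch of uart_xmc4xxx_dma_tx_cb(), lines 831-853, matching the
 * dma_callback_t signature from <zephyr/drivers/dma.h>.
 */
static void uart_xmc4xxx_dma_tx_cb(const struct device *dma_dev, void *user_data,
				   uint32_t channel, int status)
{
	const struct device *dev = user_data;    /* set via dma_cfg.user_data */
	struct uart_xmc4xxx_data *data = dev->data;
	size_t tx_buffer_len = data->dma_tx.buffer_len;
	struct dma_status stat;

	ARG_UNUSED(dma_dev);

	if (status != 0) {
		return;    /* assumed: error callbacks are not handled here */
	}

	__ASSERT_NO_MSG(channel == data->dma_tx.dma_channel);
	k_work_cancel_delayable(&data->dma_tx.timeout_work);

	if (!dma_get_status(data->dma_tx.dma_dev, channel, &stat)) {
		data->dma_tx.counter = tx_buffer_len - stat.pending_length;
	}

	async_evt_tx_done(dev);

	/* If the user callback did not queue another buffer, release the channel. */
	if (data->dma_tx.buffer == NULL) {
		dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
	}
}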