Lines matching +full:uart +full:-dev in drivers/serial/uart_xmc4xxx.c, Zephyr's UART driver for the Infineon XMC4xxx USIC. Non-matching source lines are elided below as "...".
/* SPDX-License-Identifier: Apache-2.0 */

#include <zephyr/drivers/uart.h>

XMC_USIC_CH_t *uart;            /* member of struct uart_xmc4xxx_config */
const struct device *dev;       /* member of struct uart_xmc4xxx_data */
static int uart_xmc4xxx_poll_in(const struct device *dev, unsigned char *c)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	bool fifo_empty;

	if (config->fifo_rx_size > 0) {
		fifo_empty = XMC_USIC_CH_RXFIFO_IsEmpty(config->uart);
	} else {
		fifo_empty = !XMC_USIC_CH_GetReceiveBufferStatus(config->uart);
	}
	if (fifo_empty) {
		return -1;
	}
	*c = (unsigned char)XMC_UART_CH_GetReceivedData(config->uart);
	return 0;
}
static void uart_xmc4xxx_poll_out(const struct device *dev, unsigned char c)
{
	const struct uart_xmc4xxx_config *config = dev->config;

	/* XMC_UART_CH_Transmit() only blocks for the UART to finish transmitting,
	 * so wait for a free TX FIFO entry first when a FIFO is in use
	 */
	while (config->fifo_tx_size > 0 && XMC_USIC_CH_TXFIFO_IsFull(config->uart)) {
	}
	XMC_UART_CH_Transmit(config->uart, c);
}
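For orientation, here is how an application reaches this polling path through Zephyr's generic UART API. A minimal echo sketch; the chosen-node lookup assumes the console is routed to one of these USIC channels:

#include <zephyr/device.h>
#include <zephyr/devicetree.h>
#include <zephyr/drivers/uart.h>

int main(void)
{
	const struct device *uart = DEVICE_DT_GET(DT_CHOSEN(zephyr_console));
	unsigned char c;

	if (!device_is_ready(uart)) {
		return -ENODEV;
	}
	while (1) {
		/* uart_poll_in() returns -1 while no byte is pending */
		if (uart_poll_in(uart, &c) == 0) {
			uart_poll_out(uart, c); /* echo it back */
		}
	}
	return 0;
}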
/* disable_tx_events() */
	if (config->fifo_tx_size > 0) {
		XMC_USIC_CH_TXFIFO_DisableEvent(config->uart,
						XMC_USIC_CH_TXFIFO_EVENT_CONF_STANDARD);
	} else {
		XMC_USIC_CH_DisableEvent(config->uart, XMC_USIC_CH_EVENT_TRANSMIT_SHIFT);
	}

/* enable_tx_events() */
	if (config->fifo_tx_size > 0) {
		/* wait for a free FIFO entry before enabling the event */
		while (XMC_USIC_CH_TXFIFO_IsFull(config->uart)) {
		}
		XMC_USIC_CH_TXFIFO_EnableEvent(config->uart,
					       XMC_USIC_CH_TXFIFO_EVENT_CONF_STANDARD);
	} else {
		XMC_USIC_CH_EnableEvent(config->uart, XMC_USIC_CH_EVENT_TRANSMIT_SHIFT);
	}
static void uart_xmc4xxx_isr(void *arg)
{
	const struct device *dev = arg;
	struct uart_xmc4xxx_data *data = dev->data;

	if (data->user_cb) {
		data->user_cb(dev, data->user_data);
	}
	...
	const struct uart_xmc4xxx_config *config = dev->config;
	...
	if (data->dma_rx.buffer_len) {
		...
		/* async RX path: stop servicing RX interrupts here and let the
		 * DMA plus the inactivity-timeout work item take over
		 */
		irq_disable(config->irq_num_rx);
		...
		async_timer_start(&data->dma_rx.timeout_work, data->dma_rx.timeout);
	}
	...
}
static void uart_xmc4xxx_configure_service_requests(const struct device *dev)
{
	struct uart_xmc4xxx_data *data = dev->data;
	const struct uart_xmc4xxx_config *config = dev->config;

	__ASSERT(config->irq_num_tx >= USIC_IRQ_MIN && config->irq_num_tx <= USIC_IRQ_MAX, ...);
	data->service_request_tx = (config->irq_num_tx - USIC_IRQ_MIN) % IRQS_PER_USIC;

	if (config->fifo_tx_size > 0) {
		XMC_USIC_CH_TXFIFO_SetInterruptNodePointer(
			config->uart, XMC_USIC_CH_TXFIFO_INTERRUPT_NODE_POINTER_STANDARD,
			data->service_request_tx);
	} else {
		XMC_USIC_CH_SetInterruptNodePointer(
			config->uart, XMC_USIC_CH_INTERRUPT_NODE_POINTER_TRANSMIT_SHIFT,
			data->service_request_tx);
	}

	__ASSERT(config->irq_num_rx >= USIC_IRQ_MIN && config->irq_num_rx <= USIC_IRQ_MAX, ...);
	data->service_request_rx = (config->irq_num_rx - USIC_IRQ_MIN) % IRQS_PER_USIC;

	if (config->fifo_rx_size > 0) {
		XMC_USIC_CH_RXFIFO_SetInterruptNodePointer(
			config->uart, XMC_USIC_CH_RXFIFO_INTERRUPT_NODE_POINTER_STANDARD,
			data->service_request_rx);
		XMC_USIC_CH_RXFIFO_SetInterruptNodePointer(
			config->uart, XMC_USIC_CH_RXFIFO_INTERRUPT_NODE_POINTER_ALTERNATE,
			data->service_request_rx);
	} else {
		XMC_USIC_CH_SetInterruptNodePointer(config->uart,
			XMC_USIC_CH_INTERRUPT_NODE_POINTER_RECEIVE,
			data->service_request_rx);
		XMC_USIC_CH_SetInterruptNodePointer(
			config->uart, XMC_USIC_CH_INTERRUPT_NODE_POINTER_ALTERNATE_RECEIVE,
			data->service_request_rx);
	}
}
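The modulo mapping above assumes the USIC service-request IRQ lines are numbered contiguously. A worked example; the concrete numbers are assumptions based on the XMC4500 vector table, where USIC0.SR0 is IRQ 84 and each USIC module exposes six service requests:

/* Hypothetical values for illustration (XMC4500-style vector table) */
#define USIC_IRQ_MIN  84  /* USIC0.SR0 */
#define IRQS_PER_USIC  6  /* SR0..SR5 per USIC module */

/* irq_num_rx = 90 would be USIC1.SR0:
 * (90 - 84) % 6 == 0, so the channel raises service request 0,
 * which the NVIC sees as interrupt line 90.
 */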
static int uart_xmc4xxx_irq_tx_ready(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;

	if (config->fifo_tx_size > 0) {
		return !XMC_USIC_CH_TXFIFO_IsFull(config->uart);
	}
	return XMC_USIC_CH_GetTransmitBufferStatus(config->uart) ==
	       XMC_USIC_CH_TBUF_STATUS_IDLE;
}
static void uart_xmc4xxx_irq_rx_disable(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;

	if (config->fifo_rx_size > 0) {
		XMC_USIC_CH_RXFIFO_DisableEvent(config->uart,
						XMC_USIC_CH_RXFIFO_EVENT_CONF_STANDARD |
						XMC_USIC_CH_RXFIFO_EVENT_CONF_ALTERNATE);
	} else {
		XMC_USIC_CH_DisableEvent(config->uart, XMC_USIC_CH_EVENT_STANDARD_RECEIVE |
						       XMC_USIC_CH_EVENT_ALTERNATIVE_RECEIVE);
	}
}
static void uart_xmc4xxx_irq_rx_enable(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	uint32_t recv_status;

	/* re-enable the IRQ, as it may have been disabled during async_rx */
	clear_pending_interrupt(config->irq_num_rx);
	irq_enable(config->irq_num_rx);

	if (config->fifo_rx_size > 0) {
		XMC_USIC_CH_RXFIFO_Flush(config->uart);
		XMC_USIC_CH_RXFIFO_SetSizeTriggerLimit(config->uart, config->fifo_rx_size, 0);
		...
		/* SRBTEN: raise the standard receive buffer event via the trigger limit */
		config->uart->RBCTR |= BIT(USIC_CH_RBCTR_SRBTEN_Pos);
		...
		XMC_USIC_CH_RXFIFO_EnableEvent(config->uart,
					       XMC_USIC_CH_RXFIFO_EVENT_CONF_STANDARD |
					       XMC_USIC_CH_RXFIFO_EVENT_CONF_ALTERNATE);
	} else {
		/* flush out any bytes received while the UART RX IRQ was disabled;
		 * the receive buffer is two stages deep, so up to two reads are needed
		 */
		recv_status = XMC_USIC_CH_GetReceiveBufferStatus(config->uart);
		if (recv_status & USIC_CH_RBUFSR_RDV0_Msk) {
			XMC_UART_CH_GetReceivedData(config->uart);
		}
		if (recv_status & USIC_CH_RBUFSR_RDV1_Msk) {
			XMC_UART_CH_GetReceivedData(config->uart);
		}
		XMC_USIC_CH_EnableEvent(config->uart, XMC_USIC_CH_EVENT_STANDARD_RECEIVE |
						      XMC_USIC_CH_EVENT_ALTERNATIVE_RECEIVE);
	}
}
static int uart_xmc4xxx_fifo_fill(const struct device *dev, const uint8_t *tx_data, int len)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	bool fifo_full;
	int i;
	...
	for (i = 0; i < len; i++) {
		XMC_UART_CH_Transmit(config->uart, tx_data[i]);
		if (config->fifo_tx_size == 0) {
			/* no FIFO: TBUF holds a single byte at a time */
			...
		}
		fifo_full = XMC_USIC_CH_TXFIFO_IsFull(config->uart);
		...
	}
	...
}
static int uart_xmc4xxx_fifo_read(const struct device *dev, uint8_t *rx_data, const int size)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	bool fifo_empty;
	int i;
	...
	for (i = 0; i < size; i++) {
		if (config->fifo_rx_size > 0) {
			fifo_empty = XMC_USIC_CH_RXFIFO_IsEmpty(config->uart);
		} else {
			fifo_empty = !XMC_USIC_CH_GetReceiveBufferStatus(config->uart);
		}
		...
		rx_data[i] = XMC_UART_CH_GetReceivedData(config->uart);
	}
	...
}
static void uart_xmc4xxx_irq_tx_enable(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	const struct uart_xmc4xxx_data *data = dev->data;

	clear_pending_interrupt(config->irq_num_tx);
	irq_enable(config->irq_num_tx);
	...
	/* kick the service request so the handler runs as soon as TX is ready */
	XMC_USIC_CH_TriggerServiceRequest(config->uart, data->service_request_tx);
}

static void uart_xmc4xxx_irq_tx_disable(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;

	if (config->fifo_tx_size > 0) {
		XMC_USIC_CH_TXFIFO_DisableEvent(config->uart,
						XMC_USIC_CH_TXFIFO_EVENT_CONF_STANDARD);
	} else {
		XMC_USIC_CH_DisableEvent(config->uart, XMC_USIC_CH_EVENT_TRANSMIT_SHIFT);
	}
}
static int uart_xmc4xxx_irq_rx_ready(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;

	if (config->fifo_rx_size > 0) {
		return !XMC_USIC_CH_RXFIFO_IsEmpty(config->uart);
	}
	return XMC_USIC_CH_GetReceiveBufferStatus(config->uart);
}

static void uart_xmc4xxx_irq_callback_set(const struct device *dev,
					  uart_irq_callback_user_data_t cb, void *user_data)
{
	struct uart_xmc4xxx_data *data = dev->data;

	data->user_cb = cb;
	data->user_data = user_data;
	...
	/* the interrupt-driven and async APIs are mutually exclusive */
	data->async_cb = NULL;
	data->async_user_data = NULL;
	...
}
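A sketch of the interrupt-driven API these handlers back, using only generic uart_irq_* calls; the 16-byte scratch buffer is an arbitrary choice:

#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>

static void on_uart_irq(const struct device *dev, void *user_data)
{
	uint8_t buf[16];

	/* uart_irq_update() must be called first in the callback */
	if (!uart_irq_update(dev)) {
		return;
	}
	if (uart_irq_rx_ready(dev)) {
		int n = uart_fifo_read(dev, buf, sizeof(buf));

		/* hand the n received bytes to the application here */
		(void)n;
	}
}

void rx_setup(const struct device *uart)
{
	uart_irq_callback_set(uart, on_uart_irq);
	uart_irq_rx_enable(uart);
}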
static int uart_xmc4xxx_irq_is_pending(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	uint32_t irq_num_tx = config->irq_num_tx;
	uint32_t irq_num_rx = config->irq_num_rx;
	...
	/* NVIC set-pending layout: bit 0 -> irq 0, bit 1 -> irq 1, ... */
	...
}
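The pending check works on the NVIC set-pending registers directly; CMSIS exposes the same information, so an equivalent sketch (assuming Zephyr's cmsis_core.h wrapper is available on this SoC) is:

#include <stdbool.h>
#include <stdint.h>
#include <cmsis_core.h>

/* ISPR bit n set <=> interrupt line n is pending, matching the
 * bit 0 -> irq 0 layout noted above
 */
static bool irq_pending(uint32_t irq_num)
{
	return NVIC_GetPendingIRQ((IRQn_Type)irq_num) != 0;
}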
/* async_evt_rx_buf_request() */
	struct uart_event evt = {.type = UART_RX_BUF_REQUEST};

	if (data->async_cb) {
		data->async_cb(data->dev, &evt, data->async_user_data);
	}

/* async_evt_rx_release_buffer() */
	if (buffer_type == NEXT_BUFFER && !data->rx_next_buffer) {
		return;
	}
	if (buffer_type == CURRENT_BUFFER && !data->dma_rx.buffer) {
		return;
	}

	struct uart_event event = {.type = UART_RX_BUF_RELEASED};

	if (buffer_type == NEXT_BUFFER) {
		event.data.rx_buf.buf = data->rx_next_buffer;
		data->rx_next_buffer = NULL;
		data->rx_next_buffer_len = 0;
	} else {
		event.data.rx_buf.buf = data->dma_rx.buffer;
		data->dma_rx.buffer = NULL;
		data->dma_rx.buffer_len = 0;
	}

	if (data->async_cb) {
		data->async_cb(data->dev, &event, data->async_user_data);
	}
/* async_evt_rx_stopped() */
	struct uart_event event = {.type = UART_RX_STOPPED};
	struct uart_event_rx *rx = &event.data.rx_stop.data;
	struct dma_status stat;

	if (data->dma_rx.buffer_len == 0 || data->async_cb == NULL) {
		return;
	}
	rx->buf = data->dma_rx.buffer;
	if (dma_get_status(data->dma_rx.dma_dev, data->dma_rx.dma_channel, &stat) == 0) {
		data->dma_rx.counter = data->dma_rx.buffer_len - stat.pending_length;
	}
	rx->len = data->dma_rx.counter - data->dma_rx.offset;
	rx->offset = data->dma_rx.counter;
	...
	data->async_cb(data->dev, &event, data->async_user_data);

/* async_evt_rx_disabled() */
	struct uart_event event = {.type = UART_RX_DISABLED};

	data->dma_rx.buffer = NULL;
	data->dma_rx.buffer_len = 0;
	data->dma_rx.offset = 0;
	data->dma_rx.counter = 0;

	if (data->async_cb) {
		data->async_cb(data->dev, &event, data->async_user_data);
	}
/* async_evt_rx_rdy() */
	struct uart_event event = {.type = UART_RX_RDY,
				   .data.rx.buf = (uint8_t *)data->dma_rx.buffer,
				   .data.rx.len = data->dma_rx.counter - data->dma_rx.offset,
				   .data.rx.offset = data->dma_rx.offset};

	data->dma_rx.offset = data->dma_rx.counter;

	if (event.data.rx.len > 0 && data->async_cb) {
		data->async_cb(data->dev, &event, data->async_user_data);
	}

/* async_evt_tx_done() */
	struct uart_event event = {.type = UART_TX_DONE,
				   .data.tx.buf = data->dma_tx.buffer,
				   .data.tx.len = data->dma_tx.counter};

	data->dma_tx.buffer = NULL;
	data->dma_tx.buffer_len = 0;
	data->dma_tx.counter = 0;

	if (data->async_cb) {
		data->async_cb(data->dev, &event, data->async_user_data);
	}

/* async_evt_tx_abort() */
	struct uart_event event = {.type = UART_TX_ABORTED,
				   .data.tx.buf = data->dma_tx.buffer,
				   .data.tx.len = data->dma_tx.counter};

	data->dma_tx.buffer = NULL;
	data->dma_tx.buffer_len = 0;
	data->dma_tx.counter = 0;

	if (data->async_cb) {
		data->async_cb(data->dev, &event, data->async_user_data);
	}
/* uart_xmc4xxx_async_rx_timeout() */
	if (data->dma_rx.buffer_len == 0) {
		return;
	}
	if (dma_get_status(data->dma_rx.dma_dev, data->dma_rx.dma_channel, &stat) == 0) {
		size_t rx_rcv_len = data->dma_rx.buffer_len - stat.pending_length;

		if (rx_rcv_len > data->dma_rx.offset) {
			data->dma_rx.counter = rx_rcv_len;
			async_evt_rx_rdy(data);
		}
	}
	async_timer_start(&data->dma_rx.timeout_work, data->dma_rx.timeout);

static int uart_xmc4xxx_async_tx_abort(const struct device *dev)
{
	struct uart_xmc4xxx_data *data = dev->data;
	struct dma_status stat;
	size_t tx_buffer_len;
	...
	k_work_cancel_delayable(&data->dma_tx.timeout_work);
	tx_buffer_len = data->dma_tx.buffer_len;
	if (tx_buffer_len == 0) {
		...
		return -EINVAL;
	}
	if (!dma_get_status(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &stat)) {
		data->dma_tx.counter = tx_buffer_len - stat.pending_length;
	}
	...
	dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
	disable_tx_events(dev->config);
	...
}

/* uart_xmc4xxx_async_tx_timeout() */
	uart_xmc4xxx_async_tx_abort(data->dev);
static int uart_xmc4xxx_async_init(const struct device *dev)
{
	const struct uart_xmc4xxx_config *config = dev->config;
	struct uart_xmc4xxx_data *data = dev->data;

	data->dev = dev;

	if (data->dma_rx.dma_dev != NULL) {
		if (!device_is_ready(data->dma_rx.dma_dev)) {
			return -ENODEV;
		}
		k_work_init_delayable(&data->dma_rx.timeout_work, uart_xmc4xxx_async_rx_timeout);
		if (config->fifo_rx_size > 0) {
			data->dma_rx.blk_cfg.source_address = (uint32_t)&config->uart->OUTR;
		} else {
			data->dma_rx.blk_cfg.source_address = (uint32_t)&config->uart->RBUF;
		}
		data->dma_rx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		data->dma_rx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		data->dma_rx.dma_cfg.head_block = &data->dma_rx.blk_cfg;
		data->dma_rx.dma_cfg.user_data = (void *)dev;
	}

	if (data->dma_tx.dma_dev != NULL) {
		if (!device_is_ready(data->dma_tx.dma_dev)) {
			return -ENODEV;
		}
		k_work_init_delayable(&data->dma_tx.timeout_work, uart_xmc4xxx_async_tx_timeout);
		if (config->fifo_tx_size > 0) {
			/* with a FIFO, DMA writes the TX FIFO input register IN[] */
			data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->IN[0];
		} else {
			data->dma_tx.blk_cfg.dest_address = (uint32_t)&config->uart->TBUF[0];
		}
		data->dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg;
		data->dma_tx.dma_cfg.user_data = (void *)dev;
	}

	return 0;
}
static int uart_xmc4xxx_async_callback_set(const struct device *dev, uart_callback_t callback,
					   void *user_data)
{
	struct uart_xmc4xxx_data *data = dev->data;

	data->async_cb = callback;
	data->async_user_data = user_data;
	...
	/* the async and interrupt-driven APIs are mutually exclusive */
	data->user_cb = NULL;
	data->user_data = NULL;
	...
	return 0;
}
static int uart_xmc4xxx_async_tx(const struct device *dev, const uint8_t *tx_data,
				 size_t buf_size, int32_t timeout)
{
	struct uart_xmc4xxx_data *data = dev->data;
	const struct uart_xmc4xxx_config *config = dev->config;
	int ret;

	/* Assume threads are pre-emptive, so this call cannot be interrupted ... */
	if (data->dma_tx.dma_dev == NULL) {
		return -ENODEV;
	}
	if (tx_data == NULL || buf_size == 0) {
		return -EINVAL;
	}
	/* only one async TX transfer may be in flight at a time */
	if (data->dma_tx.buffer_len != 0) {
		return -EBUSY;
	}

	data->dma_tx.buffer = (uint8_t *)tx_data;
	data->dma_tx.buffer_len = buf_size;
	data->dma_tx.timeout = timeout;

	data->dma_tx.blk_cfg.source_address = (uint32_t)data->dma_tx.buffer;
	data->dma_tx.blk_cfg.block_size = data->dma_tx.buffer_len;

	ret = dma_config(data->dma_tx.dma_dev, data->dma_tx.dma_channel, &data->dma_tx.dma_cfg);
	...
	/* drain any byte still in flight before handing TX over to DMA */
	while (!uart_xmc4xxx_irq_tx_ready(dev)) {
	}
	irq_disable(config->irq_num_tx);
	...
	XMC_USIC_CH_TriggerServiceRequest(config->uart, data->service_request_tx);
	...
	async_timer_start(&data->dma_tx.timeout_work, data->dma_tx.timeout);

	return dma_start(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
}
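From the application side, this TX path is reached through uart_callback_set() and uart_tx(). A sketch, assuming CONFIG_UART_ASYNC_API=y and an arbitrary 100 ms abort timeout:

#include <zephyr/kernel.h>
#include <zephyr/drivers/uart.h>

static void tx_event_handler(const struct device *dev, struct uart_event *evt,
			     void *user_data)
{
	if (evt->type == UART_TX_DONE) {
		/* evt->data.tx.buf / evt->data.tx.len describe the finished transfer */
	}
}

int send_async(const struct device *uart)
{
	static const uint8_t msg[] = "hello\r\n";
	int ret = uart_callback_set(uart, tx_event_handler, NULL);

	if (ret < 0) {
		return ret;
	}
	/* aborts with UART_TX_ABORTED if DMA has not finished within 100 ms */
	return uart_tx(uart, msg, sizeof(msg) - 1, 100 * USEC_PER_MSEC);
}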
static int uart_xmc4xxx_async_rx_enable(const struct device *dev, uint8_t *buf, size_t len,
					int32_t timeout)
{
	struct uart_xmc4xxx_data *data = dev->data;
	int ret;

	if (data->dma_rx.dma_dev == NULL) {
		return -ENODEV;
	}
	if (data->dma_rx.buffer_len != 0) {
		return -EBUSY;
	}

	uart_xmc4xxx_irq_rx_disable(dev);

	data->dma_rx.buffer = buf;
	data->dma_rx.buffer_len = len;
	data->dma_rx.timeout = timeout;

	data->dma_rx.blk_cfg.dest_address = (uint32_t)data->dma_rx.buffer;
	data->dma_rx.blk_cfg.block_size = data->dma_rx.buffer_len;

	ret = dma_config(data->dma_rx.dma_dev, data->dma_rx.dma_channel, &data->dma_rx.dma_cfg);
	...
	uart_xmc4xxx_irq_rx_enable(dev);

	return dma_start(data->dma_rx.dma_dev, data->dma_rx.dma_channel);
}
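The receive side pairs uart_rx_enable() with UART_RX_BUF_REQUEST handling, which is what keeps the dma_reload() rotation in uart_xmc4xxx_dma_rx_cb() below fed with buffers. Buffer sizes and the 50 ms inactivity timeout are illustrative assumptions:

#include <zephyr/kernel.h>
#include <zephyr/drivers/uart.h>

static uint8_t rx_bufs[2][64];
static int rx_next;

static void rx_event_handler(const struct device *dev, struct uart_event *evt,
			     void *user_data)
{
	switch (evt->type) {
	case UART_RX_RDY:
		/* evt->data.rx.buf + evt->data.rx.offset holds evt->data.rx.len bytes */
		break;
	case UART_RX_BUF_REQUEST:
		/* provide the next buffer so DMA can switch without losing bytes */
		rx_next ^= 1;
		uart_rx_buf_rsp(dev, rx_bufs[rx_next], sizeof(rx_bufs[rx_next]));
		break;
	default:
		break;
	}
}

int receive_async(const struct device *uart)
{
	int ret = uart_callback_set(uart, rx_event_handler, NULL);

	if (ret < 0) {
		return ret;
	}
	/* UART_RX_RDY fires after 50 ms of RX inactivity */
	return uart_rx_enable(uart, rx_bufs[0], sizeof(rx_bufs[0]), 50 * USEC_PER_MSEC);
}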
/* uart_xmc4xxx_dma_rx_cb() */
	struct uart_xmc4xxx_data *data = dev_uart->data;
	...
	__ASSERT_NO_MSG(channel == data->dma_rx.dma_channel);
	k_work_cancel_delayable(&data->dma_rx.timeout_work);
	...
	/* on DMA error, stop the channel and tear down async RX */
	dma_stop(data->dma_rx.dma_dev, data->dma_rx.dma_channel);
	...
	if (data->dma_rx.buffer_len == 0) {
		return;
	}

	data->dma_rx.counter = data->dma_rx.buffer_len;
	...
	if (!data->rx_next_buffer) {
		...
		dma_stop(data->dma_rx.dma_dev, data->dma_rx.dma_channel);
		...
	}

	/* rotate in the next buffer provided via uart_rx_buf_rsp() */
	data->dma_rx.buffer = data->rx_next_buffer;
	data->dma_rx.buffer_len = data->rx_next_buffer_len;
	data->dma_rx.offset = 0;
	data->dma_rx.counter = 0;
	data->rx_next_buffer = NULL;
	data->rx_next_buffer_len = 0;

	ret = dma_reload(data->dma_rx.dma_dev, data->dma_rx.dma_channel,
			 data->dma_rx.blk_cfg.source_address, (uint32_t)data->dma_rx.buffer,
			 data->dma_rx.buffer_len);
	...
	dma_stop(data->dma_rx.dma_dev, data->dma_rx.dma_channel);
	...
	dma_start(data->dma_rx.dma_dev, data->dma_rx.dma_channel);
	...
	async_timer_start(&data->dma_rx.timeout_work, data->dma_rx.timeout);
static int uart_xmc4xxx_async_rx_disable(const struct device *dev)
{
	struct uart_xmc4xxx_data *data = dev->data;
	struct dma_status stat;
	...
	k_work_cancel_delayable(&data->dma_rx.timeout_work);
	...
	if (data->dma_rx.buffer_len == 0) {
		__ASSERT_NO_MSG(data->dma_rx.buffer == NULL);
		...
		return -EINVAL;
	}

	dma_stop(data->dma_rx.dma_dev, data->dma_rx.dma_channel);
	uart_xmc4xxx_irq_rx_disable(dev);

	if (dma_get_status(data->dma_rx.dma_dev, data->dma_rx.dma_channel, &stat) == 0) {
		size_t rx_rcv_len = data->dma_rx.buffer_len - stat.pending_length;

		if (rx_rcv_len > data->dma_rx.offset) {
			data->dma_rx.counter = rx_rcv_len;
		}
	}
	...
}
/* uart_xmc4xxx_dma_tx_cb() */
	struct uart_xmc4xxx_data *data = dev_uart->data;
	size_t tx_buffer_len = data->dma_tx.buffer_len;
	struct dma_status stat;
	...
	__ASSERT_NO_MSG(channel == data->dma_tx.dma_channel);
	k_work_cancel_delayable(&data->dma_tx.timeout_work);
	...
	if (!dma_get_status(data->dma_tx.dma_dev, channel, &stat)) {
		data->dma_tx.counter = tx_buffer_len - stat.pending_length;
	}
	...
	/* async_evt_tx_done() clears dma_tx.buffer once the event is delivered */
	if (data->dma_tx.buffer == NULL) {
		dma_stop(data->dma_tx.dma_dev, data->dma_tx.dma_channel);
		disable_tx_events(dev_uart->config);
	}
static int uart_xmc4xxx_rx_buf_rsp(const struct device *dev, uint8_t *buf, size_t len)
{
	struct uart_xmc4xxx_data *data = dev->data;
	int ret = 0;
	...
	if (data->dma_rx.buffer_len == 0U) {
		/* async RX is not running */
		ret = -EACCES;
		...
	}
	if (data->rx_next_buffer_len != 0U) {
		/* a next buffer is already pending */
		ret = -EBUSY;
		...
	}

	data->rx_next_buffer = buf;
	data->rx_next_buffer_len = len;
	...
	return ret;
}
static int uart_xmc4xxx_init(const struct device *dev)
{
	int ret;
	const struct uart_xmc4xxx_config *config = dev->config;
	struct uart_xmc4xxx_data *data = dev->data;
	uint8_t fifo_offset = config->fifo_start_offset;

	data->config.data_bits = 8U;
	data->config.stop_bits = 1U;
	...
	XMC_UART_CH_Init(config->uart, &(data->config));

	if (config->fifo_tx_size > 0) {
		/* fifo_tx_size is a power-of-two exponent; a FIFO must start at an
		 * offset aligned to its own size
		 */
		fifo_offset = ROUND_UP(fifo_offset, BIT(config->fifo_tx_size));
		XMC_USIC_CH_TXFIFO_Configure(config->uart, fifo_offset, config->fifo_tx_size, 1);
		fifo_offset += BIT(config->fifo_tx_size);
	}

	if (config->fifo_rx_size > 0) {
		fifo_offset = ROUND_UP(fifo_offset, BIT(config->fifo_rx_size));
		XMC_USIC_CH_RXFIFO_Configure(config->uart, fifo_offset, config->fifo_rx_size, 0);
		fifo_offset += BIT(config->fifo_rx_size);
	}

	/* the requested FIFOs must fit into the USIC module's shared FIFO RAM */
	if (...) {
		return -EINVAL;
	}

	/* Connect UART RX to logic 1. It is connected to the proper pin after pinctrl is applied */
	XMC_UART_CH_SetInputSource(config->uart, XMC_UART_CH_INPUT_RXD, 0x7);

	/* Start the UART before pinctrl, because the USIC is driving the TX line */
	XMC_UART_CH_Start(config->uart);

	ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
	...
	/* Connect UART RX to the target pin */
	XMC_UART_CH_SetInputSource(config->uart, XMC_UART_CH_INPUT_RXD,
				   config->input_src);
	...
	config->irq_config_func(dev);
	uart_xmc4xxx_configure_service_requests(dev);
	...
	ret = uart_xmc4xxx_async_init(dev);
	...
	return ret;
}
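The FIFO bookkeeping in init is easiest to see with numbers: fifo_tx_size and fifo_rx_size act as power-of-two exponents into the module's FIFO RAM, and each FIFO must start at an offset aligned to its own size. The values below are assumptions for illustration only:

#include <stdint.h>
#include <zephyr/sys/util.h> /* ROUND_UP(), BIT() */

void fifo_layout_example(void)
{
	/* assumed: fifo_start_offset = 0, tx exponent = 4, rx exponent = 5 */
	uint8_t fifo_offset = 0;

	fifo_offset = ROUND_UP(fifo_offset, BIT(4)); /* 0 is already 16-aligned */
	/* TX FIFO occupies entries [0, 16) */
	fifo_offset += BIT(4);                       /* fifo_offset == 16 */

	fifo_offset = ROUND_UP(fifo_offset, BIT(5)); /* 16 rounds up to 32 */
	/* RX FIFO occupies entries [32, 64); entries [16, 32) stay unused */
	fifo_offset += BIT(5);                       /* fifo_offset == 64 */
}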
static DEVICE_API(uart, uart_xmc4xxx_driver_api) = {
	...
};

/* from the per-instance device-definition macros: */
static void uart_xmc4xxx_irq_setup_##index(const struct device *dev) \
	...
	.uart = (XMC_USIC_CH_t *)DT_INST_REG_ADDR(index), \