/*
 * Copyright (c) 2019 Mohamed ElShahawi (extremegtx@hotmail.com)
 * Copyright (c) 2023-2025 Espressif Systems (Shanghai) Co., Ltd.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#define DT_DRV_COMPAT espressif_esp32_uart

/* Include esp-idf headers first to avoid redefining BIT() macro */
/* TODO: include w/o prefix */
#ifdef CONFIG_SOC_SERIES_ESP32
#include <esp32/rom/ets_sys.h>
#include <esp32/rom/gpio.h>
#include <soc/dport_reg.h>
#elif defined(CONFIG_SOC_SERIES_ESP32S2)
#include <esp32s2/rom/ets_sys.h>
#include <esp32s2/rom/gpio.h>
#include <soc/dport_reg.h>
#elif defined(CONFIG_SOC_SERIES_ESP32S3)
#include <esp32s3/rom/ets_sys.h>
#include <esp32s3/rom/gpio.h>
#include <zephyr/dt-bindings/clock/esp32s3_clock.h>
#elif defined(CONFIG_SOC_SERIES_ESP32C2)
#include <esp32c2/rom/ets_sys.h>
#include <esp32c2/rom/gpio.h>
#include <zephyr/dt-bindings/clock/esp32c2_clock.h>
#elif defined(CONFIG_SOC_SERIES_ESP32C3)
#include <esp32c3/rom/ets_sys.h>
#include <esp32c3/rom/gpio.h>
#include <zephyr/dt-bindings/clock/esp32c3_clock.h>
#elif defined(CONFIG_SOC_SERIES_ESP32C6)
#include <esp32c6/rom/ets_sys.h>
#include <esp32c6/rom/gpio.h>
#include <zephyr/dt-bindings/clock/esp32c6_clock.h>
#elif defined(CONFIG_SOC_SERIES_ESP32H2)
#include <esp32h2/rom/ets_sys.h>
#include <esp32h2/rom/gpio.h>
#include <zephyr/dt-bindings/clock/esp32h2_clock.h>
#endif
#ifdef CONFIG_UART_ASYNC_API
#include <zephyr/drivers/dma.h>
#include <zephyr/drivers/dma/dma_esp32.h>
#include <hal/uhci_ll.h>
#include <hal/gdma_ll.h>
#include <hal/gdma_hal.h>
#include <hal/dma_types.h>
#endif
#include <soc/uart_struct.h>
#include <hal/uart_ll.h>
#include <hal/uart_hal.h>
#include <hal/uart_types.h>
#include <esp_clk_tree.h>
#include <zephyr/drivers/pinctrl.h>

#include <soc/uart_reg.h>
#include <zephyr/device.h>
#include <soc.h>
#include <zephyr/drivers/uart.h>

#include <zephyr/drivers/interrupt_controller/intc_esp32.h>

#include <zephyr/drivers/clock_control.h>
#include <errno.h>
#include <zephyr/sys/util.h>
#include <esp_attr.h>
#include <zephyr/logging/log.h>

LOG_MODULE_REGISTER(uart_esp32, CONFIG_UART_LOG_LEVEL);

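/* Build-time, per-instance configuration populated from devicetree */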
struct uart_esp32_config {
	const struct device *clock_dev;
	const struct pinctrl_dev_config *pcfg;
	const clock_control_subsys_t clock_subsys;
	int irq_source;
	int irq_priority;
	int irq_flags;
	bool tx_invert;
	bool rx_invert;
#if CONFIG_UART_ASYNC_API
	const struct device *dma_dev;
	uint8_t tx_dma_channel;
	uint8_t rx_dma_channel;
	bool uhci_slip_tx;
	bool uhci_slip_rx;
#endif
};

#if CONFIG_UART_ASYNC_API
struct uart_esp32_async_data {
	struct k_work_delayable tx_timeout_work;
	const uint8_t *tx_buf;
	size_t tx_len;
	struct k_work_delayable rx_timeout_work;
	uint8_t *rx_buf;
	uint8_t *rx_next_buf;
	size_t rx_len;
	size_t rx_next_len;
	size_t rx_timeout;
	volatile size_t rx_counter;
	size_t rx_offset;
	uart_callback_t cb;
	void *user_data;
};
#endif

/* driver data */
struct uart_esp32_data {
	struct uart_config uart_config;
	uart_hal_context_t hal;
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	uart_irq_callback_user_data_t irq_cb;
	void *irq_cb_data;
#endif
#if CONFIG_UART_ASYNC_API
	struct uart_esp32_async_data async;
	uhci_dev_t *uhci_dev;
	const struct device *uart_dev;
#endif
};

#define UART_FIFO_LIMIT	    (UART_LL_FIFO_DEF_LEN)
#define UART_TX_FIFO_THRESH (CONFIG_UART_ESP32_TX_FIFO_THRESH)
#define UART_RX_FIFO_THRESH (CONFIG_UART_ESP32_RX_FIFO_THRESH)

#if CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API
static void uart_esp32_isr(void *arg);
#endif

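/* Non-blocking read of a single character; returns -1 when the RX FIFO is empty */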
static int uart_esp32_poll_in(const struct device *dev, unsigned char *p_char)
{
	struct uart_esp32_data *data = dev->data;
	int inout_rd_len = 1;

	if (uart_hal_get_rxfifo_len(&data->hal) == 0) {
		return -1;
	}

	uart_hal_read_rxfifo(&data->hal, p_char, &inout_rd_len);

	return 0;
}

static void uart_esp32_poll_out(const struct device *dev, unsigned char c)
{
	struct uart_esp32_data *data = dev->data;
	uint32_t written;

	/* Wait for space in FIFO */
	while (uart_hal_get_txfifo_len(&data->hal) == 0) {
		; /* Wait */
	}

	/* Send a character */
	uart_hal_write_txfifo(&data->hal, &c, 1, &written);
}

static int uart_esp32_err_check(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;
	uint32_t mask = uart_hal_get_intsts_mask(&data->hal);
	uint32_t err = mask & (UART_INTR_PARITY_ERR | UART_INTR_FRAM_ERR);

	return err;
}

#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE

static uint32_t uart_esp32_get_standard_baud(uint32_t calc_baud)
{
	const uint32_t standard_bauds[] = {9600,  14400,  19200,  38400,  57600,
					   74880, 115200, 230400, 460800, 921600};
	int num_bauds = ARRAY_SIZE(standard_bauds);
	uint32_t baud = calc_baud;

	/* Find the standard baudrate within 0.1% range. If no close
	 * value is found, input is returned.
	 */
	for (int i = 0; i < num_bauds; i++) {
		float range = (float)abs(calc_baud - standard_bauds[i]) / standard_bauds[i];

		if (range < 0.001f) {
			baud = standard_bauds[i];
			break;
		}
	}

	return baud;
}

static int uart_esp32_config_get(const struct device *dev, struct uart_config *cfg)
{
	struct uart_esp32_data *data = dev->data;
	uart_parity_t parity;
	uart_stop_bits_t stop_bit;
	uart_word_length_t data_bit;
	uart_hw_flowcontrol_t hw_flow;
	uart_sclk_t src_clk;
	uint32_t sclk_freq;
	uint32_t calc_baud;

	uart_hal_get_sclk(&data->hal, &src_clk);
	esp_clk_tree_src_get_freq_hz((soc_module_clk_t)src_clk,
		ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, &sclk_freq);

	uart_hal_get_baudrate(&data->hal, &calc_baud, sclk_freq);
	cfg->baudrate = uart_esp32_get_standard_baud(calc_baud);

	uart_hal_get_parity(&data->hal, &parity);
	switch (parity) {
	case UART_PARITY_DISABLE:
		cfg->parity = UART_CFG_PARITY_NONE;
		break;
	case UART_PARITY_EVEN:
		cfg->parity = UART_CFG_PARITY_EVEN;
		break;
	case UART_PARITY_ODD:
		cfg->parity = UART_CFG_PARITY_ODD;
		break;
	default:
		return -ENOTSUP;
	}

	uart_hal_get_stop_bits(&data->hal, &stop_bit);
	switch (stop_bit) {
	case UART_STOP_BITS_1:
		cfg->stop_bits = UART_CFG_STOP_BITS_1;
		break;
	case UART_STOP_BITS_1_5:
		cfg->stop_bits = UART_CFG_STOP_BITS_1_5;
		break;
	case UART_STOP_BITS_2:
		cfg->stop_bits = UART_CFG_STOP_BITS_2;
		break;
	default:
		return -ENOTSUP;
	}

	uart_hal_get_data_bit_num(&data->hal, &data_bit);
	switch (data_bit) {
	case UART_DATA_5_BITS:
		cfg->data_bits = UART_CFG_DATA_BITS_5;
		break;
	case UART_DATA_6_BITS:
		cfg->data_bits = UART_CFG_DATA_BITS_6;
		break;
	case UART_DATA_7_BITS:
		cfg->data_bits = UART_CFG_DATA_BITS_7;
		break;
	case UART_DATA_8_BITS:
		cfg->data_bits = UART_CFG_DATA_BITS_8;
		break;
	default:
		return -ENOTSUP;
	}

	uart_hal_get_hw_flow_ctrl(&data->hal, &hw_flow);
	switch (hw_flow) {
	case UART_HW_FLOWCTRL_DISABLE:
		cfg->flow_ctrl = UART_CFG_FLOW_CTRL_NONE;
		break;
	case UART_HW_FLOWCTRL_CTS_RTS:
		cfg->flow_ctrl = UART_CFG_FLOW_CTRL_RTS_CTS;
		break;
	default:
		return -ENOTSUP;
	}

	if (uart_hal_is_mode_rs485_half_duplex(&data->hal)) {
		cfg->flow_ctrl = UART_CFG_FLOW_CTRL_RS485;
	}

	return 0;
}
#endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */

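/* Apply a complete struct uart_config (clock source, FIFO thresholds, parity,
 * stop bits, data bits, flow control, baudrate and signal inversion) to the
 * hardware.
 */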
static int uart_esp32_configure(const struct device *dev, const struct uart_config *cfg)
{
	const struct uart_esp32_config *config = dev->config;
	struct uart_esp32_data *data = dev->data;
	uart_sclk_t src_clk;
	uint32_t sclk_freq;
	uint32_t inv_mask = 0;

	if (!device_is_ready(config->clock_dev)) {
		return -ENODEV;
	}

	clock_control_on(config->clock_dev, config->clock_subsys);

	uart_hal_set_sclk(&data->hal, UART_SCLK_DEFAULT);
	uart_hal_set_rxfifo_full_thr(&data->hal, UART_RX_FIFO_THRESH);
	uart_hal_set_txfifo_empty_thr(&data->hal, UART_TX_FIFO_THRESH);
	uart_hal_rxfifo_rst(&data->hal);
	uart_hal_txfifo_rst(&data->hal);

	switch (cfg->parity) {
	case UART_CFG_PARITY_NONE:
		uart_hal_set_parity(&data->hal, UART_PARITY_DISABLE);
		break;
	case UART_CFG_PARITY_EVEN:
		uart_hal_set_parity(&data->hal, UART_PARITY_EVEN);
		break;
	case UART_CFG_PARITY_ODD:
		uart_hal_set_parity(&data->hal, UART_PARITY_ODD);
		break;
	default:
		return -ENOTSUP;
	}

	switch (cfg->stop_bits) {
	case UART_CFG_STOP_BITS_1:
		uart_hal_set_stop_bits(&data->hal, UART_STOP_BITS_1);
		break;
	case UART_CFG_STOP_BITS_1_5:
		uart_hal_set_stop_bits(&data->hal, UART_STOP_BITS_1_5);
		break;
	case UART_CFG_STOP_BITS_2:
		uart_hal_set_stop_bits(&data->hal, UART_STOP_BITS_2);
		break;
	default:
		return -ENOTSUP;
	}

	switch (cfg->data_bits) {
	case UART_CFG_DATA_BITS_5:
		uart_hal_set_data_bit_num(&data->hal, UART_DATA_5_BITS);
		break;
	case UART_CFG_DATA_BITS_6:
		uart_hal_set_data_bit_num(&data->hal, UART_DATA_6_BITS);
		break;
	case UART_CFG_DATA_BITS_7:
		uart_hal_set_data_bit_num(&data->hal, UART_DATA_7_BITS);
		break;
	case UART_CFG_DATA_BITS_8:
		uart_hal_set_data_bit_num(&data->hal, UART_DATA_8_BITS);
		break;
	default:
		return -ENOTSUP;
	}

	uart_hal_set_mode(&data->hal, UART_MODE_UART);

	switch (cfg->flow_ctrl) {
	case UART_CFG_FLOW_CTRL_NONE:
		uart_hal_set_hw_flow_ctrl(&data->hal, UART_HW_FLOWCTRL_DISABLE, 0);
		break;
	case UART_CFG_FLOW_CTRL_RTS_CTS:
		uart_hal_set_hw_flow_ctrl(&data->hal, UART_HW_FLOWCTRL_CTS_RTS, 10);
		break;
	case UART_CFG_FLOW_CTRL_RS485:
		uart_hal_set_mode(&data->hal, UART_MODE_RS485_HALF_DUPLEX);
		break;
	default:
		return -ENOTSUP;
	}

	uart_hal_get_sclk(&data->hal, &src_clk);
	esp_clk_tree_src_get_freq_hz((soc_module_clk_t)src_clk,
		ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, &sclk_freq);
	uart_hal_set_baudrate(&data->hal, cfg->baudrate, sclk_freq);

	uart_hal_set_rx_timeout(&data->hal, 0x16);

	if (config->tx_invert) {
		inv_mask |= UART_SIGNAL_TXD_INV;
	}
	if (config->rx_invert) {
		inv_mask |= UART_SIGNAL_RXD_INV;
	}

	if (inv_mask) {
		uart_hal_inverse_signal(&data->hal, inv_mask);
	}

	return 0;
}

#ifdef CONFIG_UART_INTERRUPT_DRIVEN

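/* Interrupt-driven API: write up to len bytes into the TX FIFO and return how
 * many were actually accepted.
 */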
static int uart_esp32_fifo_fill(const struct device *dev, const uint8_t *tx_data, int len)
{
	struct uart_esp32_data *data = dev->data;
	uint32_t written = 0;

	if (len < 0) {
		return 0;
	}

	uart_hal_write_txfifo(&data->hal, tx_data, len, &written);
	return written;
}

static int uart_esp32_fifo_read(const struct device *dev, uint8_t *rx_data, const int len)
{
	struct uart_esp32_data *data = dev->data;
	const int num_rx = uart_hal_get_rxfifo_len(&data->hal);
	int read = MIN(len, num_rx);

	if (!read) {
		return 0;
	}

	uart_hal_read_rxfifo(&data->hal, rx_data, &read);
	return read;
}

static void uart_esp32_irq_tx_enable(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	uart_hal_clr_intsts_mask(&data->hal, UART_INTR_TXFIFO_EMPTY);
	uart_hal_ena_intr_mask(&data->hal, UART_INTR_TXFIFO_EMPTY);
}

static void uart_esp32_irq_tx_disable(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	uart_hal_disable_intr_mask(&data->hal, UART_INTR_TXFIFO_EMPTY);
}

static int uart_esp32_irq_tx_ready(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	return (uart_hal_get_txfifo_len(&data->hal) > 0 &&
		uart_hal_get_intr_ena_status(&data->hal) & UART_INTR_TXFIFO_EMPTY);
}

static void uart_esp32_irq_rx_disable(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	uart_hal_disable_intr_mask(&data->hal, UART_INTR_RXFIFO_FULL);
	uart_hal_disable_intr_mask(&data->hal, UART_INTR_RXFIFO_TOUT);
}

static int uart_esp32_irq_tx_complete(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	return uart_hal_is_tx_idle(&data->hal);
}

static int uart_esp32_irq_rx_ready(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	return (uart_hal_get_rxfifo_len(&data->hal) > 0);
}

static void uart_esp32_irq_err_enable(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	/* enable framing, parity */
	uart_hal_ena_intr_mask(&data->hal, UART_INTR_FRAM_ERR);
	uart_hal_ena_intr_mask(&data->hal, UART_INTR_PARITY_ERR);
}

static void uart_esp32_irq_err_disable(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	uart_hal_disable_intr_mask(&data->hal, UART_INTR_FRAM_ERR);
	uart_hal_disable_intr_mask(&data->hal, UART_INTR_PARITY_ERR);
}

static int uart_esp32_irq_is_pending(const struct device *dev)
{
	return uart_esp32_irq_rx_ready(dev) || uart_esp32_irq_tx_ready(dev);
}

static int uart_esp32_irq_update(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	uart_hal_clr_intsts_mask(&data->hal, UART_INTR_RXFIFO_FULL);
	uart_hal_clr_intsts_mask(&data->hal, UART_INTR_RXFIFO_TOUT);
	uart_hal_clr_intsts_mask(&data->hal, UART_INTR_TXFIFO_EMPTY);

	return 1;
}

static void uart_esp32_irq_callback_set(const struct device *dev, uart_irq_callback_user_data_t cb,
					void *cb_data)
{
	struct uart_esp32_data *data = dev->data;

	data->irq_cb = cb;
	data->irq_cb_data = cb_data;

#if defined(CONFIG_UART_EXCLUSIVE_API_CALLBACKS)
	data->async.cb = NULL;
	data->async.user_data = NULL;
#endif
}

#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

#ifdef CONFIG_UART_ASYNC_API

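/* Arm (or re-arm) an async timeout work item, unless the timeout is disabled
 * (0 or SYS_FOREVER_US).
 */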
static inline void uart_esp32_async_timer_start(struct k_work_delayable *work, size_t timeout)
{
	if ((timeout != SYS_FOREVER_US) && (timeout != 0)) {
		LOG_DBG("Async timer started for %zu us", timeout);
		k_work_reschedule(work, K_USEC(timeout));
	}
}

#endif
#if CONFIG_UART_ASYNC_API || CONFIG_UART_INTERRUPT_DRIVEN

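/* RX interrupt enable and the ISR below are shared by the interrupt-driven and
 * async paths.
 */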
static void uart_esp32_irq_rx_enable(const struct device *dev)
{
	struct uart_esp32_data *data = dev->data;

	uart_hal_clr_intsts_mask(&data->hal, UART_INTR_RXFIFO_FULL);
	uart_hal_clr_intsts_mask(&data->hal, UART_INTR_RXFIFO_TOUT);
	uart_hal_ena_intr_mask(&data->hal, UART_INTR_RXFIFO_FULL);
	uart_hal_ena_intr_mask(&data->hal, UART_INTR_RXFIFO_TOUT);
}

static void IRAM_ATTR uart_esp32_isr(void *arg)
{
	const struct device *dev = (const struct device *)arg;
	struct uart_esp32_data *data = dev->data;
	uint32_t uart_intr_status = uart_hal_get_intsts_mask(&data->hal);

	if (uart_intr_status == 0) {
		return;
	}
	uart_hal_clr_intsts_mask(&data->hal, uart_intr_status);

#if CONFIG_UART_INTERRUPT_DRIVEN
	/* Verify if the callback has been registered */
	if (data->irq_cb) {
		data->irq_cb(dev, data->irq_cb_data);
	}
#endif

#if CONFIG_UART_ASYNC_API
	if (uart_intr_status & UART_INTR_RXFIFO_FULL) {
		data->async.rx_counter++;
		uart_esp32_async_timer_start(&data->async.rx_timeout_work, data->async.rx_timeout);
	}
#endif
}

#endif

#if CONFIG_UART_ASYNC_API
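/* Async (DMA) receive path: the UART is attached to UHCI0, which feeds the
 * GDMA engine so that received bytes land directly in the user buffer.
 */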
static void IRAM_ATTR uart_esp32_dma_rx_done(const struct device *dma_dev, void *user_data,
					     uint32_t channel, int status)
{
	const struct device *uart_dev = user_data;
	const struct uart_esp32_config *config = uart_dev->config;
	struct uart_esp32_data *data = uart_dev->data;
	gdma_hal_context_t *dma_hal = dma_dev->data;
	struct uart_event evt = {0};
	dma_descriptor_t *desc;
	size_t rx_bytes;
	unsigned int key = irq_lock();

	/*
	 * Read actual transferred bytes from DMA descriptor.
	 * Direct LL calls used because this ISR context requires IRAM-safe code.
	 * Note: We SET rx_counter (not add) because the UART ISR also increments
	 * rx_counter on RXFIFO_FULL interrupts, and the DMA descriptor contains
	 * the authoritative byte count.
	 */
	desc = (dma_descriptor_t *)gdma_ll_rx_get_success_eof_desc_addr(dma_hal->dev,
									config->rx_dma_channel / 2);
	if (desc) {
		rx_bytes = desc->dw0.length;
	} else {
		/* Fallback to full buffer if descriptor unavailable */
		rx_bytes = data->async.rx_len;
	}

	data->async.rx_counter = data->async.rx_offset + rx_bytes;

	/*
	 * If buffer is not full and no timeout is configured, reload DMA to
	 * continue receiving into the same buffer. The timeout callback will
	 * handle notifications for partial data.
	 */
	if (data->async.rx_counter < data->async.rx_len &&
	    data->async.rx_timeout == SYS_FOREVER_US) {
		dma_reload(config->dma_dev, config->rx_dma_channel, 0,
			   (uint32_t)data->async.rx_buf + data->async.rx_counter,
			   data->async.rx_len - data->async.rx_counter);
		dma_start(config->dma_dev, config->rx_dma_channel);
		data->uhci_dev->pkt_thres.thrs = data->async.rx_len - data->async.rx_counter;
		irq_unlock(key);
		return;
	}

	/* Notify RX_RDY */
	evt.type = UART_RX_RDY;
	evt.data.rx.buf = data->async.rx_buf;
	evt.data.rx.len = data->async.rx_counter - data->async.rx_offset;
	evt.data.rx.offset = data->async.rx_offset;

	if (data->async.cb && evt.data.rx.len) {
		data->async.cb(data->uart_dev, &evt, data->async.user_data);
	}

	data->async.rx_offset = 0;
	data->async.rx_counter = 0;

	/* Release current buffer */
	evt.type = UART_RX_BUF_RELEASED;
	evt.data.rx_buf.buf = data->async.rx_buf;
	if (data->async.cb) {
		data->async.cb(uart_dev, &evt, data->async.user_data);
	}

	/* Load next buffer and request another */
	data->async.rx_buf = data->async.rx_next_buf;
	data->async.rx_len = data->async.rx_next_len;
	data->async.rx_next_buf = NULL;
	data->async.rx_next_len = 0U;
	evt.type = UART_RX_BUF_REQUEST;
	if (data->async.cb) {
		data->async.cb(uart_dev, &evt, data->async.user_data);
	}

	/* Notify RX_DISABLED when there is no buffer */
	if (!data->async.rx_buf) {
		evt.type = UART_RX_DISABLED;
		if (data->async.cb) {
			data->async.cb(uart_dev, &evt, data->async.user_data);
		}
	} else {
		/* Reload DMA with new buffer */
		dma_reload(config->dma_dev, config->rx_dma_channel, 0, (uint32_t)data->async.rx_buf,
			   data->async.rx_len);
		dma_start(config->dma_dev, config->rx_dma_channel);
		data->uhci_dev->pkt_thres.thrs = data->async.rx_len;
	}

	irq_unlock(key);
}

static void IRAM_ATTR uart_esp32_dma_tx_done(const struct device *dma_dev, void *user_data,
					     uint32_t channel, int status)
{
	const struct device *uart_dev = user_data;
	struct uart_esp32_data *data = uart_dev->data;
	struct uart_event evt = {0};
	unsigned int key = irq_lock();

	k_work_cancel_delayable(&data->async.tx_timeout_work);

	evt.type = UART_TX_DONE;
	evt.data.tx.buf = data->async.tx_buf;
	evt.data.tx.len = data->async.tx_len;
	if (data->async.cb) {
		data->async.cb(uart_dev, &evt, data->async.user_data);
	}

	/* Reset TX Buffer */
	data->async.tx_buf = NULL;
	data->async.tx_len = 0U;
	irq_unlock(key);
}

static int uart_esp32_async_tx_abort(const struct device *dev)
{
	const struct uart_esp32_config *config = dev->config;
	struct uart_esp32_data *data = dev->data;
	struct dma_status dma_status = {0};
	struct uart_event evt = {0};
	size_t tx_sent;
	int err = 0;
	unsigned int key = irq_lock();

	k_work_cancel_delayable(&data->async.tx_timeout_work);

	/*
	 * Get actual transferred bytes from DMA status before stopping.
	 * This tells us how many bytes were actually sent before the abort.
	 */
	if (dma_get_status(config->dma_dev, config->tx_dma_channel, &dma_status) == 0) {
		tx_sent = dma_status.total_copied;
	} else {
		tx_sent = data->async.tx_len;
	}

	err = dma_stop(config->dma_dev, config->tx_dma_channel);
	if (err) {
		LOG_ERR("Error stopping Tx DMA (%d)", err);
		goto unlock;
	}

	evt.type = UART_TX_ABORTED;
	evt.data.tx.buf = data->async.tx_buf;
	evt.data.tx.len = tx_sent;

	if (data->async.cb) {
		data->async.cb(dev, &evt, data->async.user_data);
	}

unlock:
	irq_unlock(key);
	return err;
}

static void uart_esp32_async_tx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct uart_esp32_async_data *async =
		CONTAINER_OF(dwork, struct uart_esp32_async_data, tx_timeout_work);
	struct uart_esp32_data *data = CONTAINER_OF(async, struct uart_esp32_data, async);

	uart_esp32_async_tx_abort(data->uart_dev);
}

static void uart_esp32_async_rx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct uart_esp32_async_data *async =
		CONTAINER_OF(dwork, struct uart_esp32_async_data, rx_timeout_work);
	struct uart_esp32_data *data = CONTAINER_OF(async, struct uart_esp32_data, async);
	const struct uart_esp32_config *config = data->uart_dev->config;
	struct dma_status dma_status = {0};
	struct uart_event evt = {0};
	size_t rx_count;
	unsigned int key;

	/*
	 * Get actual transferred bytes from the DMA status.
	 * In DMA mode, data goes directly to memory via UHCI+GDMA, bypassing
	 * the UART FIFO. So rx_counter (updated by the RXFIFO_FULL ISR) is not
	 * reliable. Instead, read total_copied from the DMA status.
	 */
	if (dma_get_status(config->dma_dev, config->rx_dma_channel, &dma_status) == 0) {
		rx_count = dma_status.total_copied;
	} else {
		/* Fallback to rx_counter if DMA status unavailable */
		rx_count = data->async.rx_counter;
	}

	key = irq_lock();

	/* Update rx_counter with actual DMA progress */
	data->async.rx_counter = rx_count;

	evt.type = UART_RX_RDY;
	evt.data.rx.buf = data->async.rx_buf;
	evt.data.rx.len = data->async.rx_counter - data->async.rx_offset;
	evt.data.rx.offset = data->async.rx_offset;

	if (data->async.cb && evt.data.rx.len) {
		data->async.cb(data->uart_dev, &evt, data->async.user_data);
	}

	data->async.rx_offset = data->async.rx_counter;
	k_work_cancel_delayable(&data->async.rx_timeout_work);
	irq_unlock(key);
}

static int uart_esp32_async_callback_set(const struct device *dev, uart_callback_t callback,
					 void *user_data)
{
	struct uart_esp32_data *data = dev->data;

	if (!callback) {
		return -EINVAL;
	}

	data->async.cb = callback;
	data->async.user_data = user_data;

#if defined(CONFIG_UART_EXCLUSIVE_API_CALLBACKS)
	data->irq_cb = NULL;
	data->irq_cb_data = NULL;
#endif

	return 0;
}

static int uart_esp32_async_tx(const struct device *dev, const uint8_t *buf, size_t len,
			       int32_t timeout)
{
	const struct uart_esp32_config *config = dev->config;
	struct uart_esp32_data *data = dev->data;
	struct dma_config dma_cfg = {0};
	struct dma_block_config dma_blk = {0};
	struct dma_status dma_status = {0};
	int err = 0;
	unsigned int key = irq_lock();

	if (config->tx_dma_channel == 0xFF) {
		LOG_ERR("Tx DMA channel is not configured");
		err = -ENOTSUP;
		goto unlock;
	}

	err = dma_get_status(config->dma_dev, config->tx_dma_channel, &dma_status);
	if (err) {
		LOG_ERR("Unable to get Tx status (%d)", err);
		goto unlock;
	}

	if (dma_status.busy) {
		LOG_ERR("Tx DMA Channel is busy");
		err = -EBUSY;
		goto unlock;
	}

	data->async.tx_buf = buf;
	data->async.tx_len = len;

	dma_cfg.channel_direction = MEMORY_TO_PERIPHERAL;
	dma_cfg.dma_callback = uart_esp32_dma_tx_done;
	dma_cfg.user_data = (void *)dev;
	dma_cfg.dma_slot = ESP_GDMA_TRIG_PERIPH_UHCI0;
	dma_cfg.block_count = 1;
	dma_cfg.head_block = &dma_blk;
	dma_blk.block_size = len;
	dma_blk.source_address = (uint32_t)buf;

	err = dma_config(config->dma_dev, config->tx_dma_channel, &dma_cfg);
	if (err) {
		LOG_ERR("Error configuring Tx DMA (%d)", err);
		goto unlock;
	}

	uart_esp32_async_timer_start(&data->async.tx_timeout_work, timeout);

	err = dma_start(config->dma_dev, config->tx_dma_channel);
	if (err) {
		LOG_ERR("Error starting Tx DMA (%d)", err);
		goto unlock;
	}

unlock:
	irq_unlock(key);
	return err;
}

static int uart_esp32_async_rx_enable(const struct device *dev, uint8_t *buf, size_t len,
				      int32_t timeout)
{
	const struct uart_esp32_config *config = dev->config;
	struct uart_esp32_data *data = dev->data;
	struct dma_config dma_cfg = {0};
	struct dma_block_config dma_blk = {0};
	struct dma_status dma_status = {0};
	int err = 0;
	struct uart_event evt = {0};

	if (config->rx_dma_channel == 0xFF) {
		LOG_ERR("Rx DMA channel is not configured");
		return -ENOTSUP;
	}

	err = dma_get_status(config->dma_dev, config->rx_dma_channel, &dma_status);
	if (err) {
		LOG_ERR("Unable to get Rx status (%d)", err);
		return err;
	}

	if (dma_status.busy) {
		LOG_ERR("Rx DMA Channel is busy");
		return -EBUSY;
	}

	unsigned int key = irq_lock();

	data->async.rx_buf = buf;
	data->async.rx_len = len;
	data->async.rx_timeout = timeout;

	dma_cfg.channel_direction = PERIPHERAL_TO_MEMORY;
	dma_cfg.dma_callback = uart_esp32_dma_rx_done;
	dma_cfg.user_data = (void *)dev;
	dma_cfg.dma_slot = ESP_GDMA_TRIG_PERIPH_UHCI0;
	dma_cfg.block_count = 1;
	dma_cfg.head_block = &dma_blk;
	dma_blk.block_size = len;
	dma_blk.dest_address = (uint32_t)data->async.rx_buf;

	err = dma_config(config->dma_dev, config->rx_dma_channel, &dma_cfg);
	if (err) {
		LOG_ERR("Error configuring Rx DMA (%d)", err);
		goto unlock;
	}

	/*
	 * Enable interrupt on first receive byte so we can start async timer
	 */
	uart_hal_set_rxfifo_full_thr(&data->hal, 1);
	uart_esp32_irq_rx_enable(dev);

	err = dma_start(config->dma_dev, config->rx_dma_channel);
	if (err) {
		LOG_ERR("Error starting Rx DMA (%d)", err);
		goto unlock;
	}

	data->uhci_dev->pkt_thres.thrs = len;

	/*
	 * Configure UHCI EOF mode based on timeout setting.
	 * For SYS_FOREVER_US, only use length-based EOF to avoid spurious
	 * callbacks when UART goes idle before buffer is full.
	 * For other timeouts, also enable idle-based EOF.
	 */
	if (timeout == SYS_FOREVER_US) {
		data->uhci_dev->conf0.len_eof_en = 1;
		data->uhci_dev->conf0.uart_idle_eof_en = 0;
	} else {
		data->uhci_dev->conf0.len_eof_en = 1;
		data->uhci_dev->conf0.uart_idle_eof_en = 1;
	}

	/* Request next buffer */
	evt.type = UART_RX_BUF_REQUEST;
	if (data->async.cb) {
		data->async.cb(dev, &evt, data->async.user_data);
	}

unlock:
	irq_unlock(key);
	return err;
}

static int uart_esp32_async_rx_buf_rsp(const struct device *dev, uint8_t *buf, size_t len)
{
	struct uart_esp32_data *data = dev->data;

	data->async.rx_next_buf = buf;
	data->async.rx_next_len = len;

	return 0;
}

static int uart_esp32_async_rx_disable(const struct device *dev)
{
	const struct uart_esp32_config *config = dev->config;
	struct uart_esp32_data *data = dev->data;
	unsigned int key = irq_lock();
	int err = 0;
	struct uart_event evt = {0};

	k_work_cancel_delayable(&data->async.rx_timeout_work);

	if (!data->async.rx_len) {
		err = -EINVAL;
		goto unlock;
	}

	err = dma_stop(config->dma_dev, config->rx_dma_channel);
	if (err) {
		LOG_ERR("Error stopping Rx DMA (%d)", err);
		goto unlock;
	}

	/* If any bytes have been received, notify RX_RDY */
	evt.type = UART_RX_RDY;
	evt.data.rx.buf = data->async.rx_buf;
	evt.data.rx.len = data->async.rx_counter - data->async.rx_offset;
	evt.data.rx.offset = data->async.rx_offset;

	if (data->async.cb && evt.data.rx.len) {
		data->async.cb(data->uart_dev, &evt, data->async.user_data);
	}

	data->async.rx_offset = 0;
	data->async.rx_counter = 0;

	/* Release current buffer */
	evt.type = UART_RX_BUF_RELEASED;
	evt.data.rx_buf.buf = data->async.rx_buf;

	if (data->async.cb) {
		data->async.cb(dev, &evt, data->async.user_data);
	}

	data->async.rx_len = 0;
	data->async.rx_buf = NULL;

	/* Release next buffer */
	if (data->async.rx_next_len) {
		evt.type = UART_RX_BUF_RELEASED;
		evt.data.rx_buf.buf = data->async.rx_next_buf;
		if (data->async.cb) {
			data->async.cb(dev, &evt, data->async.user_data);
		}

		data->async.rx_next_len = 0;
		data->async.rx_next_buf = NULL;
	}

	/* Notify UART_RX_DISABLED */
	evt.type = UART_RX_DISABLED;
	if (data->async.cb) {
		data->async.cb(dev, &evt, data->async.user_data);
	}

unlock:
	irq_unlock(key);
	return err;
}

#endif /* CONFIG_UART_ASYNC_API */

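/* Driver init: apply the default pinctrl state, program the devicetree-provided
 * uart_config, attach the ISR and, when a DMA controller is described, bring up
 * UHCI0 for the async API.
 */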
static int uart_esp32_init(const struct device *dev)
{
	int ret;
	struct uart_esp32_data *data = dev->data;
	const struct uart_esp32_config *config = dev->config;

	ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
	if (ret < 0) {
		LOG_ERR("Error configuring UART pins (%d)", ret);
		return ret;
	}

	ret = uart_esp32_configure(dev, &data->uart_config);
	if (ret < 0) {
		LOG_ERR("Error configuring UART (%d)", ret);
		return ret;
	}

#if CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API
	ret = esp_intr_alloc(config->irq_source,
			     ESP_PRIO_TO_FLAGS(config->irq_priority) |
				     ESP_INT_FLAGS_CHECK(config->irq_flags) | ESP_INTR_FLAG_IRAM,
			     (intr_handler_t)uart_esp32_isr, (void *)dev, NULL);
	if (ret < 0) {
		LOG_ERR("Error allocating UART interrupt (%d)", ret);
		return ret;
	}
#endif
#if CONFIG_UART_ASYNC_API
	if (config->dma_dev) {
		if (!device_is_ready(config->dma_dev)) {
			LOG_ERR("DMA device is not ready");
			return -ENODEV;
		}

		clock_control_on(config->clock_dev, (clock_control_subsys_t)ESP32_UHCI0_MODULE);
		uhci_ll_init(data->uhci_dev);
		uhci_ll_set_eof_mode(data->uhci_dev, UHCI_RX_IDLE_EOF | UHCI_RX_LEN_EOF);

		/* Configure SLIP encoding/decoding */
		data->uhci_dev->escape_conf.tx_c0_esc_en = config->uhci_slip_tx ? 1 : 0;
		data->uhci_dev->escape_conf.rx_c0_esc_en = config->uhci_slip_rx ? 1 : 0;

		uhci_ll_attach_uart_port(data->uhci_dev, uart_hal_get_port_num(&data->hal));
		data->uart_dev = dev;

		k_work_init_delayable(&data->async.tx_timeout_work, uart_esp32_async_tx_timeout);
		k_work_init_delayable(&data->async.rx_timeout_work, uart_esp32_async_rx_timeout);
	}
#endif
	return 0;
}

static DEVICE_API(uart, uart_esp32_api) = {
	.poll_in = uart_esp32_poll_in,
	.poll_out = uart_esp32_poll_out,
	.err_check = uart_esp32_err_check,
#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
	.configure = uart_esp32_configure,
	.config_get = uart_esp32_config_get,
#endif
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	.fifo_fill = uart_esp32_fifo_fill,
	.fifo_read = uart_esp32_fifo_read,
	.irq_tx_enable = uart_esp32_irq_tx_enable,
	.irq_tx_disable = uart_esp32_irq_tx_disable,
	.irq_tx_ready = uart_esp32_irq_tx_ready,
	.irq_rx_enable = uart_esp32_irq_rx_enable,
	.irq_rx_disable = uart_esp32_irq_rx_disable,
	.irq_tx_complete = uart_esp32_irq_tx_complete,
	.irq_rx_ready = uart_esp32_irq_rx_ready,
	.irq_err_enable = uart_esp32_irq_err_enable,
	.irq_err_disable = uart_esp32_irq_err_disable,
	.irq_is_pending = uart_esp32_irq_is_pending,
	.irq_update = uart_esp32_irq_update,
	.irq_callback_set = uart_esp32_irq_callback_set,
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
#if CONFIG_UART_ASYNC_API
	.callback_set = uart_esp32_async_callback_set,
	.tx = uart_esp32_async_tx,
	.tx_abort = uart_esp32_async_tx_abort,
	.rx_enable = uart_esp32_async_rx_enable,
	.rx_buf_rsp = uart_esp32_async_rx_buf_rsp,
	.rx_disable = uart_esp32_async_rx_disable,
#endif /* CONFIG_UART_ASYNC_API */
};

#if CONFIG_UART_ASYNC_API
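/*
 * The macros below pull the async/DMA parameters from devicetree. An enabled
 * UART instance references the GDMA controller through "dmas"/"dma-names" and
 * may opt into UHCI SLIP (C0) escaping via "uhci-slip-tx"/"uhci-slip-rx".
 * Illustrative fragment only; the channel cells and node labels depend on the
 * board and on the SoC's DMA binding:
 *
 *	&uart1 {
 *		dmas = <&dma 0>, <&dma 1>;
 *		dma-names = "tx", "rx";
 *		uhci-slip-tx;
 *		uhci-slip-rx;
 *	};
 */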
#define ESP_UART_DMA_INIT(n)                                                                       \
	.dma_dev = ESP32_DT_INST_DMA_CTLR(n, tx),                                                  \
	.tx_dma_channel = ESP32_DT_INST_DMA_CELL(n, tx, channel),                                  \
	.rx_dma_channel = ESP32_DT_INST_DMA_CELL(n, rx, channel),                                  \
	.uhci_slip_tx = DT_INST_PROP_OR(n, uhci_slip_tx, false),                                   \
	.uhci_slip_rx = DT_INST_PROP_OR(n, uhci_slip_rx, false)

#define ESP_UART_UHCI_INIT(n)                                                                      \
	.uhci_dev = COND_CODE_1(DT_INST_NODE_HAS_PROP(n, dmas), (&UHCI0), (NULL))

#else
#define ESP_UART_DMA_INIT(n)
#define ESP_UART_UHCI_INIT(n)
#endif

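/* Instantiate pinctrl, config, data and device objects for every enabled
 * espressif,esp32-uart node.
 */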
#define ESP32_UART_INIT(idx)                                                                       \
                                                                                                   \
	PINCTRL_DT_INST_DEFINE(idx);                                                               \
                                                                                                   \
	static const DRAM_ATTR struct uart_esp32_config uart_esp32_cfg_port_##idx = {              \
		.clock_dev = DEVICE_DT_GET(DT_INST_CLOCKS_CTLR(idx)),                              \
		.pcfg = PINCTRL_DT_INST_DEV_CONFIG_GET(idx),                                       \
		.clock_subsys = (clock_control_subsys_t)DT_INST_CLOCKS_CELL(idx, offset),          \
		.irq_source = DT_INST_IRQ_BY_IDX(idx, 0, irq),                                     \
		.irq_priority = DT_INST_IRQ_BY_IDX(idx, 0, priority),                              \
		.irq_flags = DT_INST_IRQ_BY_IDX(idx, 0, flags),                                    \
		.tx_invert = DT_INST_PROP_OR(idx, tx_invert, false),                               \
		.rx_invert = DT_INST_PROP_OR(idx, rx_invert, false),                               \
		ESP_UART_DMA_INIT(idx)};                                                           \
                                                                                                   \
	static struct uart_esp32_data uart_esp32_data_##idx = {                                    \
		.uart_config = {.baudrate = DT_INST_PROP(idx, current_speed),                      \
				.parity = DT_INST_ENUM_IDX(idx, parity),                           \
				.stop_bits = DT_INST_ENUM_IDX(idx, stop_bits),                     \
				.data_bits = DT_INST_ENUM_IDX(idx, data_bits),                     \
				.flow_ctrl = MAX(COND_CODE_1(DT_INST_PROP(idx, hw_rs485_hd_mode),  \
							     (UART_CFG_FLOW_CTRL_RS485),           \
							     (UART_CFG_FLOW_CTRL_NONE)),           \
						 COND_CODE_1(DT_INST_PROP(idx, hw_flow_control),   \
							     (UART_CFG_FLOW_CTRL_RTS_CTS),         \
							     (UART_CFG_FLOW_CTRL_NONE)))},         \
		.hal =                                                                             \
			{                                                                          \
				.dev = (uart_dev_t *)DT_INST_REG_ADDR(idx),                        \
			},                                                                         \
		ESP_UART_UHCI_INIT(idx)};                                                          \
                                                                                                   \
	DEVICE_DT_INST_DEFINE(idx, uart_esp32_init, NULL, &uart_esp32_data_##idx,                  \
			      &uart_esp32_cfg_port_##idx, PRE_KERNEL_1,                            \
			      CONFIG_SERIAL_INIT_PRIORITY, &uart_esp32_api);

DT_INST_FOREACH_STATUS_OKAY(ESP32_UART_INIT);