1 /*
2  * Copyright (c) 2023-2024 Analog Devices, Inc.
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #ifdef CONFIG_UART_ASYNC_API
8 #include <zephyr/drivers/dma.h>
9 #include <wrap_max32_dma.h>
10 #endif
11 #include <zephyr/drivers/pinctrl.h>
12 #include <zephyr/drivers/uart.h>
13 #include <zephyr/kernel.h>
14 #include <zephyr/logging/log.h>
15 #include <zephyr/drivers/clock_control/adi_max32_clock_control.h>
16 
17 #include <wrap_max32_uart.h>
18 
19 #define DT_DRV_COMPAT adi_max32_uart
20 
21 LOG_MODULE_REGISTER(uart_max32, CONFIG_UART_LOG_LEVEL);
22 
#ifdef CONFIG_UART_ASYNC_API
/* DMA controller binding for one transfer direction (TX or RX) of a UART. */
struct max32_uart_dma_config {
	const struct device *dev; /* DMA controller device */
	const uint32_t channel;   /* DMA channel number; 0xFF when not configured in DT */
	const uint32_t slot;      /* Peripheral request slot for this UART direction */
};
#endif /* CONFIG_UART_ASYNC_API */
30 
/* Build-time (devicetree-derived) configuration of one UART instance. */
struct max32_uart_config {
	mxc_uart_regs_t *regs;                  /* UART peripheral register base */
	const struct pinctrl_dev_config *pctrl; /* Pin control state */
	const struct device *clock;             /* Clock controller device */
	struct max32_perclk perclk;             /* Peripheral clock selection */
	struct uart_config uart_conf;           /* Initial UART settings from DT */
#if defined(CONFIG_UART_INTERRUPT_DRIVEN) || defined(CONFIG_UART_ASYNC_API)
	uart_irq_config_func_t irq_config_func; /* Connects and enables the IRQ */
#endif /* CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API */
#ifdef CONFIG_UART_ASYNC_API
	const struct max32_uart_dma_config tx_dma; /* TX DMA binding */
	const struct max32_uart_dma_config rx_dma; /* RX DMA binding */
#endif /* CONFIG_UART_ASYNC_API */
};
45 
#ifdef CONFIG_UART_ASYNC_API
/* Number of ping-pong TX cache buffers used when the user TX buffer is not
 * in DMA-accessible SRAM (see api_tx()).
 */
#define MAX32_UART_TX_CACHE_NUM 2
/* State of one asynchronous TX transfer. */
struct max32_uart_async_tx {
	const uint8_t *buf; /* User buffer passed to uart_tx() */
	const uint8_t *src; /* Next user-buffer byte still to be copied to cache */
	size_t len;         /* Bytes remaining to transmit */
	/* Ping-pong staging buffers for non-SRAM sources */
	uint8_t cache[MAX32_UART_TX_CACHE_NUM][CONFIG_UART_TX_CACHE_LEN];
	uint8_t cache_id;   /* Index of the cache currently being sent */
	struct dma_block_config dma_blk; /* Reused DMA block descriptor */
	int32_t timeout;    /* uart_tx() timeout, microseconds */
	struct k_work_delayable timeout_work; /* Aborts TX on timeout */
};

/* State of the asynchronous RX path. */
struct max32_uart_async_rx {
	uint8_t *buf;       /* Active receive buffer */
	size_t len;         /* Size of active buffer */
	size_t offset;      /* Start of data not yet reported via UART_RX_RDY */
	size_t counter;     /* Bytes reported in the last UART_RX_RDY event */
	uint8_t *next_buf;  /* Buffer provided via uart_rx_buf_rsp(), if any */
	size_t next_len;    /* Size of next_buf */
	int32_t timeout;    /* Inactivity timeout, microseconds */
	struct k_work_delayable timeout_work; /* Flushes partial data on timeout */
};

/* Per-instance asynchronous API state. */
struct max32_uart_async_data {
	const struct device *uart_dev; /* Back-pointer to the UART device */
	struct max32_uart_async_tx tx;
	struct max32_uart_async_rx rx;
	uart_callback_t cb;  /* User event callback */
	void *user_data;     /* User callback argument */
};
#endif
78 
/* Per-instance runtime state. */
struct max32_uart_data {
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	uart_irq_callback_user_data_t cb; /* Interrupt callback */
	void *cb_data;                    /* Interrupt callback arg */
	uint32_t flags;                   /* Cached interrupt flags */
	uint32_t status;                  /* Cached status flags */
#endif
#ifdef CONFIG_UART_ASYNC_API
	struct max32_uart_async_data async; /* Async (DMA) API state */
#endif
	struct uart_config conf; /* baudrate, stopbits, ... */
};
91 
92 #ifdef CONFIG_UART_INTERRUPT_DRIVEN
93 static void uart_max32_isr(const struct device *dev);
94 #endif
95 
96 #ifdef CONFIG_UART_ASYNC_API
97 static int uart_max32_tx_dma_load(const struct device *dev, uint8_t *buf, size_t len);
98 #endif
99 
api_poll_out(const struct device * dev,unsigned char c)100 static void api_poll_out(const struct device *dev, unsigned char c)
101 {
102 	const struct max32_uart_config *cfg = dev->config;
103 
104 	MXC_UART_WriteCharacter(cfg->regs, c);
105 }
106 
api_poll_in(const struct device * dev,unsigned char * c)107 static int api_poll_in(const struct device *dev, unsigned char *c)
108 {
109 	int val;
110 	const struct max32_uart_config *cfg = dev->config;
111 
112 	val = MXC_UART_ReadCharacterRaw(cfg->regs);
113 	if (val >= 0) {
114 		*c = (unsigned char)val;
115 	} else {
116 		return -1;
117 	}
118 
119 	return 0;
120 }
121 
api_err_check(const struct device * dev)122 static int api_err_check(const struct device *dev)
123 {
124 	int err = 0;
125 	uint32_t flags;
126 	const struct max32_uart_config *cfg = dev->config;
127 
128 	flags = MXC_UART_GetFlags(cfg->regs);
129 
130 	if (flags & ADI_MAX32_UART_ERROR_FRAMING) {
131 		err |= UART_ERROR_FRAMING;
132 	}
133 
134 	if (flags & ADI_MAX32_UART_ERROR_PARITY) {
135 		err |= UART_ERROR_PARITY;
136 	}
137 
138 	if (flags & ADI_MAX32_UART_ERROR_OVERRUN) {
139 		err |= UART_ERROR_OVERRUN;
140 	}
141 
142 	return err;
143 }
144 
/*
 * Apply a UART configuration (runtime reconfigure entry point; also called
 * from uart_max32_init()). Each field is written to hardware only when it
 * differs from the cached value in data->conf, and the cache is updated
 * only after the hardware call succeeded, so a failed call leaves the
 * cached configuration describing the actual hardware state.
 *
 * Returns 0 on success, -ENOTSUP for unsupported settings, -EINVAL for
 * invalid parity values.
 */
static int api_configure(const struct device *dev, const struct uart_config *uart_cfg)
{
	int err;
	const struct max32_uart_config *const cfg = dev->config;
	mxc_uart_regs_t *regs = cfg->regs;
	struct max32_uart_data *data = dev->data;

	/*
	 *  Set parity
	 */
	if (data->conf.parity != uart_cfg->parity) {
		mxc_uart_parity_t mxc_parity;

		switch (uart_cfg->parity) {
		case UART_CFG_PARITY_NONE:
			mxc_parity = ADI_MAX32_UART_CFG_PARITY_NONE;
			break;
		case UART_CFG_PARITY_ODD:
			mxc_parity = ADI_MAX32_UART_CFG_PARITY_ODD;
			break;
		case UART_CFG_PARITY_EVEN:
			mxc_parity = ADI_MAX32_UART_CFG_PARITY_EVEN;
			break;
		case UART_CFG_PARITY_MARK:
		/* Mark/space parity exist only on some MAX32 parts */
#if defined(ADI_MAX32_UART_CFG_PARITY_MARK)
			mxc_parity = ADI_MAX32_UART_CFG_PARITY_MARK;
			break;
#else
			return -ENOTSUP;
#endif
		case UART_CFG_PARITY_SPACE:
#if defined(ADI_MAX32_UART_CFG_PARITY_SPACE)
			mxc_parity = ADI_MAX32_UART_CFG_PARITY_SPACE;
			break;
#else
			return -ENOTSUP;
#endif
		default:
			return -EINVAL;
		}

		err = MXC_UART_SetParity(regs, mxc_parity);
		if (err < 0) {
			return -ENOTSUP;
		}
		/* in case of success keep configuration */
		data->conf.parity = uart_cfg->parity;
	}

	/*
	 *  Set stop bit
	 */
	if (data->conf.stop_bits != uart_cfg->stop_bits) {
		if (uart_cfg->stop_bits == UART_CFG_STOP_BITS_1) {
			err = MXC_UART_SetStopBits(regs, MXC_UART_STOP_1);
		} else if (uart_cfg->stop_bits == UART_CFG_STOP_BITS_2) {
			err = MXC_UART_SetStopBits(regs, MXC_UART_STOP_2);
		} else {
			return -ENOTSUP;
		}
		if (err < 0) {
			return -ENOTSUP;
		}
		/* in case of success keep configuration */
		data->conf.stop_bits = uart_cfg->stop_bits;
	}

	/*
	 *  Set data bit
	 *  Valid data for MAX32  is 5-6-7-8
	 *  Valid data for Zephyr is 0-1-2-3
	 *  Added +5 to index match.
	 */
	if (data->conf.data_bits != uart_cfg->data_bits) {
		err = MXC_UART_SetDataSize(regs, (5 + uart_cfg->data_bits));
		if (err < 0) {
			return -ENOTSUP;
		}
		/* in case of success keep configuration */
		data->conf.data_bits = uart_cfg->data_bits;
	}

	/*
	 *  Set flow control
	 *  Flow control not implemented yet so that only support no flow mode
	 */
	if (data->conf.flow_ctrl != uart_cfg->flow_ctrl) {
		if (uart_cfg->flow_ctrl != UART_CFG_FLOW_CTRL_NONE) {
			return -ENOTSUP;
		}
		data->conf.flow_ctrl = uart_cfg->flow_ctrl;
	}

	/*
	 *  Set baudrate
	 */
	if (data->conf.baudrate != uart_cfg->baudrate) {
		err = Wrap_MXC_UART_SetFrequency(regs, uart_cfg->baudrate, cfg->perclk.clk_src);
		if (err < 0) {
			return -ENOTSUP;
		}
		/* In case of success keep configuration */
		data->conf.baudrate = uart_cfg->baudrate;
	}
	return 0;
}
251 
252 #ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
253 
api_config_get(const struct device * dev,struct uart_config * uart_cfg)254 static int api_config_get(const struct device *dev, struct uart_config *uart_cfg)
255 {
256 	struct max32_uart_data *data = dev->data;
257 
258 	/* copy configs from global setting */
259 	*uart_cfg = data->conf;
260 
261 	return 0;
262 }
263 
264 #endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */
265 
266 #ifdef CONFIG_UART_ASYNC_API
267 static void uart_max32_async_tx_timeout(struct k_work *work);
268 static void uart_max32_async_rx_timeout(struct k_work *work);
269 #endif /* CONFIG_UART_ASYNC_API */
270 
/*
 * Device init: shut the peripheral down, bring up its clock and pins,
 * apply the devicetree UART configuration, then (optionally) hook up
 * interrupt/async machinery. The order matters: the peripheral is reset
 * before the clock source is selected, and IRQs are connected only after
 * the hardware is fully configured.
 */
static int uart_max32_init(const struct device *dev)
{
	int ret;
	const struct max32_uart_config *const cfg = dev->config;
	mxc_uart_regs_t *regs = cfg->regs;
#ifdef CONFIG_UART_ASYNC_API
	struct max32_uart_data *data = dev->data;
#endif

	if (!device_is_ready(cfg->clock)) {
		LOG_ERR("Clock control device not ready");
		return -ENODEV;
	}

	/* Start from a known-off state */
	ret = MXC_UART_Shutdown(regs);
	if (ret) {
		return ret;
	}

	ret = clock_control_on(cfg->clock, (clock_control_subsys_t)&cfg->perclk);
	if (ret != 0) {
		LOG_ERR("Cannot enable UART clock");
		return ret;
	}

	ret = Wrap_MXC_UART_SetClockSource(regs, cfg->perclk.clk_src);
	if (ret != 0) {
		LOG_ERR("Cannot set UART clock source");
		return ret;
	}

	ret = pinctrl_apply_state(cfg->pctrl, PINCTRL_STATE_DEFAULT);
	if (ret) {
		return ret;
	}

	/* Apply baudrate/parity/etc. from devicetree */
	ret = api_configure(dev, &cfg->uart_conf);
	if (ret) {
		return ret;
	}

	ret = Wrap_MXC_UART_Init(regs);
	if (ret) {
		return ret;
	}

#if defined(CONFIG_UART_INTERRUPT_DRIVEN) || defined(CONFIG_UART_ASYNC_API)
	/* Clear any pending UART RX/TX interrupts */
	MXC_UART_ClearFlags(regs, (ADI_MAX32_UART_INT_RX | ADI_MAX32_UART_INT_TX));
	cfg->irq_config_func(dev);
#endif

#ifdef CONFIG_UART_ASYNC_API
	data->async.uart_dev = dev;
	k_work_init_delayable(&data->async.tx.timeout_work, uart_max32_async_tx_timeout);
	k_work_init_delayable(&data->async.rx.timeout_work, uart_max32_async_rx_timeout);
	data->async.rx.len = 0;
	data->async.rx.offset = 0;
#endif

	return ret;
}
333 
334 #ifdef CONFIG_UART_INTERRUPT_DRIVEN
335 
api_fifo_fill(const struct device * dev,const uint8_t * tx_data,int size)336 static int api_fifo_fill(const struct device *dev, const uint8_t *tx_data, int size)
337 {
338 	unsigned int num_tx = 0;
339 	const struct max32_uart_config *cfg = dev->config;
340 
341 	num_tx = MXC_UART_WriteTXFIFO(cfg->regs, (unsigned char *)tx_data, size);
342 
343 	return (int)num_tx;
344 }
345 
/* Drain up to `size` bytes from the RX FIFO; returns the number read. */
static int api_fifo_read(const struct device *dev, uint8_t *rx_data, const int size)
{
	const struct max32_uart_config *config = dev->config;
	unsigned int count = MXC_UART_ReadRXFIFO(config->regs, (unsigned char *)rx_data, size);

	if (count == 0) {
		/* FIFO is empty: clear the now-stale RX interrupt flag */
		MXC_UART_ClearFlags(config->regs, ADI_MAX32_UART_INT_RX);
	}

	return count;
}
358 
/* Enable TX interrupts and immediately run the ISR once by hand, with
 * interrupts locked, so the user callback fires even when the TX FIFO is
 * already empty (the hardware would otherwise raise no edge).
 */
static void api_irq_tx_enable(const struct device *dev)
{
	const struct max32_uart_config *cfg = dev->config;
	unsigned int key;

	MXC_UART_EnableInt(cfg->regs, ADI_MAX32_UART_INT_TX | ADI_MAX32_UART_INT_TX_OEM);

	key = irq_lock();
	uart_max32_isr(dev);
	irq_unlock(key);
}
370 
api_irq_tx_disable(const struct device * dev)371 static void api_irq_tx_disable(const struct device *dev)
372 {
373 	const struct max32_uart_config *cfg = dev->config;
374 
375 	MXC_UART_DisableInt(cfg->regs, ADI_MAX32_UART_INT_TX | ADI_MAX32_UART_INT_TX_OEM);
376 }
377 
api_irq_tx_ready(const struct device * dev)378 static int api_irq_tx_ready(const struct device *dev)
379 {
380 	struct max32_uart_data *const data = dev->data;
381 	const struct max32_uart_config *cfg = dev->config;
382 	uint32_t inten = Wrap_MXC_UART_GetRegINTEN(cfg->regs);
383 
384 	return ((inten & (ADI_MAX32_UART_INT_TX | ADI_MAX32_UART_INT_TX_OEM)) &&
385 		!(data->status & ADI_MAX32_UART_STATUS_TX_FULL));
386 }
387 
api_irq_tx_complete(const struct device * dev)388 static int api_irq_tx_complete(const struct device *dev)
389 {
390 	const struct max32_uart_config *cfg = dev->config;
391 
392 	if (MXC_UART_GetActive(cfg->regs) == E_BUSY) {
393 		return 0;
394 	} else {
395 		return 1; /* transmission completed */
396 	}
397 }
398 
api_irq_rx_ready(const struct device * dev)399 static int api_irq_rx_ready(const struct device *dev)
400 {
401 	struct max32_uart_data *const data = dev->data;
402 	const struct max32_uart_config *cfg = dev->config;
403 	uint32_t inten = Wrap_MXC_UART_GetRegINTEN(cfg->regs);
404 
405 	return ((inten & ADI_MAX32_UART_INT_RX) && !(data->status & ADI_MAX32_UART_RX_EMPTY));
406 }
407 
api_irq_err_enable(const struct device * dev)408 static void api_irq_err_enable(const struct device *dev)
409 {
410 	const struct max32_uart_config *cfg = dev->config;
411 
412 	MXC_UART_EnableInt(cfg->regs, ADI_MAX32_UART_ERROR_INTERRUPTS);
413 }
414 
api_irq_err_disable(const struct device * dev)415 static void api_irq_err_disable(const struct device *dev)
416 {
417 	const struct max32_uart_config *cfg = dev->config;
418 
419 	MXC_UART_DisableInt(cfg->regs, ADI_MAX32_UART_ERROR_INTERRUPTS);
420 }
421 
api_irq_is_pending(const struct device * dev)422 static int api_irq_is_pending(const struct device *dev)
423 {
424 	struct max32_uart_data *const data = dev->data;
425 
426 	return (data->flags & (ADI_MAX32_UART_INT_RX | ADI_MAX32_UART_INT_TX));
427 }
428 
/* Snapshot and cache the interrupt/status flags for the other irq_* getters,
 * then clear the hardware flags. Reading before clearing is essential:
 * the cached values are what api_irq_is_pending()/tx_ready()/rx_ready()
 * consult afterwards. Always returns 1, per the Zephyr irq_update contract.
 */
static int api_irq_update(const struct device *dev)
{
	struct max32_uart_data *const data = dev->data;
	const struct max32_uart_config *const cfg = dev->config;

	data->flags = MXC_UART_GetFlags(cfg->regs);
	data->status = MXC_UART_GetStatus(cfg->regs);

	MXC_UART_ClearFlags(cfg->regs, data->flags);

	return 1;
}
441 
/* Register the interrupt-driven API user callback and its argument. */
static void api_irq_callback_set(const struct device *dev, uart_irq_callback_user_data_t cb,
				 void *cb_data)
{
	struct max32_uart_data *const data = dev->data;

	data->cb = cb;
	data->cb_data = cb_data;
}
450 
451 #endif /* CONFIG_UART_INTERRUPT_DRIVEN */
452 
453 #if defined(CONFIG_UART_INTERRUPT_DRIVEN) || defined(CONFIG_UART_ASYNC_API)
api_irq_rx_enable(const struct device * dev)454 static void api_irq_rx_enable(const struct device *dev)
455 {
456 	const struct max32_uart_config *cfg = dev->config;
457 
458 	MXC_UART_EnableInt(cfg->regs, ADI_MAX32_UART_INT_RX);
459 }
460 
api_irq_rx_disable(const struct device * dev)461 static void api_irq_rx_disable(const struct device *dev)
462 {
463 	const struct max32_uart_config *cfg = dev->config;
464 
465 	MXC_UART_DisableInt(cfg->regs, ADI_MAX32_UART_INT_RX);
466 }
467 
/* Common interrupt service routine for both APIs.
 *
 * Flags are sampled once up front and cleared only after the user callback
 * has had a chance to consume them (the callback typically calls
 * api_irq_update(), which also caches and clears flags itself).
 */
static void uart_max32_isr(const struct device *dev)
{
	struct max32_uart_data *data = dev->data;
	const struct max32_uart_config *cfg = dev->config;
	uint32_t intfl;

	intfl = MXC_UART_GetFlags(cfg->regs);

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	if (data->cb) {
		data->cb(dev, data->cb_data);
	}
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

#ifdef CONFIG_UART_ASYNC_API
	/* Any RX activity restarts the async inactivity timeout */
	if (data->async.rx.timeout != SYS_FOREVER_US && data->async.rx.timeout != 0 &&
	    (intfl & ADI_MAX32_UART_INT_RX)) {
		k_work_reschedule(&data->async.rx.timeout_work, K_USEC(data->async.rx.timeout));
	}
#endif /* CONFIG_UART_ASYNC_API */

	/* Clear RX/TX interrupts flag after cb is called */
	MXC_UART_ClearFlags(cfg->regs, intfl);
}
492 #endif /* CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API */
493 
494 #if defined(CONFIG_UART_ASYNC_API)
495 
/* (Re)arm an async timeout work item. A timeout of 0 or SYS_FOREVER_US
 * means "no timeout", so the work item is left untouched.
 */
static inline void async_timer_start(struct k_work_delayable *work, int32_t timeout)
{
	if (timeout == 0 || timeout == SYS_FOREVER_US) {
		return;
	}

	k_work_reschedule(work, K_USEC(timeout));
}
502 
async_user_callback(const struct device * dev,struct uart_event * evt)503 static void async_user_callback(const struct device *dev, struct uart_event *evt)
504 {
505 	const struct max32_uart_data *data = dev->data;
506 
507 	if (data->async.cb) {
508 		data->async.cb(dev, evt, data->async.user_data);
509 	}
510 }
511 
load_tx_cache(const uint8_t * src,size_t len,uint8_t * dest)512 static uint32_t load_tx_cache(const uint8_t *src, size_t len, uint8_t *dest)
513 {
514 	memcpy(dest, src, MIN(len, CONFIG_UART_TX_CACHE_LEN));
515 
516 	return MIN(len, CONFIG_UART_TX_CACHE_LEN);
517 }
518 
/* DMA completion callback for async TX.
 *
 * When the source buffer is not DMA-accessible, data is sent in
 * CONFIG_UART_TX_CACHE_LEN-sized chunks out of two ping-pong cache buffers:
 * on each completion the just-drained cache is refilled for the chunk after
 * next while the other cache is handed to the DMA. When everything has been
 * sent, a UART_TX_DONE (or UART_TX_ABORTED on DMA error) event is raised.
 */
static void uart_max32_async_tx_callback(const struct device *dma_dev, void *user_data,
					 uint32_t channel, int status)
{
	const struct device *dev = user_data;
	const struct max32_uart_config *config = dev->config;
	struct max32_uart_data *data = dev->data;
	struct max32_uart_async_tx *tx = &data->async.tx;
	struct dma_status dma_stat;
	int ret;

	unsigned int key = irq_lock();

	dma_get_status(config->tx_dma.dev, config->tx_dma.channel, &dma_stat);
	/* Skip callback if channel is still busy */
	if (dma_stat.busy) {
		irq_unlock(key);
		return;
	}

	k_work_cancel_delayable(&tx->timeout_work);
	Wrap_MXC_UART_DisableTxDMA(config->regs);

	irq_unlock(key);

	/* Account for the chunk just transferred */
	tx->len -= tx->dma_blk.block_size;
	if (tx->len > 0) {
		/* Swap ping-pong caches and send the chunk staged earlier */
		tx->cache_id = !(tx->cache_id);
		ret = uart_max32_tx_dma_load(dev, tx->cache[tx->cache_id],
					     MIN(tx->len, CONFIG_UART_TX_CACHE_LEN));
		if (ret < 0) {
			LOG_ERR("Error configuring Tx DMA (%d)", ret);
			return;
		}

		ret = dma_start(config->tx_dma.dev, config->tx_dma.channel);
		if (ret < 0) {
			LOG_ERR("Error starting Tx DMA (%d)", ret);
			return;
		}

		async_timer_start(&tx->timeout_work, tx->timeout);

		Wrap_MXC_UART_SetTxDMALevel(config->regs, 2);
		Wrap_MXC_UART_EnableTxDMA(config->regs);

		/* Load next chunk as well */
		if (tx->len > CONFIG_UART_TX_CACHE_LEN) {
			tx->src += load_tx_cache(tx->src, tx->len - CONFIG_UART_TX_CACHE_LEN,
						 tx->cache[!(tx->cache_id)]);
		}
	} else {
		/* NOTE(review): data.tx.len here is the residual count (0 on
		 * normal completion), not the total bytes sent — confirm
		 * against the uart_event UART_TX_DONE contract.
		 */
		struct uart_event tx_done = {
			.type = status == 0 ? UART_TX_DONE : UART_TX_ABORTED,
			.data.tx.buf = tx->buf,
			.data.tx.len = tx->len,
		};
		async_user_callback(dev, &tx_done);
	}
}
578 
/* Configure (but do not start) the TX DMA channel for a memory-to-UART
 * transfer of `len` bytes starting at `buf`. The single block descriptor
 * kept in async.tx.dma_blk is reused across chunks; its block_size is also
 * read back by the completion callback to account for sent bytes.
 *
 * Returns 0 on success or a negative errno from dma_config().
 */
static int uart_max32_tx_dma_load(const struct device *dev, uint8_t *buf, size_t len)
{
	int ret;
	const struct max32_uart_config *config = dev->config;
	struct max32_uart_data *data = dev->data;
	struct dma_config dma_cfg = {0};
	struct dma_block_config *dma_blk = &data->async.tx.dma_blk;

	dma_cfg.channel_direction = MEMORY_TO_PERIPHERAL;
	dma_cfg.dma_callback = uart_max32_async_tx_callback;
	dma_cfg.user_data = (void *)dev;
	dma_cfg.dma_slot = config->tx_dma.slot;
	dma_cfg.block_count = 1;
	dma_cfg.source_data_size = 1U; /* byte-wide transfers */
	dma_cfg.source_burst_length = 1U;
	dma_cfg.dest_data_size = 1U;
	dma_cfg.head_block = dma_blk;
	dma_blk->block_size = len;
	dma_blk->source_address = (uint32_t)buf;

	ret = dma_config(config->tx_dma.dev, config->tx_dma.channel, &dma_cfg);
	if (ret < 0) {
		return ret;
	}

	return 0;
}
606 
/* Register the async API event callback and its user argument. */
static int api_callback_set(const struct device *dev, uart_callback_t callback, void *user_data)
{
	struct max32_uart_data *data = dev->data;
	struct max32_uart_async_data *async = &data->async;

	async->cb = callback;
	async->user_data = user_data;

	return 0;
}
616 
/* Start an asynchronous DMA transmission of `len` bytes from `buf`.
 *
 * Buffers outside DMA-accessible SRAM are staged through the ping-pong
 * cache (see load_tx_cache() and the TX completion callback).
 *
 * Returns 0 on success, -ENOTSUP when no TX DMA channel is configured,
 * -EBUSY when a transfer is already in flight, or a negative errno from
 * the DMA driver.
 *
 * Fix: the DMA error/busy path previously unlocked and returned inline,
 * bypassing the `unlock` cleanup label used by every other failure path;
 * all error paths now funnel through `goto unlock`.
 */
static int api_tx(const struct device *dev, const uint8_t *buf, size_t len, int32_t timeout)
{
	struct max32_uart_data *data = dev->data;
	const struct max32_uart_config *config = dev->config;
	struct dma_status dma_stat;
	int ret;
	bool use_cache = false;
	unsigned int key = irq_lock();

	if (config->tx_dma.channel == 0xFF) {
		LOG_ERR("Tx DMA channel is not configured");
		ret = -ENOTSUP;
		goto unlock;
	}

	ret = dma_get_status(config->tx_dma.dev, config->tx_dma.channel, &dma_stat);
	if (ret < 0 || dma_stat.busy) {
		LOG_ERR("DMA Tx %s", ret < 0 ? "error" : "busy");
		ret = ret < 0 ? ret : -EBUSY;
		goto unlock;
	}

	data->async.tx.buf = buf;
	data->async.tx.len = len;
	data->async.tx.src = data->async.tx.buf;

	/* Stage through the cache when buf is not in DMA-accessible SRAM */
	if (((uint32_t)buf < MXC_SRAM_MEM_BASE) ||
	    (((uint32_t)buf + len) > (MXC_SRAM_MEM_BASE + MXC_SRAM_MEM_SIZE))) {
		use_cache = true;
		len = load_tx_cache(data->async.tx.src, MIN(len, CONFIG_UART_TX_CACHE_LEN),
				    data->async.tx.cache[0]);
		data->async.tx.src += len;
		data->async.tx.cache_id = 0;
	}

	ret = uart_max32_tx_dma_load(dev, use_cache ? data->async.tx.cache[0] : ((uint8_t *)buf),
				     len);
	if (ret < 0) {
		LOG_ERR("Error configuring Tx DMA (%d)", ret);
		goto unlock;
	}

	ret = dma_start(config->tx_dma.dev, config->tx_dma.channel);
	if (ret < 0) {
		LOG_ERR("Error starting Tx DMA (%d)", ret);
		goto unlock;
	}

	data->async.tx.timeout = timeout;
	async_timer_start(&data->async.tx.timeout_work, timeout);

	Wrap_MXC_UART_SetTxDMALevel(config->regs, 2);
	Wrap_MXC_UART_EnableTxDMA(config->regs);

unlock:
	irq_unlock(key);

	return ret;
}
676 
api_tx_abort(const struct device * dev)677 static int api_tx_abort(const struct device *dev)
678 {
679 	int ret;
680 	struct max32_uart_data *data = dev->data;
681 	const struct max32_uart_config *config = dev->config;
682 	struct dma_status dma_stat;
683 	size_t bytes_sent;
684 
685 	unsigned int key = irq_lock();
686 
687 	k_work_cancel_delayable(&data->async.tx.timeout_work);
688 
689 	Wrap_MXC_UART_DisableTxDMA(config->regs);
690 
691 	ret = dma_get_status(config->tx_dma.dev, config->tx_dma.channel, &dma_stat);
692 	if (!dma_stat.busy) {
693 		irq_unlock(key);
694 		return 0;
695 	}
696 
697 	bytes_sent = (ret == 0) ? (data->async.tx.len - dma_stat.pending_length) : 0;
698 
699 	ret = dma_stop(config->tx_dma.dev, config->tx_dma.channel);
700 
701 	irq_unlock(key);
702 
703 	if (ret == 0) {
704 		struct uart_event tx_aborted = {
705 			.type = UART_TX_ABORTED,
706 			.data.tx.buf = data->async.tx.buf,
707 			.data.tx.len = bytes_sent,
708 		};
709 		async_user_callback(dev, &tx_aborted);
710 	}
711 
712 	return 0;
713 }
714 
uart_max32_async_tx_timeout(struct k_work * work)715 static void uart_max32_async_tx_timeout(struct k_work *work)
716 {
717 	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
718 	struct max32_uart_async_tx *tx =
719 		CONTAINER_OF(dwork, struct max32_uart_async_tx, timeout_work);
720 	struct max32_uart_async_data *async = CONTAINER_OF(tx, struct max32_uart_async_data, tx);
721 	struct max32_uart_data *data = CONTAINER_OF(async, struct max32_uart_data, async);
722 
723 	api_tx_abort(data->async.uart_dev);
724 }
725 
/* Stop asynchronous reception.
 *
 * Tears down the RX DMA path under an IRQ lock, then emits the events the
 * async API requires, in order: UART_RX_BUF_RELEASED for the active buffer,
 * UART_RX_DISABLED, and finally UART_RX_BUF_RELEASED for the queued next
 * buffer if one was provided. Returns 0 on success or the dma_stop() error.
 */
static int api_rx_disable(const struct device *dev)
{
	struct max32_uart_data *data = dev->data;
	const struct max32_uart_config *config = dev->config;
	int ret;
	unsigned int key = irq_lock();

	k_work_cancel_delayable(&data->async.rx.timeout_work);

	Wrap_MXC_UART_DisableRxDMA(config->regs);

	ret = dma_stop(config->rx_dma.dev, config->rx_dma.channel);
	if (ret) {
		LOG_ERR("Error stopping Rx DMA (%d)", ret);
		irq_unlock(key);
		return ret;
	}

	api_irq_rx_disable(dev);

	irq_unlock(key);

	/* Release current buffer event */
	struct uart_event rel_event = {
		.type = UART_RX_BUF_RELEASED,
		.data.rx_buf.buf = data->async.rx.buf,
	};
	async_user_callback(dev, &rel_event);

	/* Disable RX event */
	struct uart_event rx_disabled = {.type = UART_RX_DISABLED};

	async_user_callback(dev, &rx_disabled);

	/* Reset RX bookkeeping for the next uart_rx_enable() call */
	data->async.rx.buf = NULL;
	data->async.rx.len = 0;
	data->async.rx.counter = 0;
	data->async.rx.offset = 0;

	if (data->async.rx.next_buf) {
		/* Release next buffer event */
		struct uart_event next_rel_event = {
			.type = UART_RX_BUF_RELEASED,
			.data.rx_buf.buf = data->async.rx.next_buf,
		};
		async_user_callback(dev, &next_rel_event);
		data->async.rx.next_buf = NULL;
		data->async.rx.next_len = 0;
	}

	return 0;
}
778 
/* DMA completion callback for async RX (active buffer filled).
 *
 * Reports any unreported bytes via UART_RX_RDY, then either swaps in the
 * next buffer supplied through uart_rx_buf_rsp() and restarts the DMA, or
 * shuts reception down when no next buffer is available.
 */
static void uart_max32_async_rx_callback(const struct device *dma_dev, void *user_data,
					 uint32_t channel, int status)
{
	const struct device *dev = user_data;
	const struct max32_uart_config *config = dev->config;
	struct max32_uart_data *data = dev->data;
	struct max32_uart_async_data *async = &data->async;
	struct dma_status dma_stat;
	size_t total_rx;

	unsigned int key = irq_lock();

	dma_get_status(config->rx_dma.dev, config->rx_dma.channel, &dma_stat);

	/* Buffer not yet full: nothing to do in this callback */
	if (dma_stat.pending_length > 0) {
		irq_unlock(key);
		return;
	}

	/* pending_length is 0 here, so total_rx equals the full buffer length */
	total_rx = async->rx.len - dma_stat.pending_length;

	api_irq_rx_disable(dev);

	irq_unlock(key);

	/* Report the bytes received since the last UART_RX_RDY event */
	if (total_rx > async->rx.offset) {
		async->rx.counter = total_rx - async->rx.offset;
		struct uart_event rdy_event = {
			.type = UART_RX_RDY,
			.data.rx.buf = async->rx.buf,
			.data.rx.len = async->rx.counter,
			.data.rx.offset = async->rx.offset,
		};
		async_user_callback(dev, &rdy_event);
	}

	if (async->rx.next_buf) {
		async->rx.offset = 0;
		async->rx.counter = 0;

		/* Hand the filled buffer back, then promote the next buffer */
		struct uart_event rel_event = {
			.type = UART_RX_BUF_RELEASED,
			.data.rx_buf.buf = async->rx.buf,
		};
		async_user_callback(dev, &rel_event);

		async->rx.buf = async->rx.next_buf;
		async->rx.len = async->rx.next_len;

		async->rx.next_buf = NULL;
		async->rx.next_len = 0;
		struct uart_event req_event = {
			.type = UART_RX_BUF_REQUEST,
		};
		async_user_callback(dev, &req_event);

		/* Point the DMA at the new buffer and resume reception */
		dma_reload(config->rx_dma.dev, config->rx_dma.channel, config->rx_dma.slot,
			   (uint32_t)async->rx.buf, async->rx.len);
		dma_start(config->rx_dma.dev, config->rx_dma.channel);

		api_irq_rx_enable(dev);
		async_timer_start(&async->rx.timeout_work, async->rx.timeout);
	} else {
		api_rx_disable(dev);
	}
}
845 
/* Start asynchronous DMA reception into `buf` (up to `len` bytes), with an
 * inactivity `timeout` in microseconds.
 *
 * Returns 0 on success, -ENOTSUP when no RX DMA channel is configured,
 * -EBUSY when the channel is already active, or a negative errno from the
 * DMA driver.
 *
 * Fix: the DMA channel_direction was MEMORY_TO_PERIPHERAL, which is the
 * transmit direction. RX moves data from the UART FIFO (peripheral) into
 * the memory buffer — note dma_blk.dest_address is set to `buf` — so the
 * direction must be PERIPHERAL_TO_MEMORY.
 */
static int api_rx_enable(const struct device *dev, uint8_t *buf, size_t len, int32_t timeout)
{
	struct max32_uart_data *data = dev->data;
	const struct max32_uart_config *config = dev->config;
	struct dma_status dma_stat;
	struct dma_config dma_cfg = {0};
	struct dma_block_config dma_blk = {0};
	int ret;

	unsigned int key = irq_lock();

	if (config->rx_dma.channel == 0xFF) {
		LOG_ERR("Rx DMA channel is not configured");
		irq_unlock(key);
		return -ENOTSUP;
	}

	ret = dma_get_status(config->rx_dma.dev, config->rx_dma.channel, &dma_stat);
	if (ret < 0 || dma_stat.busy) {
		LOG_ERR("DMA Rx %s", ret < 0 ? "error" : "busy");
		irq_unlock(key);
		return ret < 0 ? ret : -EBUSY;
	}

	data->async.rx.buf = buf;
	data->async.rx.len = len;

	dma_cfg.channel_direction = PERIPHERAL_TO_MEMORY;
	dma_cfg.dma_callback = uart_max32_async_rx_callback;
	dma_cfg.user_data = (void *)dev;
	dma_cfg.dma_slot = config->rx_dma.slot;
	dma_cfg.block_count = 1;
	dma_cfg.source_data_size = 1U; /* byte-wide transfers */
	dma_cfg.source_burst_length = 1U;
	dma_cfg.dest_data_size = 1U;
	dma_cfg.head_block = &dma_blk;
	dma_blk.block_size = len;
	dma_blk.dest_address = (uint32_t)buf;

	ret = dma_config(config->rx_dma.dev, config->rx_dma.channel, &dma_cfg);
	if (ret < 0) {
		LOG_ERR("Error configuring Rx DMA (%d)", ret);
		irq_unlock(key);
		return ret;
	}

	ret = dma_start(config->rx_dma.dev, config->rx_dma.channel);
	if (ret < 0) {
		LOG_ERR("Error starting Rx DMA (%d)", ret);
		irq_unlock(key);
		return ret;
	}

	data->async.rx.timeout = timeout;

	/* Request DMA service as soon as one byte is in the RX FIFO */
	Wrap_MXC_UART_SetRxDMALevel(config->regs, 1);
	Wrap_MXC_UART_EnableRxDMA(config->regs);

	struct uart_event buf_req = {
		.type = UART_RX_BUF_REQUEST,
	};

	async_user_callback(dev, &buf_req);

	api_irq_rx_enable(dev);
	async_timer_start(&data->async.rx.timeout_work, timeout);

	irq_unlock(key);
	return ret;
}
916 
/* Queue the next RX buffer, to be swapped in when the active one fills. */
static int api_rx_buf_rsp(const struct device *dev, uint8_t *buf, size_t len)
{
	struct max32_uart_data *data = dev->data;
	struct max32_uart_async_rx *rx = &data->async.rx;

	rx->next_buf = buf;
	rx->next_len = len;

	return 0;
}
926 
/* Delayable-work handler for the RX inactivity timeout.
 *
 * Flushes bytes that have arrived since the last UART_RX_RDY event without
 * stopping the DMA transfer: it samples the DMA progress, reports the delta
 * via UART_RX_RDY, advances rx.offset past the reported bytes, and re-arms
 * the RX interrupt (the ISR reschedules this work on further RX activity).
 */
static void uart_max32_async_rx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct max32_uart_async_rx *rx =
		CONTAINER_OF(dwork, struct max32_uart_async_rx, timeout_work);
	struct max32_uart_async_data *async = CONTAINER_OF(rx, struct max32_uart_async_data, rx);
	struct max32_uart_data *data = CONTAINER_OF(async, struct max32_uart_data, async);
	const struct max32_uart_config *config = data->async.uart_dev->config;
	struct dma_status dma_stat;
	uint32_t total_rx;

	unsigned int key = irq_lock();

	dma_get_status(config->rx_dma.dev, config->rx_dma.channel, &dma_stat);

	api_irq_rx_disable(data->async.uart_dev);
	k_work_cancel_delayable(&data->async.rx.timeout_work);

	irq_unlock(key);

	/* Bytes the DMA has written into the buffer so far */
	total_rx = async->rx.len - dma_stat.pending_length;

	if (total_rx > async->rx.offset) {
		async->rx.counter = total_rx - async->rx.offset;
		struct uart_event rdy_event = {
			.type = UART_RX_RDY,
			.data.rx.buf = async->rx.buf,
			.data.rx.len = async->rx.counter,
			.data.rx.offset = async->rx.offset,
		};
		async_user_callback(async->uart_dev, &rdy_event);
	}
	/* Mark the reported bytes as consumed */
	async->rx.offset += async->rx.counter;
	async->rx.counter = 0;

	api_irq_rx_enable(data->async.uart_dev);
}
964 
965 #endif
966 
/* Zephyr UART driver API vtable; optional API groups are compiled in per
 * the corresponding Kconfig options.
 */
static DEVICE_API(uart, uart_max32_driver_api) = {
	/* Polled API */
	.poll_in = api_poll_in,
	.poll_out = api_poll_out,
	.err_check = api_err_check,
#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
	.configure = api_configure,
	.config_get = api_config_get,
#endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	/* Interrupt-driven API */
	.fifo_fill = api_fifo_fill,
	.fifo_read = api_fifo_read,
	.irq_tx_enable = api_irq_tx_enable,
	.irq_tx_disable = api_irq_tx_disable,
	.irq_tx_ready = api_irq_tx_ready,
	.irq_rx_enable = api_irq_rx_enable,
	.irq_rx_disable = api_irq_rx_disable,
	.irq_tx_complete = api_irq_tx_complete,
	.irq_rx_ready = api_irq_rx_ready,
	.irq_err_enable = api_irq_err_enable,
	.irq_err_disable = api_irq_err_disable,
	.irq_is_pending = api_irq_is_pending,
	.irq_update = api_irq_update,
	.irq_callback_set = api_irq_callback_set,
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
#ifdef CONFIG_UART_ASYNC_API
	/* Asynchronous (DMA) API */
	.callback_set = api_callback_set,
	.tx = api_tx,
	.tx_abort = api_tx_abort,
	.rx_enable = api_rx_enable,
	.rx_buf_rsp = api_rx_buf_rsp,
	.rx_disable = api_rx_disable,
#endif /* CONFIG_UART_ASYNC_API */
};
1000 
1001 #ifdef CONFIG_UART_ASYNC_API
1002 #define MAX32_DT_INST_DMA_CTLR(n, name)                                                            \
1003 	COND_CODE_1(DT_INST_NODE_HAS_PROP(n, dmas),                                                \
1004 		    (DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(n, name))), (NULL))
1005 
1006 #define MAX32_DT_INST_DMA_CELL(n, name, cell)                                                      \
1007 	COND_CODE_1(DT_INST_NODE_HAS_PROP(n, dmas), (DT_INST_DMAS_CELL_BY_NAME(n, name, cell)),    \
1008 		    (0xff))
1009 
1010 #define MAX32_UART_DMA_INIT(n)                                                                     \
1011 	.tx_dma.dev = MAX32_DT_INST_DMA_CTLR(n, tx),                                               \
1012 	.tx_dma.channel = MAX32_DT_INST_DMA_CELL(n, tx, channel),                                  \
1013 	.tx_dma.slot = MAX32_DT_INST_DMA_CELL(n, tx, slot),                                        \
1014 	.rx_dma.dev = MAX32_DT_INST_DMA_CTLR(n, rx),                                               \
1015 	.rx_dma.channel = MAX32_DT_INST_DMA_CELL(n, rx, channel),                                  \
1016 	.rx_dma.slot = MAX32_DT_INST_DMA_CELL(n, rx, slot),
1017 #else
1018 #define MAX32_UART_DMA_INIT(n)
1019 #endif
1020 
1021 #if defined(CONFIG_UART_INTERRUPT_DRIVEN) || defined(CONFIG_UART_ASYNC_API)
1022 #define MAX32_UART_USE_IRQ 1
1023 #else
1024 #define MAX32_UART_USE_IRQ 0
1025 #endif
1026 
1027 #define MAX32_UART_INIT(_num)                                                                      \
1028 	PINCTRL_DT_INST_DEFINE(_num);                                                              \
1029 	IF_ENABLED(MAX32_UART_USE_IRQ,                                                             \
1030 		   (static void uart_max32_irq_init_##_num(const struct device *dev)               \
1031 		   {             \
1032 			   IRQ_CONNECT(DT_INST_IRQN(_num), DT_INST_IRQ(_num, priority),            \
1033 				       uart_max32_isr, DEVICE_DT_INST_GET(_num), 0);               \
1034 			   irq_enable(DT_INST_IRQN(_num));                                         \
1035 		   }));                                                                            \
1036 	static const struct max32_uart_config max32_uart_config_##_num = {                         \
1037 		.regs = (mxc_uart_regs_t *)DT_INST_REG_ADDR(_num),                                 \
1038 		.pctrl = PINCTRL_DT_INST_DEV_CONFIG_GET(_num),                                     \
1039 		.clock = DEVICE_DT_GET(DT_INST_CLOCKS_CTLR(_num)),                                 \
1040 		.perclk.bus = DT_INST_CLOCKS_CELL(_num, offset),                                   \
1041 		.perclk.bit = DT_INST_CLOCKS_CELL(_num, bit),                                      \
1042 		.perclk.clk_src =                                                                  \
1043 			DT_INST_PROP_OR(_num, clock_source, ADI_MAX32_PRPH_CLK_SRC_PCLK),          \
1044 		.uart_conf.baudrate = DT_INST_PROP(_num, current_speed),                           \
1045 		.uart_conf.parity = DT_INST_ENUM_IDX(_num, parity),                                \
1046 		.uart_conf.data_bits = DT_INST_ENUM_IDX(_num, data_bits),                          \
1047 		.uart_conf.stop_bits = DT_INST_ENUM_IDX(_num, stop_bits),                          \
1048 		.uart_conf.flow_ctrl =                                                             \
1049 			DT_INST_PROP_OR(index, hw_flow_control, UART_CFG_FLOW_CTRL_NONE),          \
1050 		MAX32_UART_DMA_INIT(_num) IF_ENABLED(                                              \
1051 			MAX32_UART_USE_IRQ, (.irq_config_func = uart_max32_irq_init_##_num,))};    \
1052 	static struct max32_uart_data max32_uart_data##_num = {                                    \
1053 		IF_ENABLED(CONFIG_UART_INTERRUPT_DRIVEN, (.cb = NULL,))};                          \
1054 	DEVICE_DT_INST_DEFINE(_num, uart_max32_init, NULL, &max32_uart_data##_num,                 \
1055 			      &max32_uart_config_##_num, PRE_KERNEL_1,                             \
1056 			      CONFIG_SERIAL_INIT_PRIORITY, (void *)&uart_max32_driver_api);
1057 
1058 DT_INST_FOREACH_STATUS_OKAY(MAX32_UART_INIT)
1059