1 /*
2  * Copyright (c) 2022 Cypress Semiconductor Corporation (an Infineon company) or
3  * an affiliate of Cypress Semiconductor Corporation
4  *
5  * SPDX-License-Identifier: Apache-2.0
6  */
7 
8 /**
9  * @brief UART driver for Infineon CAT1 MCU family.
10  *
11  */
12 
13 #define DT_DRV_COMPAT infineon_cat1_uart
14 
15 #include <zephyr/drivers/uart.h>
16 #include <zephyr/drivers/pinctrl.h>
17 #include <cyhal_uart.h>
18 #include <cyhal_utils_impl.h>
19 #include <cyhal_scb_common.h>
20 
21 #include "cy_scb_uart.h"
22 
23 #include <zephyr/logging/log.h>
24 LOG_MODULE_REGISTER(uart_ifx_cat1, CONFIG_UART_LOG_LEVEL);
25 
26 #ifdef CONFIG_UART_ASYNC_API
27 #include <zephyr/drivers/dma.h>
28 #include <cyhal_dma.h>
29 
30 extern int ifx_cat1_dma_ex_connect_digital(const struct device *dev, uint32_t channel,
31 					   cyhal_source_t source, cyhal_dma_input_t input);
32 
/* Per-direction DMA stream state used by the async (UART_ASYNC_API) path. */
struct ifx_cat1_dma_stream {
	const struct device *dev;        /* DMA controller device */
	uint32_t dma_channel;            /* DMA channel number on 'dev' */
	struct dma_config dma_cfg;       /* Channel configuration */
	struct dma_block_config blk_cfg; /* Single-block transfer descriptor */
	uint8_t *buf;                    /* Active user buffer */
	size_t buf_len;                  /* Length of 'buf' in bytes */
	size_t offset;                   /* Bytes already reported to the user */
	size_t counter;                  /* Bytes transferred by DMA so far */
	uint32_t timeout;                /* Inactivity timeout in microseconds (K_USEC) */
	size_t dma_transmitted_bytes;

	struct k_work_delayable timeout_work; /* Work item driving the timeout */
};
47 
/* Async API context: user callback plus the RX and TX DMA streams. */
struct ifx_cat1_uart_async {
	const struct device *uart_dev; /* Back-pointer to the UART device */
	uart_callback_t cb;            /* User event callback */
	void *user_data;               /* Opaque pointer passed to 'cb' */

	struct ifx_cat1_dma_stream dma_rx; /* RX DMA stream state */
	struct ifx_cat1_dma_stream dma_tx; /* TX DMA stream state */

	uint8_t *rx_next_buf;   /* Next RX buffer queued via rx_buf_rsp() */
	size_t rx_next_buf_len; /* Length of 'rx_next_buf' in bytes */
};
59 
/* Selectors for async_evt_rx_release_buffer(): which RX buffer to release */
#define CURRENT_BUFFER 0
#define NEXT_BUFFER    1
62 
63 #endif /* CONFIG_UART_ASYNC_API */
64 
/* Run-time driver data (per instance) */
struct ifx_cat1_uart_data {
	cyhal_uart_t obj; /* UART CYHAL object */
	struct uart_config cfg;            /* Last applied Zephyr UART configuration */
	cyhal_resource_inst_t hw_resource; /* Dedicated SCB hardware resource */
	cyhal_clock_t clock;               /* Peripheral clock allocated for this SCB */

#if CONFIG_UART_INTERRUPT_DRIVEN
	uart_irq_callback_user_data_t irq_cb; /* Interrupt Callback */
	void *irq_cb_data;                    /* Interrupt Callback Arg */
#endif

#ifdef CONFIG_UART_ASYNC_API
	struct ifx_cat1_uart_async async; /* Async (DMA) API state */
#endif
};
81 
/* Device config structure (constant, from devicetree) */
struct ifx_cat1_uart_config {
	const struct pinctrl_dev_config *pcfg; /* Pin control configuration */
	CySCB_Type *reg_addr;                  /* SCB register base address */
	struct uart_config dt_cfg;             /* Initial UART config from devicetree */
	uint8_t irq_priority;                  /* Interrupt priority for cyhal events */
};
89 
/* Default UART configuration structure, applied at init before the
 * devicetree configuration is set via ifx_cat1_uart_configure().
 */
static const cy_stc_scb_uart_config_t _cyhal_uart_default_config = {
	.uartMode = CY_SCB_UART_STANDARD,
	.enableMutliProcessorMode = false,
	.smartCardRetryOnNack = false,
	.irdaInvertRx = false,
	.irdaEnableLowPowerReceiver = false,
	.oversample = 12,
	.enableMsbFirst = false,
	.dataWidth = 8UL,
	.parity = CY_SCB_UART_PARITY_NONE,
	.stopBits = CY_SCB_UART_STOP_BITS_1,
	.enableInputFilter = false,
	.breakWidth = 11UL,
	.dropOnFrameError = false,
	.dropOnParityError = false,

	.receiverAddress = 0x0UL,
	.receiverAddressMask = 0x0UL,
	.acceptAddrInFifo = false,

	.enableCts = false,
	.ctsPolarity = CY_SCB_UART_ACTIVE_LOW,
#if defined(COMPONENT_CAT1A) || defined(COMPONENT_CAT1B)
	.rtsRxFifoLevel = 20UL,
#elif defined(COMPONENT_CAT2)
	.rtsRxFifoLevel = 3UL,
#endif
	.rtsPolarity = CY_SCB_UART_ACTIVE_LOW,

	/* Level triggers when at least one element is in FIFO */
	.rxFifoTriggerLevel = 0UL,
	.rxFifoIntEnableMask = 0x0UL,

	/* Level triggers when the TX FIFO is half empty */
	.txFifoTriggerLevel = (CY_SCB_FIFO_SIZE / 2 - 1),
	.txFifoIntEnableMask = 0x0UL
};
128 
129 /* Helper API */
_convert_uart_parity_z_to_cyhal(enum uart_config_parity parity)130 static cyhal_uart_parity_t _convert_uart_parity_z_to_cyhal(enum uart_config_parity parity)
131 {
132 	cyhal_uart_parity_t cyhal_parity;
133 
134 	switch (parity) {
135 	case UART_CFG_PARITY_NONE:
136 		cyhal_parity = CYHAL_UART_PARITY_NONE;
137 		break;
138 	case UART_CFG_PARITY_ODD:
139 		cyhal_parity = CYHAL_UART_PARITY_ODD;
140 		break;
141 	case UART_CFG_PARITY_EVEN:
142 		cyhal_parity = CYHAL_UART_PARITY_EVEN;
143 		break;
144 	default:
145 		cyhal_parity = CYHAL_UART_PARITY_NONE;
146 	}
147 	return cyhal_parity;
148 }
149 
_convert_uart_stop_bits_z_to_cyhal(enum uart_config_stop_bits stop_bits)150 static uint32_t _convert_uart_stop_bits_z_to_cyhal(enum uart_config_stop_bits stop_bits)
151 {
152 	uint32_t cyhal_stop_bits;
153 
154 	switch (stop_bits) {
155 	case UART_CFG_STOP_BITS_1:
156 		cyhal_stop_bits = 1u;
157 		break;
158 
159 	case UART_CFG_STOP_BITS_2:
160 		cyhal_stop_bits = 2u;
161 		break;
162 	default:
163 		cyhal_stop_bits = 1u;
164 	}
165 	return cyhal_stop_bits;
166 }
167 
_convert_uart_data_bits_z_to_cyhal(enum uart_config_data_bits data_bits)168 static uint32_t _convert_uart_data_bits_z_to_cyhal(enum uart_config_data_bits data_bits)
169 {
170 	uint32_t cyhal_data_bits;
171 
172 	switch (data_bits) {
173 	case UART_CFG_DATA_BITS_5:
174 		cyhal_data_bits = 1u;
175 		break;
176 
177 	case UART_CFG_DATA_BITS_6:
178 		cyhal_data_bits = 6u;
179 		break;
180 
181 	case UART_CFG_DATA_BITS_7:
182 		cyhal_data_bits = 7u;
183 		break;
184 
185 	case UART_CFG_DATA_BITS_8:
186 		cyhal_data_bits = 8u;
187 		break;
188 
189 	case UART_CFG_DATA_BITS_9:
190 		cyhal_data_bits = 9u;
191 		break;
192 
193 	default:
194 		cyhal_data_bits = 1u;
195 	}
196 	return cyhal_data_bits;
197 }
198 
/* Return the SCB block number that owns 'reg_addr', or -1 when the address
 * is not a known SCB base address.
 */
static int32_t _get_hw_block_num(CySCB_Type *reg_addr)
{
	extern const uint8_t _CYHAL_SCB_BASE_ADDRESS_INDEX[_SCB_ARRAY_SIZE];
	extern CySCB_Type *const _CYHAL_SCB_BASE_ADDRESSES[_SCB_ARRAY_SIZE];

	for (uint32_t idx = 0u; idx < _SCB_ARRAY_SIZE; idx++) {
		if (reg_addr == _CYHAL_SCB_BASE_ADDRESSES[idx]) {
			return _CYHAL_SCB_BASE_ADDRESS_INDEX[idx];
		}
	}

	return -1;
}
214 
/* Return the number of entries currently held in the TX FIFO. */
uint32_t ifx_cat1_uart_get_num_in_tx_fifo(const struct device *dev)
{
	const struct ifx_cat1_uart_config *const config = dev->config;

	return Cy_SCB_GetNumInTxFifo(config->reg_addr);
}
221 
ifx_cat1_uart_get_tx_active(const struct device * dev)222 bool ifx_cat1_uart_get_tx_active(const struct device *dev)
223 {
224 	const struct ifx_cat1_uart_config *const config = dev->config;
225 
226 	return Cy_SCB_GetTxSrValid(config->reg_addr) ? true : false;
227 }
228 
ifx_cat1_uart_poll_in(const struct device * dev,unsigned char * c)229 static int ifx_cat1_uart_poll_in(const struct device *dev, unsigned char *c)
230 {
231 	cy_rslt_t rec;
232 	struct ifx_cat1_uart_data *data = dev->data;
233 
234 	rec = cyhal_uart_getc(&data->obj, c, 0u);
235 
236 	return ((rec == CY_SCB_UART_RX_NO_DATA) ? -1 : 0);
237 }
238 
/* Blocking write of one character; the HAL result is intentionally ignored. */
static void ifx_cat1_uart_poll_out(const struct device *dev, unsigned char c)
{
	struct ifx_cat1_uart_data *data = dev->data;

	(void)cyhal_uart_putc(&data->obj, (uint32_t)c);
}
245 
ifx_cat1_uart_err_check(const struct device * dev)246 static int ifx_cat1_uart_err_check(const struct device *dev)
247 {
248 	struct ifx_cat1_uart_data *data = dev->data;
249 	uint32_t status = Cy_SCB_UART_GetRxFifoStatus(data->obj.base);
250 	int errors = 0;
251 
252 	if (status & CY_SCB_UART_RX_OVERFLOW) {
253 		errors |= UART_ERROR_OVERRUN;
254 	}
255 
256 	if (status & CY_SCB_UART_RX_ERR_PARITY) {
257 		errors |= UART_ERROR_PARITY;
258 	}
259 
260 	if (status & CY_SCB_UART_RX_ERR_FRAME) {
261 		errors |= UART_ERROR_FRAMING;
262 	}
263 
264 	return errors;
265 }
266 
ifx_cat1_uart_configure(const struct device * dev,const struct uart_config * cfg)267 static int ifx_cat1_uart_configure(const struct device *dev, const struct uart_config *cfg)
268 {
269 	__ASSERT_NO_MSG(cfg != NULL);
270 
271 	cy_rslt_t result;
272 	struct ifx_cat1_uart_data *data = dev->data;
273 
274 	cyhal_uart_cfg_t uart_cfg = {
275 		.data_bits = _convert_uart_data_bits_z_to_cyhal(cfg->data_bits),
276 		.stop_bits = _convert_uart_stop_bits_z_to_cyhal(cfg->stop_bits),
277 		.parity = _convert_uart_parity_z_to_cyhal(cfg->parity)};
278 
279 	/* Store Uart Zephyr configuration (uart config) into data structure */
280 	data->cfg = *cfg;
281 
282 	/* Configure parity, data and stop bits */
283 	result = cyhal_uart_configure(&data->obj, &uart_cfg);
284 
285 	/* Configure the baud rate */
286 	if (result == CY_RSLT_SUCCESS) {
287 		result = cyhal_uart_set_baud(&data->obj, cfg->baudrate, NULL);
288 	}
289 
290 	/* Set RTS/CTS flow control pins as NC so cyhal will skip initialization */
291 	data->obj.pin_cts = NC;
292 	data->obj.pin_rts = NC;
293 
294 	/* Enable RTS/CTS flow control */
295 	if ((result == CY_RSLT_SUCCESS) && cfg->flow_ctrl) {
296 		result = cyhal_uart_enable_flow_control(&data->obj, true, true);
297 	}
298 
299 	return (result == CY_RSLT_SUCCESS) ? 0 : -ENOTSUP;
300 };
301 
ifx_cat1_uart_config_get(const struct device * dev,struct uart_config * cfg)302 static int ifx_cat1_uart_config_get(const struct device *dev, struct uart_config *cfg)
303 {
304 	ARG_UNUSED(dev);
305 
306 	struct ifx_cat1_uart_data *const data = dev->data;
307 
308 	if (cfg == NULL) {
309 		return -EINVAL;
310 	}
311 
312 	*cfg = data->cfg;
313 	return 0;
314 }
315 
316 #ifdef CONFIG_UART_INTERRUPT_DRIVEN
317 
318 /* Uart event callback for Interrupt driven mode */
_uart_event_callback_irq_mode(void * arg,cyhal_uart_event_t event)319 static void _uart_event_callback_irq_mode(void *arg, cyhal_uart_event_t event)
320 {
321 	ARG_UNUSED(event);
322 
323 	const struct device *dev = (const struct device *)arg;
324 	struct ifx_cat1_uart_data *const data = dev->data;
325 
326 	if (data->irq_cb != NULL) {
327 		data->irq_cb(dev, data->irq_cb_data);
328 	}
329 }
330 
331 /* Fill FIFO with data */
ifx_cat1_uart_fifo_fill(const struct device * dev,const uint8_t * tx_data,int size)332 static int ifx_cat1_uart_fifo_fill(const struct device *dev, const uint8_t *tx_data, int size)
333 {
334 	struct ifx_cat1_uart_data *const data = dev->data;
335 	size_t _size = (size_t)size;
336 
337 	(void)cyhal_uart_write(&data->obj, (uint8_t *)tx_data, &_size);
338 	return (int)_size;
339 }
340 
341 /* Read data from FIFO */
ifx_cat1_uart_fifo_read(const struct device * dev,uint8_t * rx_data,const int size)342 static int ifx_cat1_uart_fifo_read(const struct device *dev, uint8_t *rx_data, const int size)
343 {
344 	struct ifx_cat1_uart_data *const data = dev->data;
345 	size_t _size = (size_t)size;
346 
347 	(void)cyhal_uart_read(&data->obj, rx_data, &_size);
348 	return (int)_size;
349 }
350 
/* Enable the TX-empty interrupt event */
static void ifx_cat1_uart_irq_tx_enable(const struct device *dev)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	const struct ifx_cat1_uart_config *const config = dev->config;

	cyhal_uart_enable_event(&data->obj, (cyhal_uart_event_t)CYHAL_UART_IRQ_TX_EMPTY,
				config->irq_priority, 1);
}
360 
/* Disable the TX-empty interrupt event */
static void ifx_cat1_uart_irq_tx_disable(const struct device *dev)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	const struct ifx_cat1_uart_config *const config = dev->config;

	cyhal_uart_enable_event(&data->obj, (cyhal_uart_event_t)CYHAL_UART_IRQ_TX_EMPTY,
				config->irq_priority, 0);
}
370 
371 /* Check if UART TX buffer can accept a new char */
ifx_cat1_uart_irq_tx_ready(const struct device * dev)372 static int ifx_cat1_uart_irq_tx_ready(const struct device *dev)
373 {
374 	struct ifx_cat1_uart_data *const data = dev->data;
375 	uint32_t mask = Cy_SCB_GetTxInterruptStatusMasked(data->obj.base);
376 
377 	return (((mask & (CY_SCB_UART_TX_NOT_FULL | SCB_INTR_TX_EMPTY_Msk)) != 0u) ? 1 : 0);
378 }
379 
380 /* Check if UART TX block finished transmission */
ifx_cat1_uart_irq_tx_complete(const struct device * dev)381 static int ifx_cat1_uart_irq_tx_complete(const struct device *dev)
382 {
383 	struct ifx_cat1_uart_data *const data = dev->data;
384 
385 	return (int)!(cyhal_uart_is_tx_active(&data->obj));
386 }
387 
/* Enable the RX-not-empty interrupt event */
static void ifx_cat1_uart_irq_rx_enable(const struct device *dev)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	const struct ifx_cat1_uart_config *const config = dev->config;

	cyhal_uart_enable_event(&data->obj, (cyhal_uart_event_t)CYHAL_UART_IRQ_RX_NOT_EMPTY,
				config->irq_priority, 1);
}
397 
/* Disable the RX-not-empty interrupt event */
static void ifx_cat1_uart_irq_rx_disable(const struct device *dev)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	const struct ifx_cat1_uart_config *const config = dev->config;

	cyhal_uart_enable_event(&data->obj, (cyhal_uart_event_t)CYHAL_UART_IRQ_RX_NOT_EMPTY,
				config->irq_priority, 0);
}
407 
408 /* Check if UART RX buffer has a received char */
ifx_cat1_uart_irq_rx_ready(const struct device * dev)409 static int ifx_cat1_uart_irq_rx_ready(const struct device *dev)
410 {
411 	struct ifx_cat1_uart_data *const data = dev->data;
412 
413 	return cyhal_uart_readable(&data->obj) ? 1 : 0;
414 }
415 
/* Enable both TX and RX error interrupt events */
static void ifx_cat1_uart_irq_err_enable(const struct device *dev)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	const struct ifx_cat1_uart_config *const config = dev->config;

	cyhal_uart_enable_event(
		&data->obj, (cyhal_uart_event_t)(CYHAL_UART_IRQ_TX_ERROR | CYHAL_UART_IRQ_RX_ERROR),
		config->irq_priority, 1);
}
426 
/* Disable both TX and RX error interrupt events */
static void ifx_cat1_uart_irq_err_disable(const struct device *dev)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	const struct ifx_cat1_uart_config *const config = dev->config;

	cyhal_uart_enable_event(
		&data->obj, (cyhal_uart_event_t)(CYHAL_UART_IRQ_TX_ERROR | CYHAL_UART_IRQ_RX_ERROR),
		config->irq_priority, 0);
}
437 
438 /* Check if any IRQs is pending */
ifx_cat1_uart_irq_is_pending(const struct device * dev)439 static int ifx_cat1_uart_irq_is_pending(const struct device *dev)
440 {
441 	struct ifx_cat1_uart_data *const data = dev->data;
442 	uint32_t intcause = Cy_SCB_GetInterruptCause(data->obj.base);
443 
444 	return (int)(intcause & (CY_SCB_TX_INTR | CY_SCB_RX_INTR));
445 }
446 
/* Start processing interrupts in ISR.
 * This function should be called the first thing in the ISR. Calling
 * uart_irq_rx_ready(), uart_irq_tx_ready(), uart_irq_tx_complete()
 * allowed only after this.
 */
static int ifx_cat1_uart_irq_update(const struct device *dev)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	int status = 1;

	/* Report "nothing to process" (0) when an RX interrupt cause is flagged
	 * but the RX FIFO is actually empty, e.g. the data was already drained.
	 */
	if (((ifx_cat1_uart_irq_is_pending(dev) & CY_SCB_RX_INTR) != 0u) &&
	    (Cy_SCB_UART_GetNumInRxFifo(data->obj.base) == 0u)) {
		status = 0;
	}

	return status;
}
464 
/* Store the user IRQ callback and hook the common CYHAL event handler. */
static void ifx_cat1_uart_irq_callback_set(const struct device *dev,
					   uart_irq_callback_user_data_t cb, void *cb_data)
{
	struct ifx_cat1_uart_data *data = dev->data;

	/* Remember the user callback and its argument */
	data->irq_cb = cb;
	data->irq_cb_data = cb_data;

	/* Route all CYHAL UART events through _uart_event_callback_irq_mode() */
	cyhal_uart_register_callback(&data->obj, _uart_event_callback_irq_mode, (void *)dev);
}
478 
479 #endif /* CONFIG_UART_INTERRUPT_DRIVEN */
480 
481 #ifdef CONFIG_UART_ASYNC_API
/* Store the async-API user callback and bind the TX DMA config to 'dev'. */
static int ifx_cat1_uart_async_callback_set(const struct device *dev, uart_callback_t callback,
					    void *user_data)
{
	struct ifx_cat1_uart_data *const data = dev->data;

	data->async.cb = callback;
	data->async.user_data = user_data;
	/* Make the TX DMA completion callback able to recover the UART device */
	data->async.dma_tx.dma_cfg.user_data = (void *)dev;

	return 0;
}
493 
494 /* Async DMA helper */
ifx_cat1_uart_async_dma_config_buffer(const struct device * dev,bool tx)495 static int ifx_cat1_uart_async_dma_config_buffer(const struct device *dev, bool tx)
496 {
497 	int ret;
498 	struct ifx_cat1_uart_data *const data = dev->data;
499 	struct ifx_cat1_dma_stream *dma_stream = tx ? &data->async.dma_tx : &data->async.dma_rx;
500 
501 	/* Configure byte mode */
502 	dma_stream->blk_cfg.block_size = dma_stream->buf_len;
503 
504 	if (tx) {
505 		dma_stream->blk_cfg.source_address = (uint32_t)dma_stream->buf;
506 	} else {
507 		dma_stream->blk_cfg.dest_address = (uint32_t)dma_stream->buf;
508 	}
509 
510 	ret = dma_config(dma_stream->dev, dma_stream->dma_channel, &dma_stream->dma_cfg);
511 
512 	if (!ret) {
513 		ret = dma_start(dma_stream->dev, dma_stream->dma_channel);
514 	}
515 
516 	return ret;
517 }
518 
/*
 * Start an asynchronous (DMA-driven) transmission.
 *
 * @param dev          UART device
 * @param tx_data      Buffer to transmit (must remain valid until UART_TX_DONE)
 * @param tx_data_size Number of bytes to send
 * @param timeout      Timeout in microseconds; SYS_FOREVER_US or 0 disables it
 * @return 0 on success, -ENODEV without a TX DMA device, -EINVAL on bad args,
 *         or the DMA configuration error
 */
static int ifx_cat1_uart_async_tx(const struct device *dev, const uint8_t *tx_data,
				  size_t tx_data_size, int32_t timeout)
{
	struct ifx_cat1_uart_data *const data = dev->data;
	const struct device *dev_dma = data->async.dma_tx.dev;
	int err;

	if (dev_dma == NULL) {
		err = -ENODEV;
		goto exit;
	}

	if (tx_data == NULL || tx_data_size == 0) {
		err = -EINVAL;
		goto exit;
	}

	/* Store information about data buffer need to send */
	data->async.dma_tx.buf = (uint8_t *)tx_data;
	data->async.dma_tx.buf_len = tx_data_size;
	data->async.dma_tx.blk_cfg.block_size = 0;
	data->async.dma_tx.dma_transmitted_bytes = 0;

	/* Configure dma to transfer */
	err = ifx_cat1_uart_async_dma_config_buffer(dev, true);
	if (err) {
		LOG_ERR("Error Tx DMA configure (%d)", err);
		goto exit;
	}

	/* Configure timeout */
	if ((timeout != SYS_FOREVER_US) && (timeout != 0)) {
		k_work_reschedule(&data->async.dma_tx.timeout_work, K_USEC(timeout));
	}

exit:
	return err;
}
557 
ifx_cat1_uart_async_tx_abort(const struct device * dev)558 static int ifx_cat1_uart_async_tx_abort(const struct device *dev)
559 {
560 	struct ifx_cat1_uart_data *data = dev->data;
561 	struct uart_event evt = {0};
562 	struct dma_status stat;
563 	int err = 0;
564 	unsigned int key = irq_lock();
565 
566 	k_work_cancel_delayable(&data->async.dma_tx.timeout_work);
567 
568 	err = dma_stop(data->async.dma_tx.dev, data->async.dma_tx.dma_channel);
569 	if (err) {
570 		LOG_ERR("Error stopping Tx DMA (%d)", err);
571 		goto unlock;
572 	}
573 
574 	err = dma_get_status(data->async.dma_tx.dev, data->async.dma_tx.dma_channel, &stat);
575 	if (err) {
576 		LOG_ERR("Error stopping Tx DMA (%d)", err);
577 		goto unlock;
578 	}
579 
580 	evt.type = UART_TX_ABORTED;
581 	evt.data.tx.buf = data->async.dma_tx.buf;
582 	evt.data.tx.len = 0;
583 
584 	if (data->async.cb) {
585 		data->async.cb(dev, &evt, data->async.user_data);
586 	}
587 
588 unlock:
589 	irq_unlock(key);
590 	return err;
591 }
592 
/* DMA completion callback for TX: on success, cancel the TX timeout, stop
 * the channel and emit UART_TX_DONE; on DMA error, just stop the channel.
 * 'arg' is the UART device (set via dma_cfg.user_data).
 */
static void dma_callback_tx_done(const struct device *dma_dev, void *arg, uint32_t channel,
				 int status)
{
	const struct device *uart_dev = (void *)arg;
	struct ifx_cat1_uart_data *const data = uart_dev->data;
	unsigned int key = irq_lock();

	if (status == 0) {

		k_work_cancel_delayable(&data->async.dma_tx.timeout_work);
		dma_stop(data->async.dma_tx.dev, data->async.dma_tx.dma_channel);

		/* Capture buffer info before clearing it for the event */
		struct uart_event evt = {.type = UART_TX_DONE,
					 .data.tx.buf = data->async.dma_tx.buf,
					 .data.tx.len = data->async.dma_tx.buf_len};

		data->async.dma_tx.buf = NULL;
		data->async.dma_tx.buf_len = 0;

		if (data->async.cb) {
			data->async.cb(uart_dev, &evt, data->async.user_data);
		}

	} else {
		/* DMA error: stop the channel; no event is emitted here */
		dma_stop(data->async.dma_tx.dev, data->async.dma_tx.dma_channel);
	}
	irq_unlock(key);
}
622 
/* Delayed-work handler for the TX timeout: recover the async context from the
 * embedded work item and abort the pending transmission.
 */
static void ifx_cat1_uart_async_tx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct ifx_cat1_dma_stream *dma_tx =
		CONTAINER_OF(dwork, struct ifx_cat1_dma_stream, timeout_work);
	struct ifx_cat1_uart_async *async =
		CONTAINER_OF(dma_tx, struct ifx_cat1_uart_async, dma_tx);

	(void)ifx_cat1_uart_async_tx_abort(async->uart_dev);
}
633 
/* Emit UART_RX_RDY covering the bytes received since the last report
 * (offset..counter) and advance 'offset'. No event if nothing is new.
 */
static inline void async_evt_rx_rdy(struct ifx_cat1_uart_data *data)
{
	struct uart_event event = {.type = UART_RX_RDY,
				   .data.rx.buf = (uint8_t *)data->async.dma_rx.buf,
				   .data.rx.len =
					   data->async.dma_rx.counter - data->async.dma_rx.offset,
				   .data.rx.offset = data->async.dma_rx.offset};

	/* Everything up to 'counter' is now reported */
	data->async.dma_rx.offset = data->async.dma_rx.counter;

	if (event.data.rx.len > 0 && data->async.cb) {
		data->async.cb(data->async.uart_dev, &event, data->async.user_data);
	}
}
648 
async_evt_rx_buf_request(struct ifx_cat1_uart_data * data)649 static inline void async_evt_rx_buf_request(struct ifx_cat1_uart_data *data)
650 {
651 	struct uart_event evt = {.type = UART_RX_BUF_REQUEST};
652 
653 	if (data->async.cb) {
654 		data->async.cb(data->async.uart_dev, &evt, data->async.user_data);
655 	}
656 }
657 
async_evt_rx_release_buffer(struct ifx_cat1_uart_data * data,int buffer_type)658 static inline void async_evt_rx_release_buffer(struct ifx_cat1_uart_data *data, int buffer_type)
659 {
660 	struct uart_event event = {.type = UART_RX_BUF_RELEASED};
661 
662 	if (buffer_type == NEXT_BUFFER && !data->async.rx_next_buf) {
663 		return;
664 	}
665 
666 	if (buffer_type == CURRENT_BUFFER && !data->async.dma_rx.buf) {
667 		return;
668 	}
669 
670 	if (buffer_type == NEXT_BUFFER) {
671 		event.data.rx_buf.buf = data->async.rx_next_buf;
672 		data->async.rx_next_buf = NULL;
673 		data->async.rx_next_buf_len = 0;
674 	} else {
675 		event.data.rx_buf.buf = data->async.dma_rx.buf;
676 		data->async.dma_rx.buf = NULL;
677 		data->async.dma_rx.buf_len = 0;
678 	}
679 
680 	if (data->async.cb) {
681 		data->async.cb(data->async.uart_dev, &event, data->async.user_data);
682 	}
683 }
684 
/* Reset all RX stream bookkeeping and emit UART_RX_DISABLED. */
static inline void async_evt_rx_disabled(struct ifx_cat1_uart_data *data)
{
	struct uart_event event = {.type = UART_RX_DISABLED};

	data->async.dma_rx.buf = NULL;
	data->async.dma_rx.buf_len = 0;
	data->async.dma_rx.offset = 0;
	data->async.dma_rx.counter = 0;

	if (data->async.cb) {
		data->async.cb(data->async.uart_dev, &event, data->async.user_data);
	}
}
698 
/* Emit UART_RX_STOPPED with 'reason', attaching the yet-unreported bytes of
 * the current RX buffer. Does nothing when RX is idle or no callback is set.
 */
static inline void async_evt_rx_stopped(struct ifx_cat1_uart_data *data,
					enum uart_rx_stop_reason reason)
{
	struct uart_event event = {.type = UART_RX_STOPPED, .data.rx_stop.reason = reason};
	struct uart_event_rx *rx = &event.data.rx_stop.data;
	struct dma_status stat;

	if (data->async.dma_rx.buf_len == 0 || data->async.cb == NULL) {
		return;
	}

	rx->buf = data->async.dma_rx.buf;

	/* Refresh 'counter' from the DMA's remaining (pending) byte count */
	if (dma_get_status(data->async.dma_rx.dev, data->async.dma_rx.dma_channel, &stat) == 0) {
		data->async.dma_rx.counter = data->async.dma_rx.buf_len - stat.pending_length;
	}
	rx->len = data->async.dma_rx.counter - data->async.dma_rx.offset;
	/* NOTE(review): 'offset' is set to 'counter' (the end of the data)
	 * rather than 'dma_rx.offset' (the start of the unreported span) —
	 * confirm against the uart_event_rx contract for UART_RX_STOPPED.
	 */
	rx->offset = data->async.dma_rx.counter;

	data->async.cb(data->async.uart_dev, &event, data->async.user_data);
}
720 
ifx_cat1_uart_async_rx_enable(const struct device * dev,uint8_t * rx_data,size_t rx_data_size,int32_t timeout)721 static int ifx_cat1_uart_async_rx_enable(const struct device *dev, uint8_t *rx_data,
722 					 size_t rx_data_size, int32_t timeout)
723 {
724 	struct ifx_cat1_uart_data *const data = dev->data;
725 	struct dma_status dma_status = {0};
726 	int err = 0;
727 	unsigned int key = irq_lock();
728 
729 	if (data->async.dma_rx.dev == NULL) {
730 		return -ENODEV;
731 	}
732 
733 	if (data->async.dma_rx.buf_len != 0) {
734 		return -EBUSY;
735 	}
736 
737 	/* Store information about data buffer need to send */
738 	data->async.dma_rx.buf = (uint8_t *)rx_data;
739 	data->async.dma_rx.buf_len = rx_data_size;
740 	data->async.dma_rx.blk_cfg.block_size = 0;
741 	data->async.dma_rx.dma_transmitted_bytes = 0;
742 	data->async.dma_rx.timeout = timeout;
743 
744 	/* Request buffers before enabling rx */
745 	async_evt_rx_buf_request(data);
746 
747 	/* Configure dma to transfer */
748 	err = ifx_cat1_uart_async_dma_config_buffer(dev, false);
749 	if (err) {
750 		LOG_ERR("Error Rx DMA configure (%d)", err);
751 		goto unlock;
752 	}
753 	err = dma_get_status(data->async.dma_rx.dev, data->async.dma_rx.dma_channel, &dma_status);
754 	if (err) {
755 		return err;
756 	}
757 
758 	if (dma_status.busy) {
759 		return -EBUSY;
760 	}
761 
762 	/* Configure timeout */
763 	if ((timeout != SYS_FOREVER_US) && (timeout != 0)) {
764 		k_work_reschedule(&data->async.dma_rx.timeout_work, K_USEC(timeout));
765 	}
766 
767 unlock:
768 	irq_unlock(key);
769 	return err;
770 }
771 
/* DMA completion callback for RX: the current buffer is full. Report it,
 * release it, and either swap in the application-provided next buffer and
 * restart the DMA, or stop reception (UART_RX_DISABLED) when none is queued.
 * On DMA error, stop the channel and tear reception down.
 * 'arg' is the UART device (set via dma_cfg.user_data).
 */
static void dma_callback_rx_rdy(const struct device *dma_dev, void *arg, uint32_t channel,
				int status)
{
	const struct device *uart_dev = (void *)arg;
	struct ifx_cat1_uart_data *const data = uart_dev->data;
	unsigned int key = irq_lock();

	if (status == 0) {
		/* All data are sent, call user callback */

		k_work_cancel_delayable(&data->async.dma_rx.timeout_work);
		/* Whole buffer received */
		data->async.dma_rx.counter = data->async.dma_rx.buf_len;

		async_evt_rx_rdy(data);
		async_evt_rx_release_buffer(data, CURRENT_BUFFER);

		data->async.dma_rx.buf = NULL;
		data->async.dma_rx.buf_len = 0;
		data->async.dma_rx.blk_cfg.block_size = 0;
		data->async.dma_rx.dma_transmitted_bytes = 0;

		/* No replacement buffer queued: reception ends here */
		if (!data->async.rx_next_buf) {
			dma_stop(data->async.dma_rx.dev, data->async.dma_rx.dma_channel);
			async_evt_rx_disabled(data);
			goto unlock;
		}

		/* Promote the queued next buffer to the active buffer */
		data->async.dma_rx.buf = data->async.rx_next_buf;
		data->async.dma_rx.buf_len = data->async.rx_next_buf_len;
		data->async.dma_rx.offset = 0;
		data->async.dma_rx.counter = 0;
		data->async.rx_next_buf = NULL;
		data->async.rx_next_buf_len = 0;

		ifx_cat1_uart_async_dma_config_buffer(uart_dev, false);

		/* Ask the application for a new "next" buffer */
		async_evt_rx_buf_request(data);

		if ((data->async.dma_rx.timeout != SYS_FOREVER_US) &&
		    (data->async.dma_rx.timeout != 0)) {
			k_work_reschedule(&data->async.dma_rx.timeout_work,
					  K_USEC(data->async.dma_rx.timeout));
		}

	} else {
		/* DMA error */
		dma_stop(data->async.dma_rx.dev, data->async.dma_rx.dma_channel);

		async_evt_rx_stopped(data, UART_ERROR_OVERRUN);
		async_evt_rx_release_buffer(data, CURRENT_BUFFER);
		async_evt_rx_release_buffer(data, NEXT_BUFFER);
		async_evt_rx_disabled(data);
		goto unlock;
	}
unlock:
	irq_unlock(key);
}
829 
/* Periodic RX inactivity check: when the receive count has not advanced
 * since the previous tick (line idle), report the buffered data via
 * UART_RX_RDY; otherwise just record the new count. Reschedules itself
 * while a finite timeout is configured.
 */
static void ifx_cat1_uart_async_rx_timeout(struct k_work *work)
{

	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct ifx_cat1_dma_stream *dma_rx =
		CONTAINER_OF(dwork, struct ifx_cat1_dma_stream, timeout_work);
	struct ifx_cat1_uart_async *async =
		CONTAINER_OF(dma_rx, struct ifx_cat1_uart_async, dma_rx);
	struct ifx_cat1_uart_data *data = CONTAINER_OF(async, struct ifx_cat1_uart_data, async);

	struct dma_status stat;
	unsigned int key = irq_lock();

	/* RX no longer active: nothing to report */
	if (dma_rx->buf_len == 0) {
		irq_unlock(key);
		return;
	}
	if (dma_get_status(dma_rx->dev, dma_rx->dma_channel, &stat) == 0) {
		size_t rx_rcv_len = dma_rx->buf_len - stat.pending_length;

		/* Unchanged count since the last tick means the line went idle */
		if ((rx_rcv_len > 0) && (rx_rcv_len == dma_rx->counter)) {
			dma_rx->counter = rx_rcv_len;
			async_evt_rx_rdy(data);
		} else {
			dma_rx->counter = rx_rcv_len;
		}
	}
	irq_unlock(key);

	if ((dma_rx->timeout != SYS_FOREVER_US) && (dma_rx->timeout != 0)) {
		k_work_reschedule(&dma_rx->timeout_work, K_USEC(dma_rx->timeout));
	}
}
863 
/* Disable asynchronous reception: cancel the RX timeout, stop the DMA,
 * report any yet-unreported bytes, release both buffers and emit
 * UART_RX_DISABLED. Returns -EINVAL when RX is not active.
 */
static int ifx_cat1_uart_async_rx_disable(const struct device *dev)
{
	struct ifx_cat1_uart_data *data = dev->data;
	struct dma_status stat;
	unsigned int key;

	k_work_cancel_delayable(&data->async.dma_rx.timeout_work);

	key = irq_lock();

	if (data->async.dma_rx.buf_len == 0) {
		__ASSERT_NO_MSG(data->async.dma_rx.buf == NULL);
		irq_unlock(key);
		return -EINVAL;
	}

	dma_stop(data->async.dma_rx.dev, data->async.dma_rx.dma_channel);

	/* Flush any received-but-unreported bytes before tearing down */
	if (dma_get_status(data->async.dma_rx.dev, data->async.dma_rx.dma_channel, &stat) == 0) {
		size_t rx_rcv_len = data->async.dma_rx.buf_len - stat.pending_length;

		if (rx_rcv_len > data->async.dma_rx.offset) {
			data->async.dma_rx.counter = rx_rcv_len;
			async_evt_rx_rdy(data);
		}
	}
	async_evt_rx_release_buffer(data, CURRENT_BUFFER);
	async_evt_rx_release_buffer(data, NEXT_BUFFER);
	async_evt_rx_disabled(data);

	irq_unlock(key);
	return 0;
}
897 
ifx_cat1_uart_async_rx_buf_rsp(const struct device * dev,uint8_t * buf,size_t len)898 static int ifx_cat1_uart_async_rx_buf_rsp(const struct device *dev, uint8_t *buf, size_t len)
899 {
900 	struct ifx_cat1_uart_data *data = dev->data;
901 	unsigned int key;
902 	int ret = 0;
903 
904 	key = irq_lock();
905 
906 	if (data->async.dma_rx.buf_len == 0U) {
907 		ret = -EACCES;
908 		goto unlock;
909 	}
910 
911 	if (data->async.rx_next_buf_len != 0U) {
912 		ret = -EBUSY;
913 		goto unlock;
914 	}
915 
916 	data->async.rx_next_buf = buf;
917 	data->async.rx_next_buf_len = len;
918 
919 unlock:
920 	irq_unlock(key);
921 	return ret;
922 }
923 
924 #endif /*CONFIG_UART_ASYNC_API */
925 
ifx_cat1_uart_init(const struct device * dev)926 static int ifx_cat1_uart_init(const struct device *dev)
927 {
928 	struct ifx_cat1_uart_data *const data = dev->data;
929 	const struct ifx_cat1_uart_config *const config = dev->config;
930 	cy_rslt_t result;
931 	int ret;
932 
933 	cyhal_uart_configurator_t uart_init_cfg = {
934 		.resource = &data->hw_resource,
935 		.config = &_cyhal_uart_default_config,
936 		.clock = &data->clock,
937 		.gpios = {.pin_tx = NC, .pin_rts = NC, .pin_cts = NC},
938 	};
939 
940 	/* Dedicate SCB HW resource */
941 	data->hw_resource.type = CYHAL_RSC_SCB;
942 	data->hw_resource.block_num = _get_hw_block_num(config->reg_addr);
943 
944 	/* Configure dt provided device signals when available */
945 	ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
946 	if (ret < 0) {
947 		return ret;
948 	}
949 
950 	/* Allocates clock for selected IP block */
951 	result = _cyhal_utils_allocate_clock(&data->clock, &data->hw_resource,
952 					     CYHAL_CLOCK_BLOCK_PERIPHERAL_16BIT, true);
953 	if (result != CY_RSLT_SUCCESS) {
954 		return -ENOTSUP;
955 	}
956 
957 	/* Assigns a programmable divider to a selected IP block */
958 	en_clk_dst_t clk_idx = _cyhal_scb_get_clock_index(uart_init_cfg.resource->block_num);
959 
960 	result = _cyhal_utils_peri_pclk_assign_divider(clk_idx, uart_init_cfg.clock);
961 	if (result != CY_RSLT_SUCCESS) {
962 		return -ENOTSUP;
963 	}
964 
965 	/* Initialize the UART peripheral */
966 	result = cyhal_uart_init_cfg(&data->obj, &uart_init_cfg);
967 	if (result != CY_RSLT_SUCCESS) {
968 		return -ENOTSUP;
969 	}
970 
971 	/* Perform initial Uart configuration */
972 	data->obj.is_clock_owned = true;
973 	ret = ifx_cat1_uart_configure(dev, &config->dt_cfg);
974 
975 #ifdef CONFIG_UART_ASYNC_API
976 	data->async.uart_dev = dev;
977 	if (data->async.dma_rx.dev != NULL) {
978 		cyhal_source_t uart_source;
979 
980 		if (!device_is_ready(data->async.dma_rx.dev)) {
981 			return -ENODEV;
982 		}
983 
984 		data->async.dma_rx.blk_cfg.source_address =
985 			(uint32_t)(&config->reg_addr->RX_FIFO_RD);
986 		data->async.dma_rx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
987 		data->async.dma_rx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
988 		data->async.dma_rx.dma_cfg.head_block = &data->async.dma_rx.blk_cfg;
989 		data->async.dma_rx.dma_cfg.user_data = (void *)dev;
990 		data->async.dma_rx.dma_cfg.dma_callback = dma_callback_rx_rdy;
991 
992 		if (cyhal_uart_enable_output(&data->obj,
993 					     CYHAL_UART_OUTPUT_TRIGGER_RX_FIFO_LEVEL_REACHED,
994 					     &uart_source)) {
995 			return -ENOTSUP;
996 		}
997 
998 		if (ifx_cat1_dma_ex_connect_digital(data->async.dma_rx.dev,
999 						    data->async.dma_rx.dma_channel, uart_source,
1000 						    CYHAL_DMA_INPUT_TRIGGER_ALL_ELEMENTS)) {
1001 			return -ENOTSUP;
1002 		}
1003 
1004 		Cy_SCB_SetRxFifoLevel(config->reg_addr, 0);
1005 	}
1006 
1007 	if (data->async.dma_tx.dev != NULL) {
1008 		cyhal_source_t uart_source;
1009 
1010 		if (!device_is_ready(data->async.dma_tx.dev)) {
1011 			return -ENODEV;
1012 		}
1013 
1014 		data->async.dma_tx.blk_cfg.dest_address = (uint32_t)(&config->reg_addr->TX_FIFO_WR);
1015 		data->async.dma_tx.blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
1016 		data->async.dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
1017 		data->async.dma_tx.dma_cfg.head_block = &data->async.dma_tx.blk_cfg;
1018 		data->async.dma_tx.dma_cfg.user_data = (void *)dev;
1019 		data->async.dma_tx.dma_cfg.dma_callback = dma_callback_tx_done;
1020 
1021 		if (cyhal_uart_enable_output(&data->obj,
1022 					     CYHAL_UART_OUTPUT_TRIGGER_TX_FIFO_LEVEL_REACHED,
1023 					     &uart_source)) {
1024 			return -ENOTSUP;
1025 		}
1026 
1027 		if (ifx_cat1_dma_ex_connect_digital(data->async.dma_tx.dev,
1028 						    data->async.dma_tx.dma_channel, uart_source,
1029 						    CYHAL_DMA_INPUT_TRIGGER_ALL_ELEMENTS)) {
1030 			return -ENOTSUP;
1031 		}
1032 		Cy_SCB_SetTxFifoLevel(config->reg_addr, 1);
1033 	}
1034 
1035 	k_work_init_delayable(&data->async.dma_tx.timeout_work, ifx_cat1_uart_async_tx_timeout);
1036 	k_work_init_delayable(&data->async.dma_rx.timeout_work, ifx_cat1_uart_async_rx_timeout);
1037 
1038 #endif /* CONFIG_UART_ASYNC_API */
1039 
1040 	return ret;
1041 }
1042 
/* UART driver API vtable: polling ops are always provided; runtime
 * configure, interrupt-driven and async entries are compiled in per Kconfig.
 */
static DEVICE_API(uart, ifx_cat1_uart_driver_api) = {
	.poll_in = ifx_cat1_uart_poll_in,
	.poll_out = ifx_cat1_uart_poll_out,
	.err_check = ifx_cat1_uart_err_check,

#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
	.configure = ifx_cat1_uart_configure,
	.config_get = ifx_cat1_uart_config_get,
#endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	.fifo_fill = ifx_cat1_uart_fifo_fill,
	.fifo_read = ifx_cat1_uart_fifo_read,
	.irq_tx_enable = ifx_cat1_uart_irq_tx_enable,
	.irq_tx_disable = ifx_cat1_uart_irq_tx_disable,
	.irq_tx_ready = ifx_cat1_uart_irq_tx_ready,
	.irq_rx_enable = ifx_cat1_uart_irq_rx_enable,
	.irq_rx_disable = ifx_cat1_uart_irq_rx_disable,
	.irq_tx_complete = ifx_cat1_uart_irq_tx_complete,
	.irq_rx_ready = ifx_cat1_uart_irq_rx_ready,
	.irq_err_enable = ifx_cat1_uart_irq_err_enable,
	.irq_err_disable = ifx_cat1_uart_irq_err_disable,
	.irq_is_pending = ifx_cat1_uart_irq_is_pending,
	.irq_update = ifx_cat1_uart_irq_update,
	.irq_callback_set = ifx_cat1_uart_irq_callback_set,
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

#if CONFIG_UART_ASYNC_API
	.callback_set = ifx_cat1_uart_async_callback_set,
	.tx = ifx_cat1_uart_async_tx,
	.rx_enable = ifx_cat1_uart_async_rx_enable,
	.tx_abort = ifx_cat1_uart_async_tx_abort,
	.rx_buf_rsp = ifx_cat1_uart_async_rx_buf_rsp,
	.rx_disable = ifx_cat1_uart_async_rx_disable,
#endif /*CONFIG_UART_ASYNC_API*/

};
1080 
#if defined(CONFIG_UART_ASYNC_API)
/*
 * Expand to the initializer fields of a struct ifx_cat1_dma_stream for the
 * DMA named "dir" (tx or rx) of instance "index": the DMA controller device,
 * its channel number, and a single-block dma_config with the given direction
 * and data sizes.
 */
#define UART_DMA_CHANNEL_INIT(index, dir, ch_dir, src_data_size, dst_data_size)                    \
	.dev = DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(index, dir)),                               \
	.dma_channel = DT_INST_DMAS_CELL_BY_NAME(index, dir, channel),                             \
	.dma_cfg = {                                                                               \
		.channel_direction = ch_dir,                                                       \
		.source_data_size = src_data_size,                                                 \
		.dest_data_size = dst_data_size,                                                   \
		.source_burst_length = 0,                                                          \
		.dest_burst_length = 0,                                                            \
		.block_count = 1,                                                                  \
		.complete_callback_en = 0,                                                         \
	},

/*
 * Initialize the .async.dma_tx / .async.dma_rx member when the instance has
 * a matching "dmas" entry in devicetree; otherwise leave it zero-initialized
 * (dev == NULL), which disables DMA for that direction at runtime.
 */
#define UART_DMA_CHANNEL(index, dir, ch_dir, src_data_size, dst_data_size)                         \
	.async.dma_##dir = {COND_CODE_1(                                                           \
		DT_INST_DMAS_HAS_NAME(index, dir),                                                 \
		(UART_DMA_CHANNEL_INIT(index, dir, ch_dir, src_data_size, dst_data_size)),         \
		(NULL))},

#else
/* Async API disabled: the DMA member does not exist, expand to nothing. */
#define UART_DMA_CHANNEL(index, dir, ch_dir, src_data_size, dst_data_size)
#endif /* CONFIG_UART_ASYNC_API */
1104 
1105 #define INFINEON_CAT1_UART_INIT(n)                                                                 \
1106 	PINCTRL_DT_INST_DEFINE(n);                                                                 \
1107 	static struct ifx_cat1_uart_data ifx_cat1_uart##n##_data = {                               \
1108 		UART_DMA_CHANNEL(n, tx, MEMORY_TO_PERIPHERAL, 1, 1)                                \
1109 			UART_DMA_CHANNEL(n, rx, PERIPHERAL_TO_MEMORY, 1, 1)};                      \
1110                                                                                                    \
1111 	static struct ifx_cat1_uart_config ifx_cat1_uart##n##_cfg = {                              \
1112 		.dt_cfg.baudrate = DT_INST_PROP(n, current_speed),                                 \
1113 		.dt_cfg.parity = DT_INST_ENUM_IDX(n, parity),             \
1114 		.dt_cfg.stop_bits = DT_INST_ENUM_IDX(n, stop_bits),       \
1115 		.dt_cfg.data_bits = DT_INST_ENUM_IDX(n, data_bits),       \
1116 		.dt_cfg.flow_ctrl = DT_INST_PROP(n, hw_flow_control),                              \
1117 		.pcfg = PINCTRL_DT_INST_DEV_CONFIG_GET(n),                                         \
1118 		.reg_addr = (CySCB_Type *)DT_INST_REG_ADDR(n),                                     \
1119 		.irq_priority = DT_INST_IRQ(n, priority)};                                         \
1120                                                                                                    \
1121 	DEVICE_DT_INST_DEFINE(n, &ifx_cat1_uart_init, NULL, &ifx_cat1_uart##n##_data,              \
1122 			      &ifx_cat1_uart##n##_cfg, PRE_KERNEL_1, CONFIG_SERIAL_INIT_PRIORITY,  \
1123 			      &ifx_cat1_uart_driver_api);
1124 
1125 DT_INST_FOREACH_STATUS_OKAY(INFINEON_CAT1_UART_INIT)
1126