1 /*
2 * Copyright (c) 2024-2025 Renesas Electronics Corporation
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #define DT_DRV_COMPAT renesas_ra_sci_uart
8
9 #include <zephyr/kernel.h>
10 #include <zephyr/drivers/uart.h>
11 #include <zephyr/drivers/pinctrl.h>
12 #include <zephyr/irq.h>
13 #include <soc.h>
14 #include "r_sci_uart.h"
15 #include "r_dtc.h"
16
17 #include <zephyr/logging/log.h>
18 LOG_MODULE_REGISTER(ra_sci_uart);
19
20 #define SCI_UART_SSR_FIFO_DR_RDF (R_SCI0_SSR_FIFO_DR_Msk | R_SCI0_SSR_FIFO_RDF_Msk)
21 #define SCI_UART_SSR_FIFO_TDFE_TEND (R_SCI0_SSR_FIFO_TDFE_Msk | R_SCI0_SSR_FIFO_TEND_Msk)
22 #define SCI_UART_SSR_TDRE_TEND (R_SCI0_SSR_TDRE_Msk | R_SCI0_SSR_TEND_Msk)
23 #define SCI_UART_SSR_ERR_MSK (R_SCI0_SSR_ORER_Msk | R_SCI0_SSR_FER_Msk | R_SCI0_SSR_PER_Msk)
24 #define SCI_UART_SSR_FIFO_ERR_MSK \
25 (R_SCI0_SSR_FIFO_ORER_Msk | R_SCI0_SSR_FIFO_FER_Msk | R_SCI0_SSR_FIFO_PER_Msk)
26
27 #if defined(CONFIG_UART_ASYNC_API)
28 void sci_uart_rxi_isr(void);
29 void sci_uart_txi_isr(void);
30 void sci_uart_tei_isr(void);
31 void sci_uart_eri_isr(void);
32 #endif
33
34 struct uart_ra_sci_config {
35 const struct pinctrl_dev_config *pcfg;
36 R_SCI0_Type * const regs;
37 };
38
39 struct uart_ra_sci_data {
40 const struct device *dev;
41 struct st_sci_uart_instance_ctrl sci;
42 struct uart_config uart_config;
43 struct st_uart_cfg fsp_config;
44 struct st_sci_uart_extended_cfg fsp_config_extend;
45 struct st_baud_setting_t fsp_baud_setting;
46 #if defined(CONFIG_UART_INTERRUPT_DRIVEN)
47 uart_irq_callback_user_data_t user_cb;
48 void *user_cb_data;
49 uint32_t ssr;
50 #endif
51 #if defined(CONFIG_UART_ASYNC_API)
52 uart_callback_t async_user_cb;
53 void *async_user_cb_data;
54
55 struct k_work_delayable rx_timeout_work;
56 size_t rx_timeout;
57 size_t rx_buf_len;
58 size_t rx_buf_offset;
59 size_t rx_buf_cap;
60 uint8_t *rx_buffer;
61 size_t rx_next_buf_cap;
62 uint8_t *rx_next_buf;
63
64 struct st_transfer_instance rx_transfer;
65 struct st_dtc_instance_ctrl rx_transfer_ctrl;
66 struct st_transfer_info rx_transfer_info;
67 struct st_transfer_cfg rx_transfer_cfg;
68 struct st_dtc_extended_cfg rx_transfer_cfg_extend;
69
70 struct k_work_delayable tx_timeout;
71 size_t tx_buf_cap;
72
73 struct st_transfer_instance tx_transfer;
74 struct st_dtc_instance_ctrl tx_transfer_ctrl;
75 struct st_transfer_info tx_transfer_info;
76 struct st_transfer_cfg tx_transfer_cfg;
77 struct st_dtc_extended_cfg tx_transfer_cfg_extend;
78 #endif
79 };
80
/*
 * Poll-read one character.
 *
 * Returns 0 with *c filled on success, -1 when no data is pending, or
 * -EBUSY when async reception owns the receiver (SCR.RIE set).
 */
static int uart_ra_sci_poll_in(const struct device *dev, unsigned char *c)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

	if (IS_ENABLED(CONFIG_UART_ASYNC_API) && cfg->regs->SCR_b.RIE) {
		/* This function cannot be used if async reception was enabled */
		return -EBUSY;
	}

	/* FIFO mode checks the receive FIFO fill count; non-FIFO mode checks
	 * the RDRF (receive data register full) flag.
	 */
	if (IS_ENABLED(CONFIG_UART_RA_SCI_UART_FIFO_ENABLE) && data->sci.fifo_depth > 0
		    ? cfg->regs->FDR_b.R == 0U
		    : cfg->regs->SSR_b.RDRF == 0U) {
		/* There are no characters available to read. */
		return -1;
	}

	/* got a character: FRDRL in FIFO mode, RDR otherwise */
	*c = IS_ENABLED(CONFIG_UART_RA_SCI_UART_FIFO_ENABLE) && data->sci.fifo_depth > 0
		     ? cfg->regs->FRDRL
		     : cfg->regs->RDR;

	return 0;
}
105
/*
 * Poll-write one character, busy-waiting until the hardware can accept it.
 */
static void uart_ra_sci_poll_out(const struct device *dev, unsigned char c)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth > 0) {
		/* Wait until the TX FIFO drains below the 0x8 threshold.
		 * NOTE(review): 0x8 is presumably half the FIFO depth —
		 * confirm against the SCI hardware manual.
		 */
		while (cfg->regs->FDR_b.T > 0x8) {
		}
		cfg->regs->FTDRL = c;
	} else
#endif
	{
		/* Wait for the transmit data register to be empty. */
		while (cfg->regs->SSR_b.TDRE == 0U) {
		}
		cfg->regs->TDR = c;
	}
}
124
/*
 * Report and clear pending receive errors (overrun, parity, framing).
 *
 * Returns a bitmask of UART_ERROR_* flags. Each detected error flag is
 * cleared by the read-modify-write that zeroes only the bits seen set.
 */
static int uart_ra_sci_err_check(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int errors = 0;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth > 0) {
		/* FIFO mode: error flags live in SSR_FIFO. */
		const uint8_t status = cfg->regs->SSR_FIFO;
		uint8_t ssr_fifo = 0;

		if (status & R_SCI0_SSR_FIFO_ORER_Msk) {
			errors |= UART_ERROR_OVERRUN;
			ssr_fifo |= R_SCI0_SSR_FIFO_ORER_Msk;
		}
		if (status & R_SCI0_SSR_FIFO_PER_Msk) {
			errors |= UART_ERROR_PARITY;
			ssr_fifo |= R_SCI0_SSR_FIFO_PER_Msk;
		}
		if (status & R_SCI0_SSR_FIFO_FER_Msk) {
			errors |= UART_ERROR_FRAMING;
			ssr_fifo |= R_SCI0_SSR_FIFO_FER_Msk;
		}
		/* Clear only the flags that were observed set. */
		cfg->regs->SSR_FIFO &= ~ssr_fifo;
	} else
#endif
	{
		/* Non-FIFO mode: error flags live in SSR. */
		const uint8_t status = cfg->regs->SSR;
		uint8_t ssr = 0;

		if (status & R_SCI0_SSR_ORER_Msk) {
			errors |= UART_ERROR_OVERRUN;
			ssr |= R_SCI0_SSR_ORER_Msk;
		}
		if (status & R_SCI0_SSR_PER_Msk) {
			errors |= UART_ERROR_PARITY;
			ssr |= R_SCI0_SSR_PER_Msk;
		}
		if (status & R_SCI0_SSR_FER_Msk) {
			errors |= UART_ERROR_FRAMING;
			ssr |= R_SCI0_SSR_FER_Msk;
		}
		/* Clear only the flags that were observed set. */
		cfg->regs->SSR &= ~ssr;
	}

	return errors;
}
172
uart_ra_sci_apply_config(const struct uart_config * config,struct st_uart_cfg * fsp_config,struct st_sci_uart_extended_cfg * fsp_config_extend,struct st_baud_setting_t * fsp_baud_setting)173 static int uart_ra_sci_apply_config(const struct uart_config *config,
174 struct st_uart_cfg *fsp_config,
175 struct st_sci_uart_extended_cfg *fsp_config_extend,
176 struct st_baud_setting_t *fsp_baud_setting)
177 {
178 fsp_err_t fsp_err;
179
180 fsp_err = R_SCI_UART_BaudCalculate(config->baudrate, true, 5000, fsp_baud_setting);
181 if (fsp_err != FSP_SUCCESS) {
182 LOG_DBG("drivers: uart: baud calculate error");
183 return -EINVAL;
184 }
185
186 switch (config->parity) {
187 case UART_CFG_PARITY_NONE:
188 fsp_config->parity = UART_PARITY_OFF;
189 break;
190 case UART_CFG_PARITY_ODD:
191 fsp_config->parity = UART_PARITY_ODD;
192 break;
193 case UART_CFG_PARITY_EVEN:
194 fsp_config->parity = UART_PARITY_EVEN;
195 break;
196 case UART_CFG_PARITY_MARK:
197 return -ENOTSUP;
198 case UART_CFG_PARITY_SPACE:
199 return -ENOTSUP;
200 default:
201 return -EINVAL;
202 }
203
204 switch (config->stop_bits) {
205 case UART_CFG_STOP_BITS_0_5:
206 return -ENOTSUP;
207 case UART_CFG_STOP_BITS_1:
208 fsp_config->stop_bits = UART_STOP_BITS_1;
209 break;
210 case UART_CFG_STOP_BITS_1_5:
211 return -ENOTSUP;
212 case UART_CFG_STOP_BITS_2:
213 fsp_config->stop_bits = UART_STOP_BITS_2;
214 break;
215 default:
216 return -EINVAL;
217 }
218
219 switch (config->data_bits) {
220 case UART_CFG_DATA_BITS_5:
221 return -ENOTSUP;
222 case UART_CFG_DATA_BITS_6:
223 return -ENOTSUP;
224 case UART_CFG_DATA_BITS_7:
225 fsp_config->data_bits = UART_DATA_BITS_7;
226 break;
227 case UART_CFG_DATA_BITS_8:
228 fsp_config->data_bits = UART_DATA_BITS_8;
229 break;
230 case UART_CFG_DATA_BITS_9:
231 fsp_config->data_bits = UART_DATA_BITS_9;
232 break;
233 default:
234 return -EINVAL;
235 }
236
237 #if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
238 fsp_config_extend->rx_fifo_trigger = 0x8;
239 #endif
240
241 switch (config->flow_ctrl) {
242 case UART_CFG_FLOW_CTRL_NONE:
243 fsp_config_extend->flow_control = 0;
244 fsp_config_extend->rs485_setting.enable = false;
245 break;
246 case UART_CFG_FLOW_CTRL_RTS_CTS:
247 fsp_config_extend->flow_control = SCI_UART_FLOW_CONTROL_HARDWARE_CTSRTS;
248 fsp_config_extend->rs485_setting.enable = false;
249 break;
250 case UART_CFG_FLOW_CTRL_DTR_DSR:
251 return -ENOTSUP;
252 case UART_CFG_FLOW_CTRL_RS485:
253 /* TODO: implement this config */
254 return -ENOTSUP;
255 default:
256 return -EINVAL;
257 }
258
259 return 0;
260 }
261
262 #ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
263
uart_ra_sci_configure(const struct device * dev,const struct uart_config * config)264 static int uart_ra_sci_configure(const struct device *dev, const struct uart_config *config)
265 {
266 int err;
267 fsp_err_t fsp_err;
268 struct uart_ra_sci_data *data = dev->data;
269
270 err = uart_ra_sci_apply_config(config, &data->fsp_config, &data->fsp_config_extend,
271 &data->fsp_baud_setting);
272 if (err) {
273 return err;
274 }
275
276 fsp_err = R_SCI_UART_Close(&data->sci);
277 fsp_err |= R_SCI_UART_Open(&data->sci, &data->fsp_config);
278 if (fsp_err != FSP_SUCCESS) {
279 LOG_DBG("drivers: serial: uart configure failed");
280 return -EIO;
281 }
282 memcpy(&data->uart_config, config, sizeof(*config));
283
284 return 0;
285 }
286
uart_ra_sci_config_get(const struct device * dev,struct uart_config * cfg)287 static int uart_ra_sci_config_get(const struct device *dev, struct uart_config *cfg)
288 {
289 struct uart_ra_sci_data *data = dev->data;
290
291 memcpy(cfg, &data->uart_config, sizeof(*cfg));
292 return 0;
293 }
294
295 #endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */
296
297 #ifdef CONFIG_UART_INTERRUPT_DRIVEN
298
uart_ra_sci_fifo_fill(const struct device * dev,const uint8_t * tx_data,int size)299 static int uart_ra_sci_fifo_fill(const struct device *dev, const uint8_t *tx_data, int size)
300 {
301 struct uart_ra_sci_data *data = dev->data;
302 const struct uart_ra_sci_config *cfg = dev->config;
303 int num_tx = 0U;
304
305 #if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
306 if (data->sci.fifo_depth != 0) {
307 while ((size - num_tx > 0) && cfg->regs->FDR_b.T < data->sci.fifo_depth) {
308 /* Send a character (8bit , parity none) */
309 cfg->regs->FTDRL = tx_data[num_tx++];
310 }
311 cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_TDFE_TEND;
312 } else
313 #endif
314 {
315 if (size > 0 && cfg->regs->SSR_b.TDRE) {
316 /* Send a character (8bit , parity none) */
317 cfg->regs->TDR = tx_data[num_tx++];
318 }
319 };
320
321 return num_tx;
322 }
323
/*
 * Interrupt-driven RX drain: pull as many bytes as are available, up to
 * `size`. Returns the number of bytes written into rx_data.
 */
static int uart_ra_sci_fifo_read(const struct device *dev, uint8_t *rx_data, const int size)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int num_rx = 0U;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		while ((size - num_rx > 0) && cfg->regs->FDR_b.R > 0) {
			/* Receive a character (8bit , parity none) */
			rx_data[num_rx++] = cfg->regs->FRDRL;
		}
		/* Clear the latched DR/RDF flags so the next RX can interrupt. */
		cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_DR_RDF;
	} else
#endif
	{
		if (size > 0 && cfg->regs->SSR_b.RDRF) {
			/* Receive a character (8bit , parity none) */
			rx_data[num_rx++] = cfg->regs->RDR;
		}
		/* RDRF is cleared even when nothing was read (size == 0). */
		cfg->regs->SSR &= (uint8_t)~R_SCI0_SSR_RDRF_Msk;
	}

	return num_rx;
}
349
/*
 * Enable TX interrupts: clear any stale TDFE/TEND (or TDRE/TEND) flags,
 * then set TIE and TEIE in SCR.
 */
static void uart_ra_sci_irq_tx_enable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_TDFE_TEND;
	} else
#endif
	{
		/* NOTE(review): plain assignment here vs `&=` in the FIFO
		 * branch — SSR flags can only be cleared (not set) by a
		 * write, presumably making both forms equivalent; confirm
		 * against the SCI hardware manual.
		 */
		cfg->regs->SSR = (uint8_t)~SCI_UART_SSR_TDRE_TEND;
	}

	cfg->regs->SCR |= (R_SCI0_SCR_TIE_Msk | R_SCI0_SCR_TEIE_Msk);
}
365
uart_ra_sci_irq_tx_disable(const struct device * dev)366 static void uart_ra_sci_irq_tx_disable(const struct device *dev)
367 {
368 const struct uart_ra_sci_config *cfg = dev->config;
369
370 cfg->regs->SCR &= ~(R_SCI0_SCR_TIE_Msk | R_SCI0_SCR_TEIE_Msk);
371 }
372
/*
 * TX ready when the TX interrupt is enabled AND the last irq_update()
 * snapshot (data->ssr) latched TDFE (FIFO) / TDRE (non-FIFO).
 */
static int uart_ra_sci_irq_tx_ready(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int ret;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		ret = (cfg->regs->SCR_b.TIE == 1U) && (data->ssr & R_SCI0_SSR_FIFO_TDFE_Msk);
	} else
#endif
	{
		ret = (cfg->regs->SCR_b.TIE == 1U) && (data->ssr & R_SCI0_SSR_TDRE_Msk);
	}

	return ret;
}
390
uart_ra_sci_irq_tx_complete(const struct device * dev)391 static int uart_ra_sci_irq_tx_complete(const struct device *dev)
392 {
393 struct uart_ra_sci_data *data = dev->data;
394 const struct uart_ra_sci_config *cfg = dev->config;
395
396 return (cfg->regs->SCR_b.TEIE == 1U) && (data->ssr & BIT(R_SCI0_SSR_TEND_Pos));
397 }
398
/*
 * Enable the RX interrupt: clear any stale receive flags first so a
 * pre-existing latched flag does not fire immediately, then set RIE.
 */
static void uart_ra_sci_irq_rx_enable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		cfg->regs->SSR_FIFO &= (uint8_t) ~(SCI_UART_SSR_FIFO_DR_RDF);
	} else
#endif
	{
		cfg->regs->SSR_b.RDRF = 0U;
	}
	cfg->regs->SCR_b.RIE = 1U;
}
414
uart_ra_sci_irq_rx_disable(const struct device * dev)415 static void uart_ra_sci_irq_rx_disable(const struct device *dev)
416 {
417 const struct uart_ra_sci_config *cfg = dev->config;
418
419 cfg->regs->SCR_b.RIE = 0U;
420 }
421
/*
 * RX ready when the RX interrupt is enabled AND the last irq_update()
 * snapshot latched DR/RDF (FIFO) / RDRF (non-FIFO).
 */
static int uart_ra_sci_irq_rx_ready(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int ret;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		ret = (cfg->regs->SCR_b.RIE == 1U) && (data->ssr & SCI_UART_SSR_FIFO_DR_RDF);
	} else
#endif
	{
		ret = (cfg->regs->SCR_b.RIE == 1U) && (data->ssr & R_SCI0_SSR_RDRF_Msk);
	}

	return ret;
}
439
uart_ra_sci_irq_err_enable(const struct device * dev)440 static void uart_ra_sci_irq_err_enable(const struct device *dev)
441 {
442 struct uart_ra_sci_data *data = dev->data;
443
444 NVIC_EnableIRQ(data->fsp_config.eri_irq);
445 }
446
uart_ra_sci_irq_err_disable(const struct device * dev)447 static void uart_ra_sci_irq_err_disable(const struct device *dev)
448 {
449 struct uart_ra_sci_data *data = dev->data;
450
451 NVIC_DisableIRQ(data->fsp_config.eri_irq);
452 }
453
/*
 * Report whether any enabled TX or RX/error interrupt condition is
 * currently asserted (live register state, not the irq_update snapshot).
 */
static int uart_ra_sci_irq_is_pending(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	uint8_t scr;
	uint8_t ssr;
	int ret;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		scr = cfg->regs->SCR;
		ssr = cfg->regs->SSR_FIFO;
		/* TX pending: TIE enabled and TEND/TDFE set.
		 * RX pending: RIE enabled and any data or error flag set.
		 */
		ret = ((scr & R_SCI0_SCR_TIE_Msk) &&
		       (ssr & (R_SCI0_SSR_FIFO_TEND_Msk | R_SCI0_SSR_FIFO_TDFE_Msk))) ||
		      ((scr & R_SCI0_SCR_RIE_Msk) &&
		       ((ssr & (R_SCI0_SSR_FIFO_RDF_Msk | R_SCI0_SSR_FIFO_DR_Msk |
				R_SCI0_SSR_FIFO_FER_Msk | R_SCI0_SSR_FIFO_ORER_Msk |
				R_SCI0_SSR_FIFO_PER_Msk))));
	} else
#endif
	{
		scr = cfg->regs->SCR;
		ssr = cfg->regs->SSR;
		/* Same logic against the non-FIFO flag layout. */
		ret = ((scr & R_SCI0_SCR_TIE_Msk) &&
		       (ssr & (R_SCI0_SSR_TEND_Msk | R_SCI0_SSR_TDRE_Msk))) ||
		      ((scr & R_SCI0_SCR_RIE_Msk) &&
		       (ssr & (R_SCI0_SSR_RDRF_Msk | R_SCI0_SSR_PER_Msk | R_SCI0_SSR_FER_Msk |
			       R_SCI0_SSR_ORER_Msk)));
	}

	return ret;
}
486
uart_ra_sci_irq_update(const struct device * dev)487 static int uart_ra_sci_irq_update(const struct device *dev)
488 {
489 struct uart_ra_sci_data *data = dev->data;
490 const struct uart_ra_sci_config *cfg = dev->config;
491
492 #if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
493 if (data->sci.fifo_depth != 0) {
494 data->ssr = cfg->regs->SSR_FIFO;
495 uint8_t ssr = data->ssr ^ (R_SCI0_SSR_FIFO_ORER_Msk | R_SCI0_SSR_FIFO_FER_Msk |
496 R_SCI0_SSR_FIFO_PER_Msk);
497 cfg->regs->SSR_FIFO &= ssr;
498 } else
499 #endif
500 {
501 data->ssr = cfg->regs->SSR;
502 uint8_t ssr =
503 data->ssr ^ (R_SCI0_SSR_ORER_Msk | R_SCI0_SSR_FER_Msk | R_SCI0_SSR_PER_Msk);
504 cfg->regs->SSR_FIFO &= ssr;
505 }
506
507 return 1;
508 }
509
/*
 * Register the interrupt-driven API callback. When exclusive callbacks are
 * configured, the async callback is dropped so only one API is active.
 */
static void uart_ra_sci_irq_callback_set(const struct device *dev, uart_irq_callback_user_data_t cb,
					 void *cb_data)
{
	struct uart_ra_sci_data *data = dev->data;

	data->user_cb_data = cb_data;
	data->user_cb = cb;

#if CONFIG_UART_EXCLUSIVE_API_CALLBACKS
	/* The interrupt-driven and async APIs are mutually exclusive. */
	data->async_user_cb_data = NULL;
	data->async_user_cb = NULL;
#endif
}
523
524 #endif /* CONFIG_UART_INTERRUPT_DRIVEN */
525
526 #ifdef CONFIG_UART_ASYNC_API
527
/*
 * Map an FSP error code to a negative errno (0 on success).
 *
 * Unrecognized FSP codes collapse to -EINVAL.
 */
static int fsp_err_to_errno(fsp_err_t fsp_err)
{
	switch (fsp_err) {
	case FSP_SUCCESS: /* named constant instead of magic `case 0` */
		return 0;
	case FSP_ERR_INVALID_ARGUMENT:
		return -EINVAL;
	case FSP_ERR_NOT_OPEN:
		return -EIO;
	case FSP_ERR_IN_USE:
		return -EBUSY;
	case FSP_ERR_UNSUPPORTED:
		return -ENOTSUP;
	default:
		return -EINVAL;
	}
}
545
/*
 * Register the async API callback. When exclusive callbacks are configured,
 * the interrupt-driven callback is dropped so only one API is active.
 */
static int uart_ra_sci_async_callback_set(const struct device *dev, uart_callback_t cb,
					  void *cb_data)
{
	struct uart_ra_sci_data *data = dev->data;

	data->async_user_cb_data = cb_data;
	data->async_user_cb = cb;

#if CONFIG_UART_EXCLUSIVE_API_CALLBACKS
	/* The async and interrupt-driven APIs are mutually exclusive. */
	data->user_cb_data = NULL;
	data->user_cb = NULL;
#endif
	return 0;
}
560
/*
 * Start an async transmission of `len` bytes from `buf`. If a finite
 * timeout is given, schedule the TX-abort work item to fire after it.
 */
static int uart_ra_sci_async_tx(const struct device *dev, const uint8_t *buf, size_t len,
				int32_t timeout)
{
	struct uart_ra_sci_data *data = dev->data;
	int ret = fsp_err_to_errno(R_SCI_UART_Write(&data->sci, buf, len));

	if (ret != 0) {
		return ret;
	}

	/* Remember the total length for the TX_DONE / TX_ABORTED events. */
	data->tx_buf_cap = len;

	if (timeout != 0 && timeout != SYS_FOREVER_US) {
		k_work_reschedule(&data->tx_timeout, Z_TIMEOUT_US(timeout));
	}

	return 0;
}
578
async_user_callback(const struct device * dev,struct uart_event * event)579 static inline void async_user_callback(const struct device *dev, struct uart_event *event)
580 {
581 struct uart_ra_sci_data *data = dev->data;
582
583 if (data->async_user_cb) {
584 data->async_user_cb(dev, event, data->async_user_cb_data);
585 }
586 }
587
async_rx_release_buf(const struct device * dev)588 static inline void async_rx_release_buf(const struct device *dev)
589 {
590 struct uart_ra_sci_data *data = dev->data;
591
592 struct uart_event event = {
593 .type = UART_RX_BUF_RELEASED,
594 .data.rx.buf = (uint8_t *)data->rx_buffer,
595 };
596 async_user_callback(dev, &event);
597 data->rx_buffer = NULL;
598 data->rx_buf_offset = 0;
599 data->rx_buf_len = 0;
600 data->rx_buf_cap = 0;
601 }
602
async_rx_release_next_buf(const struct device * dev)603 static inline void async_rx_release_next_buf(const struct device *dev)
604 {
605 struct uart_ra_sci_data *data = dev->data;
606 struct uart_event event = {
607 .type = UART_RX_BUF_RELEASED,
608 .data.rx.buf = (uint8_t *)data->rx_next_buf,
609 };
610 async_user_callback(dev, &event);
611 data->rx_next_buf = NULL;
612 }
613
async_rx_req_buf(const struct device * dev)614 static inline void async_rx_req_buf(const struct device *dev)
615 {
616 struct uart_event event = {
617 .type = UART_RX_BUF_REQUEST,
618 };
619
620 async_user_callback(dev, &event);
621 }
622
/*
 * Emit UART_RX_DISABLED, then quiesce the receiver: mask RIE and clear
 * the latched receive flags so a later rx_enable starts clean.
 */
static inline void async_rx_disable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	struct uart_event event = {
		.type = UART_RX_DISABLED,
	};
	async_user_callback(dev, &event);

	/* Disable the RXI request and clear the status flag to be ready for the next reception */
	cfg->regs->SCR_b.RIE = 0;
#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_DR_RDF;
	} else
#endif
	{
		cfg->regs->SSR_b.RDRF = 0;
	}
}
643
async_rx_ready(const struct device * dev)644 static inline void async_rx_ready(const struct device *dev)
645 {
646 struct uart_ra_sci_data *data = dev->data;
647
648 if (!data->rx_buf_len) {
649 return;
650 }
651
652 struct uart_event event = {
653 .type = UART_RX_RDY,
654 .data.rx.buf = (uint8_t *)data->rx_buffer,
655 .data.rx.offset = data->rx_buf_offset,
656 .data.rx.len = data->rx_buf_len,
657 };
658 async_user_callback(data->dev, &event);
659 data->rx_buf_offset += data->rx_buf_len;
660 data->rx_buf_len = 0;
661 }
662
/*
 * Safely stop the transmitter: mask TX interrupts, wait for any in-flight
 * frame to finish, then clear SCR.TE.
 */
static inline void disable_tx(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

	/* Transmit interrupts must be disabled to start with. */
	cfg->regs->SCR &= (uint8_t) ~(R_SCI0_SCR_TIE_Msk | R_SCI0_SCR_TEIE_Msk);

	/*
	 * Make sure no transmission is in progress. Clearing SCR.TE while
	 * the TEND flag is 0 causes the SCI peripheral to work abnormally.
	 * (NOTE(review): the original comment referenced CCR0/CSR, which
	 * are register names of a different SCI variant; this driver uses
	 * SCR/SSR.)
	 */
	while (IS_ENABLED(CONFIG_UART_RA_SCI_UART_FIFO_ENABLE) && data->sci.fifo_depth
		       ? cfg->regs->SSR_FIFO_b.TEND != 1U
		       : cfg->regs->SSR_b.TEND != 1U) {
	}

	cfg->regs->SCR_b.TE = 0;
}
682
enable_tx(const struct device * dev)683 static inline void enable_tx(const struct device *dev)
684 {
685 const struct uart_ra_sci_config *cfg = dev->config;
686
687 cfg->regs->SCR_b.TE = 1;
688 }
689
/*
 * Abort an in-progress async transmission.
 *
 * Quiesces the transmitter, aborts the FSP TX transfer, queries the DTC
 * for how many bytes were still outstanding, and reports the number of
 * bytes actually sent via UART_TX_ABORTED. The transmitter is re-enabled
 * on every exit path. Returns 0, -EFAULT when no TX is active, or -EIO
 * on FSP failure.
 */
static int uart_ra_sci_async_tx_abort(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	int err = 0;

	/* p_tx_src is non-NULL only while a transmission is active. */
	if (!data->sci.p_tx_src) {
		return -EFAULT;
	}

	disable_tx(dev);

	if (FSP_SUCCESS != R_SCI_UART_Abort(&data->sci, UART_DIR_TX)) {
		LOG_DBG("drivers: serial: uart abort tx failed");
		err = -EIO;
		goto unlock;
	}
	transfer_properties_t tx_properties = {0};

	/* How many bytes had NOT yet been transferred by the DTC. */
	if (FSP_SUCCESS != R_DTC_InfoGet(data->tx_transfer.p_ctrl, &tx_properties)) {
		LOG_DBG("drivers: serial: uart abort tx failed");
		err = -EIO;
		goto unlock;
	}
	struct uart_event event = {
		.type = UART_TX_ABORTED,
		.data.tx.buf = (uint8_t *)data->sci.p_tx_src,
		/* Bytes sent = total queued - bytes still pending. */
		.data.tx.len = data->tx_buf_cap - tx_properties.transfer_length_remaining,
	};
	async_user_callback(dev, &event);
	k_work_cancel_delayable(&data->tx_timeout);

unlock:
	enable_tx(dev);
	return err;
}
725
/*
 * Start async reception into `buf` (length `len`), with an inter-byte
 * timeout in microseconds (SYS_FOREVER_US / 0 disables the timeout).
 *
 * Runs under irq_lock so the RXI ISR cannot observe half-initialized
 * bookkeeping. Returns 0, -EAGAIN if reception is already active, or a
 * negative errno from the FSP read setup.
 */
static int uart_ra_sci_async_rx_enable(const struct device *dev, uint8_t *buf, size_t len,
				       int32_t timeout)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int err = 0;
	unsigned int key = irq_lock();

	/* Only one RX session at a time. */
	if (data->rx_buffer) {
		err = -EAGAIN;
		goto unlock;
	}

	/* Clear stale error flags before arming reception. */
#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth) {
		cfg->regs->SSR_FIFO &= (uint8_t) ~(SCI_UART_SSR_FIFO_ERR_MSK);
	} else
#endif
	{
		cfg->regs->SSR = (uint8_t)~SCI_UART_SSR_ERR_MSK;
	}

	err = fsp_err_to_errno(R_SCI_UART_Read(&data->sci, buf, len));
	if (err) {
		goto unlock;
	}

	data->rx_timeout = timeout;
	data->rx_buffer = buf;
	data->rx_buf_cap = len;
	data->rx_buf_len = 0;
	data->rx_buf_offset = 0;

	/* Call buffer request user callback */
	async_rx_req_buf(dev);
	cfg->regs->SCR_b.RIE = 1;

unlock:
	irq_unlock(key);
	return err;
}
767
/*
 * Provide the next RX buffer in response to UART_RX_BUF_REQUEST.
 *
 * Runs under irq_lock so the RXI/event path cannot consume rx_next_buf
 * while it is being set. Returns -EBUSY if a next buffer is already
 * queued, matching the uart_rx_buf_rsp() API contract.
 */
static int uart_ra_sci_async_rx_buf_rsp(const struct device *dev, uint8_t *buf, size_t len)
{
	struct uart_ra_sci_data *data = dev->data;
	unsigned int key = irq_lock();
	int err = 0;

	if (data->rx_next_buf != NULL) {
		err = -EBUSY;
	} else {
		data->rx_next_buf = buf;
		data->rx_next_buf_cap = len;
	}

	irq_unlock(key);
	return err;
}
777
/*
 * Stop async reception: cancel the inter-byte timeout, stop the FSP read,
 * flush any received-but-unreported bytes (RX_RDY), release both buffers,
 * and emit UART_RX_DISABLED. Returns -EAGAIN if reception is not active,
 * -EIO on FSP failure.
 */
static int uart_ra_sci_async_rx_disable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	uint32_t remaining_byte = 0;
	int err = 0;
	unsigned int key = irq_lock();

	if (!data->rx_buffer) {
		err = -EAGAIN;
		goto unlock;
	}

	k_work_cancel_delayable(&data->rx_timeout_work);
	if (FSP_SUCCESS != R_SCI_UART_ReadStop(&data->sci, &remaining_byte)) {
		LOG_DBG("drivers: serial: uart stop reading failed");
		err = -EIO;
		goto unlock;
	}

	/* Order matters: report pending data before releasing buffers. */
	async_rx_ready(dev);
	async_rx_release_buf(dev);
	async_rx_release_next_buf(dev);
	async_rx_disable(dev);

unlock:
	irq_unlock(key);
	return err;
}
806
/*
 * Report an RX error to the application via UART_RX_STOPPED, cancelling
 * the inter-byte timeout first.
 */
static inline void async_evt_rx_err(const struct device *dev, enum uart_rx_stop_reason reason)
{
	struct uart_ra_sci_data *data = dev->data;

	k_work_cancel_delayable(&data->rx_timeout_work);
	struct uart_event event = {
		.type = UART_RX_STOPPED,
		.data.rx_stop.reason = reason,
		.data.rx_stop.data.buf = (uint8_t *)data->sci.p_rx_dest,
		.data.rx_stop.data.offset = 0,
		/* Bytes received into the current buffer so far: capacity
		 * minus already-reported offset minus what the FSP driver
		 * still expects (rx_dest_bytes).
		 */
		.data.rx_stop.data.len =
			data->rx_buf_cap - data->rx_buf_offset - data->sci.rx_dest_bytes,
	};
	async_user_callback(dev, &event);
}
822
/*
 * Current RX buffer is full: report the data (RX_RDY), release the buffer,
 * and either roll over to the queued next buffer (restarting the FSP read
 * and requesting a further buffer) or disable reception if none is queued.
 */
static inline void async_evt_rx_complete(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	unsigned int key = irq_lock();

	async_rx_ready(dev);
	async_rx_release_buf(dev);
	if (data->rx_next_buf) {
		/* Swap in the pre-supplied buffer and continue receiving. */
		data->rx_buffer = data->rx_next_buf;
		data->rx_buf_offset = 0;
		data->rx_buf_cap = data->rx_next_buf_cap;
		data->rx_next_buf = NULL;
		R_SCI_UART_Read(&data->sci, data->rx_buffer, data->rx_buf_cap);
		async_rx_req_buf(dev);
	} else {
		async_rx_disable(dev);
	}
	irq_unlock(key);
}
842
async_evt_tx_done(const struct device * dev)843 static inline void async_evt_tx_done(const struct device *dev)
844 {
845 struct uart_ra_sci_data *data = dev->data;
846
847 k_work_cancel_delayable(&data->tx_timeout);
848 struct uart_event event = {
849 .type = UART_TX_DONE,
850 .data.tx.buf = (uint8_t *)data->sci.p_tx_src,
851 .data.tx.len = data->tx_buf_cap,
852 };
853 async_user_callback(dev, &event);
854 }
855
uart_ra_sci_callback_adapter(struct st_uart_callback_arg * fsp_args)856 static void uart_ra_sci_callback_adapter(struct st_uart_callback_arg *fsp_args)
857 {
858 const struct device *dev = fsp_args->p_context;
859
860 switch (fsp_args->event) {
861 case UART_EVENT_TX_COMPLETE:
862 async_evt_tx_done(dev);
863 break;
864 case UART_EVENT_RX_COMPLETE:
865 async_evt_rx_complete(dev);
866 break;
867 case UART_EVENT_ERR_PARITY:
868 async_evt_rx_err(dev, UART_ERROR_PARITY);
869 break;
870 case UART_EVENT_ERR_FRAMING:
871 async_evt_rx_err(dev, UART_ERROR_FRAMING);
872 break;
873 case UART_EVENT_ERR_OVERFLOW:
874 async_evt_rx_err(dev, UART_ERROR_OVERRUN);
875 break;
876 case UART_EVENT_BREAK_DETECT:
877 async_evt_rx_err(dev, UART_BREAK);
878 break;
879 case UART_EVENT_TX_DATA_EMPTY:
880 case UART_EVENT_RX_CHAR:
881 break;
882 }
883 }
884
uart_ra_sci_rx_timeout_handler(struct k_work * work)885 static void uart_ra_sci_rx_timeout_handler(struct k_work *work)
886 {
887 struct k_work_delayable *dwork = k_work_delayable_from_work(work);
888 struct uart_ra_sci_data *data =
889 CONTAINER_OF(dwork, struct uart_ra_sci_data, rx_timeout_work);
890 unsigned int key = irq_lock();
891
892 async_rx_ready(data->dev);
893 irq_unlock(key);
894 }
895
uart_ra_sci_tx_timeout_handler(struct k_work * work)896 static void uart_ra_sci_tx_timeout_handler(struct k_work *work)
897 {
898 struct k_work_delayable *dwork = k_work_delayable_from_work(work);
899 struct uart_ra_sci_data *data = CONTAINER_OF(dwork, struct uart_ra_sci_data, tx_timeout);
900
901 uart_ra_sci_async_tx_abort(data->dev);
902 }
903
904 #endif /* CONFIG_UART_ASYNC_API */
905
/* Zephyr UART driver API vtable; optional API groups are compiled in per
 * Kconfig selection.
 */
static DEVICE_API(uart, uart_ra_sci_driver_api) = {
	/* Polling API (always available). */
	.poll_in = uart_ra_sci_poll_in,
	.poll_out = uart_ra_sci_poll_out,
	.err_check = uart_ra_sci_err_check,
#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
	.configure = uart_ra_sci_configure,
	.config_get = uart_ra_sci_config_get,
#endif
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	.fifo_fill = uart_ra_sci_fifo_fill,
	.fifo_read = uart_ra_sci_fifo_read,
	.irq_tx_enable = uart_ra_sci_irq_tx_enable,
	.irq_tx_disable = uart_ra_sci_irq_tx_disable,
	.irq_tx_ready = uart_ra_sci_irq_tx_ready,
	.irq_rx_enable = uart_ra_sci_irq_rx_enable,
	.irq_rx_disable = uart_ra_sci_irq_rx_disable,
	.irq_tx_complete = uart_ra_sci_irq_tx_complete,
	.irq_rx_ready = uart_ra_sci_irq_rx_ready,
	.irq_err_enable = uart_ra_sci_irq_err_enable,
	.irq_err_disable = uart_ra_sci_irq_err_disable,
	.irq_is_pending = uart_ra_sci_irq_is_pending,
	.irq_update = uart_ra_sci_irq_update,
	.irq_callback_set = uart_ra_sci_irq_callback_set,
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
#if CONFIG_UART_ASYNC_API
	.callback_set = uart_ra_sci_async_callback_set,
	.tx = uart_ra_sci_async_tx,
	.tx_abort = uart_ra_sci_async_tx_abort,
	.rx_enable = uart_ra_sci_async_rx_enable,
	.rx_buf_rsp = uart_ra_sci_async_rx_buf_rsp,
	.rx_disable = uart_ra_sci_async_rx_disable,
#endif /* CONFIG_UART_ASYNC_API */
};
939
uart_ra_sci_init(const struct device * dev)940 static int uart_ra_sci_init(const struct device *dev)
941 {
942 const struct uart_ra_sci_config *config = dev->config;
943 struct uart_ra_sci_data *data = dev->data;
944 int ret;
945 fsp_err_t fsp_err;
946
947 /* Configure dt provided device signals when available */
948 ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
949 if (ret < 0) {
950 return ret;
951 }
952
953 /* Setup fsp sci_uart setting */
954 ret = uart_ra_sci_apply_config(&data->uart_config, &data->fsp_config,
955 &data->fsp_config_extend, &data->fsp_baud_setting);
956 if (ret != 0) {
957 return ret;
958 }
959
960 data->fsp_config_extend.p_baud_setting = &data->fsp_baud_setting;
961 #if defined(CONFIG_UART_ASYNC_API)
962 data->fsp_config.p_callback = uart_ra_sci_callback_adapter;
963 data->fsp_config.p_context = dev;
964 k_work_init_delayable(&data->tx_timeout, uart_ra_sci_tx_timeout_handler);
965 k_work_init_delayable(&data->rx_timeout_work, uart_ra_sci_rx_timeout_handler);
966 #endif /* defined(CONFIG_UART_ASYNC_API) */
967 data->fsp_config.p_extend = &data->fsp_config_extend;
968
969 fsp_err = R_SCI_UART_Open(&data->sci, &data->fsp_config);
970 if (fsp_err != FSP_SUCCESS) {
971 LOG_DBG("drivers: uart: initialize failed");
972 return -EIO;
973 }
974 irq_disable(data->fsp_config.eri_irq);
975 return 0;
976 }
977
978 #if CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API
/*
 * RXI (receive data) interrupt. Interrupt-driven mode defers entirely to
 * the user callback; async mode re-arms the inter-byte timeout, counts the
 * byte, and chains into the FSP ISR only when the buffer is full (the DTC
 * moves the data itself). The ICU IR flag is cleared on every path.
 */
static void uart_ra_sci_rxi_isr(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		goto out;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	/* Restart the inter-byte timeout on every received byte. */
	if (data->rx_timeout != SYS_FOREVER_US && data->rx_timeout != 0) {
		k_work_reschedule(&data->rx_timeout_work, Z_TIMEOUT_US(data->rx_timeout));
	}
	data->rx_buf_len++;
	if (data->rx_buf_len + data->rx_buf_offset == data->rx_buf_cap) {
		/* Buffer full: let the FSP ISR finish the transfer and
		 * raise UART_EVENT_RX_COMPLETE (it clears IR itself).
		 */
		sci_uart_rxi_isr();
	} else {
		goto out;
	}
#endif
out:
	R_ICU->IELSR_b[data->fsp_config.rxi_irq].IR = 0U;
}
1003
/*
 * TXI (transmit data empty) interrupt: interrupt-driven mode calls the
 * user callback and clears the ICU IR flag; otherwise the FSP ISR handles
 * the async transfer (and clears IR itself).
 */
static void uart_ra_sci_txi_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		R_ICU->IELSR_b[data->fsp_config.txi_irq].IR = 0U;
		return;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_uart_txi_isr();
#endif
}
1020
/*
 * TEI (transmit end) interrupt: interrupt-driven mode calls the user
 * callback and clears the ICU IR flag; otherwise the FSP ISR completes
 * the async transmission.
 */
static void uart_ra_sci_tei_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		R_ICU->IELSR_b[data->fsp_config.tei_irq].IR = 0U;
		return;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_uart_tei_isr();
#endif
}
1037
/*
 * ERI (receive error: overrun/framing/parity) interrupt handler.
 *
 * Interrupt-driven mode: dispatch to the registered user callback and
 * acknowledge the IRQ. Async mode: delegate to the FSP error ISR.
 */
static void uart_ra_sci_eri_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		R_ICU->IELSR_b[data->fsp_config.eri_irq].IR = 0U;
		return;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_uart_eri_isr();
#endif
}
1054 #endif
1055
/* Map a SCI channel number to the ICU event-source enum for each interrupt line. */
#define EVENT_SCI_RXI(channel) BSP_PRV_IELS_ENUM(CONCAT(EVENT_SCI, channel, _RXI))
#define EVENT_SCI_TXI(channel) BSP_PRV_IELS_ENUM(CONCAT(EVENT_SCI, channel, _TXI))
#define EVENT_SCI_TEI(channel) BSP_PRV_IELS_ENUM(CONCAT(EVENT_SCI, channel, _TEI))
#define EVENT_SCI_ERI(channel) BSP_PRV_IELS_ENUM(CONCAT(EVENT_SCI, channel, _ERI))
1060
#if CONFIG_UART_ASYNC_API
/*
 * Static DTC (Data Transfer Controller) descriptors for async RX/TX.
 * RX: fixed source (SCI data register) -> incrementing destination buffer,
 * one interrupt per byte so the driver can count received data.
 * TX: incrementing source buffer -> fixed destination (SCI data register),
 * single interrupt at end of transfer.
 * Destination/source pointers and lengths are filled in at runtime.
 */
#define UART_RA_SCI_ASYNC_INIT(index)                                                              \
	.rx_transfer_info =                                                                        \
		{                                                                                  \
			.transfer_settings_word_b.dest_addr_mode = TRANSFER_ADDR_MODE_INCREMENTED,\
			.transfer_settings_word_b.repeat_area = TRANSFER_REPEAT_AREA_DESTINATION,  \
			.transfer_settings_word_b.irq = TRANSFER_IRQ_EACH,                         \
			.transfer_settings_word_b.chain_mode = TRANSFER_CHAIN_MODE_DISABLED,       \
			.transfer_settings_word_b.src_addr_mode = TRANSFER_ADDR_MODE_FIXED,        \
			.transfer_settings_word_b.size = TRANSFER_SIZE_1_BYTE,                     \
			.transfer_settings_word_b.mode = TRANSFER_MODE_NORMAL,                     \
			.p_dest = (void *)NULL,                                                    \
			.p_src = (void const *)NULL,                                               \
			.num_blocks = 0,                                                           \
			.length = 0,                                                               \
		},                                                                                 \
	.rx_transfer_cfg_extend = {.activation_source =                                            \
					   DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq)},       \
	.rx_transfer_cfg =                                                                         \
		{                                                                                  \
			.p_info = &uart_ra_sci_data_##index.rx_transfer_info,                      \
			.p_extend = &uart_ra_sci_data_##index.rx_transfer_cfg_extend,              \
		},                                                                                 \
	.rx_transfer =                                                                             \
		{                                                                                  \
			.p_ctrl = &uart_ra_sci_data_##index.rx_transfer_ctrl,                      \
			.p_cfg = &uart_ra_sci_data_##index.rx_transfer_cfg,                        \
			.p_api = &g_transfer_on_dtc,                                               \
		},                                                                                 \
	.tx_transfer_info =                                                                        \
		{                                                                                  \
			.transfer_settings_word_b.dest_addr_mode = TRANSFER_ADDR_MODE_FIXED,       \
			.transfer_settings_word_b.repeat_area = TRANSFER_REPEAT_AREA_SOURCE,       \
			.transfer_settings_word_b.irq = TRANSFER_IRQ_END,                          \
			.transfer_settings_word_b.chain_mode = TRANSFER_CHAIN_MODE_DISABLED,       \
			.transfer_settings_word_b.src_addr_mode = TRANSFER_ADDR_MODE_INCREMENTED,  \
			.transfer_settings_word_b.size = TRANSFER_SIZE_1_BYTE,                     \
			.transfer_settings_word_b.mode = TRANSFER_MODE_NORMAL,                     \
			.p_dest = (void *)NULL,                                                    \
			.p_src = (void const *)NULL,                                               \
			.num_blocks = 0,                                                           \
			.length = 0,                                                               \
		},                                                                                 \
	.tx_transfer_cfg_extend = {.activation_source =                                            \
					   DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq)},       \
	.tx_transfer_cfg =                                                                         \
		{                                                                                  \
			.p_info = &uart_ra_sci_data_##index.tx_transfer_info,                      \
			.p_extend = &uart_ra_sci_data_##index.tx_transfer_cfg_extend,              \
		},                                                                                 \
	.tx_transfer = {                                                                           \
		.p_ctrl = &uart_ra_sci_data_##index.tx_transfer_ctrl,                              \
		.p_cfg = &uart_ra_sci_data_##index.tx_transfer_cfg,                                \
		.p_api = &g_transfer_on_dtc,                                                       \
	},

/* Hook the DTC transfer instances into the FSP UART configuration at init time. */
#define UART_RA_SCI_DTC_INIT(index)                                                                \
	{                                                                                          \
		uart_ra_sci_data_##index.fsp_config.p_transfer_rx =                                \
			&uart_ra_sci_data_##index.rx_transfer;                                     \
		uart_ra_sci_data_##index.fsp_config.p_transfer_tx =                                \
			&uart_ra_sci_data_##index.tx_transfer;                                     \
	}

#else
/* Async API disabled: no DTC descriptors and nothing to hook up. */
#define UART_RA_SCI_ASYNC_INIT(index)
#define UART_RA_SCI_DTC_INIT(index)
#endif
1129
#if CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API
/*
 * Route the instance's ICU interrupt slots to this SCI channel's event
 * sources, connect the Zephyr ISRs, and enable rxi/txi/tei.
 * Note: eri is connected but deliberately NOT enabled here; the common
 * init routine calls irq_disable() on it after R_SCI_UART_Open().
 */
#define UART_RA_SCI_IRQ_INIT(index)                                                                \
	{                                                                                          \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq)] =                    \
			EVENT_SCI_RXI(DT_INST_PROP(index, channel));                               \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq)] =                    \
			EVENT_SCI_TXI(DT_INST_PROP(index, channel));                               \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq)] =                    \
			EVENT_SCI_TEI(DT_INST_PROP(index, channel));                               \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq)] =                    \
			EVENT_SCI_ERI(DT_INST_PROP(index, channel));                               \
                                                                                                   \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, priority),                  \
			    uart_ra_sci_rxi_isr, DEVICE_DT_INST_GET(index), 0);                    \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, priority),                  \
			    uart_ra_sci_txi_isr, DEVICE_DT_INST_GET(index), 0);                    \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, priority),                  \
			    uart_ra_sci_tei_isr, DEVICE_DT_INST_GET(index), 0);                    \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, priority),                  \
			    uart_ra_sci_eri_isr, DEVICE_DT_INST_GET(index), 0);                    \
                                                                                                   \
		irq_enable(DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq));                       \
		irq_enable(DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq));                       \
		irq_enable(DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq));                       \
	}
#else
#define UART_RA_SCI_IRQ_INIT(index)
#endif
1162
/* Select RTS/CTS flow control when the devicetree node sets hw-flow-control. */
#define FLOW_CTRL_PARAMETER(index)                                                                 \
	COND_CODE_1(DT_INST_PROP(index, hw_flow_control),                                          \
		    (UART_CFG_FLOW_CTRL_RTS_CTS), (UART_CFG_FLOW_CTRL_NONE))
1166
1167 #define UART_RA_SCI_INIT(index) \
1168 PINCTRL_DT_DEFINE(DT_INST_PARENT(index)); \
1169 static const struct uart_ra_sci_config uart_ra_sci_config_##index = { \
1170 .pcfg = PINCTRL_DT_DEV_CONFIG_GET(DT_INST_PARENT(index)), \
1171 .regs = (R_SCI0_Type *)DT_REG_ADDR(DT_INST_PARENT(index)), \
1172 }; \
1173 \
1174 static struct uart_ra_sci_data uart_ra_sci_data_##index = { \
1175 .uart_config = \
1176 { \
1177 .baudrate = DT_INST_PROP(index, current_speed), \
1178 .parity = UART_CFG_PARITY_NONE, \
1179 .stop_bits = UART_CFG_STOP_BITS_1, \
1180 .data_bits = UART_CFG_DATA_BITS_8, \
1181 .flow_ctrl = FLOW_CTRL_PARAMETER(index), \
1182 }, \
1183 .fsp_config = \
1184 { \
1185 .channel = DT_INST_PROP(index, channel), \
1186 .rxi_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, priority), \
1187 .rxi_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq), \
1188 .txi_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, priority), \
1189 .txi_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq), \
1190 .tei_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, priority), \
1191 .tei_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq), \
1192 .eri_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, priority), \
1193 .eri_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq), \
1194 }, \
1195 .fsp_config_extend = {}, \
1196 .fsp_baud_setting = {}, \
1197 .dev = DEVICE_DT_INST_GET(index), \
1198 UART_RA_SCI_ASYNC_INIT(index)}; \
1199 \
1200 static int uart_ra_sci_init##index(const struct device *dev) \
1201 { \
1202 UART_RA_SCI_IRQ_INIT(index); \
1203 UART_RA_SCI_DTC_INIT(index); \
1204 int err = uart_ra_sci_init(dev); \
1205 if (err != 0) { \
1206 return err; \
1207 } \
1208 return 0; \
1209 } \
1210 DEVICE_DT_INST_DEFINE(index, uart_ra_sci_init##index, NULL, &uart_ra_sci_data_##index, \
1211 &uart_ra_sci_config_##index, PRE_KERNEL_1, \
1212 CONFIG_SERIAL_INIT_PRIORITY, &uart_ra_sci_driver_api);
1213
1214 DT_INST_FOREACH_STATUS_OKAY(UART_RA_SCI_INIT)
1215