1 /*
2 * Copyright (c) 2024 Renesas Electronics Corporation
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #define DT_DRV_COMPAT renesas_ra_sci_uart
8
9 #include <zephyr/kernel.h>
10 #include <zephyr/drivers/uart.h>
11 #include <zephyr/drivers/pinctrl.h>
12 #include <zephyr/irq.h>
13 #include <soc.h>
14 #include "r_sci_uart.h"
15 #include "r_dtc.h"
16
17 #include <zephyr/logging/log.h>
18 LOG_MODULE_REGISTER(ra_sci_uart);
19
/* Composite SSR / SSR_FIFO status masks used when testing and clearing flags */
#define SCI_UART_SSR_FIFO_DR_RDF (R_SCI0_SSR_FIFO_DR_Msk | R_SCI0_SSR_FIFO_RDF_Msk)
#define SCI_UART_SSR_FIFO_TDFE_TEND (R_SCI0_SSR_FIFO_TDFE_Msk | R_SCI0_SSR_FIFO_TEND_Msk)
#define SCI_UART_SSR_TDRE_TEND (R_SCI0_SSR_TDRE_Msk | R_SCI0_SSR_TEND_Msk)
#define SCI_UART_SSR_ERR_MSK (R_SCI0_SSR_ORER_Msk | R_SCI0_SSR_FER_Msk | R_SCI0_SSR_PER_Msk)
#define SCI_UART_SSR_FIFO_ERR_MSK \
	(R_SCI0_SSR_FIFO_ORER_Msk | R_SCI0_SSR_FIFO_FER_Msk | R_SCI0_SSR_FIFO_PER_Msk)

#if defined(CONFIG_UART_ASYNC_API)
/*
 * FSP-provided SCI UART interrupt service routines; invoked from the Zephyr
 * ISR wrappers at the bottom of this file when the async API is active.
 */
void sci_uart_rxi_isr(void);
void sci_uart_txi_isr(void);
void sci_uart_tei_isr(void);
void sci_uart_eri_isr(void);
#endif
33
/* Constant (ROM-able) per-instance configuration */
struct uart_ra_sci_config {
	/* Devicetree-provided pin configuration */
	const struct pinctrl_dev_config *pcfg;

	/* Base address of the SCI peripheral register block */
	R_SCI0_Type * const regs;
};
39
/* Mutable per-instance driver state */
struct uart_ra_sci_data {
	/* Back-pointer to the owning device, used from work-queue handlers */
	const struct device *dev;
	/* FSP sci_uart control block */
	struct st_sci_uart_instance_ctrl sci;
	/* Last successfully applied Zephyr-level configuration */
	struct uart_config uart_config;
	/* FSP configuration derived from uart_config */
	struct st_uart_cfg fsp_config;
	struct st_sci_uart_extended_cfg fsp_config_extend;
	struct st_baud_setting_t fsp_baud_setting;
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	/* Application callback for the interrupt-driven API */
	uart_irq_callback_user_data_t user_cb;
	void *user_cb_data;
	/* SSR/SSR_FIFO snapshot taken by uart_ra_sci_irq_update() */
	uint32_t ssr;
#endif
#if defined(CONFIG_UART_ASYNC_API)
	/* Application callback for the async API */
	uart_callback_t async_user_cb;
	void *async_user_cb_data;

	/* RX inactivity timeout work and bookkeeping for the current buffer */
	struct k_work_delayable rx_timeout_work;
	size_t rx_timeout;
	size_t rx_buf_len;    /* bytes received but not yet reported */
	size_t rx_buf_offset; /* bytes already reported to the application */
	size_t rx_buf_cap;    /* total capacity of rx_buffer */
	uint8_t *rx_buffer;
	/* Next buffer supplied via uart_rx_buf_rsp() */
	size_t rx_next_buf_cap;
	uint8_t *rx_next_buf;

	/* DTC transfer instance backing async reception */
	struct st_transfer_instance rx_transfer;
	struct st_dtc_instance_ctrl rx_transfer_ctrl;
	struct st_transfer_info rx_transfer_info;
	struct st_transfer_cfg rx_transfer_cfg;
	struct st_dtc_extended_cfg rx_transfer_cfg_extend;

	/* TX watchdog work and length of the in-flight transmission */
	struct k_work_delayable tx_timeout;
	size_t tx_buf_cap;

	/* DTC transfer instance backing async transmission */
	struct st_transfer_instance tx_transfer;
	struct st_dtc_instance_ctrl tx_transfer_ctrl;
	struct st_transfer_info tx_transfer_info;
	struct st_transfer_cfg tx_transfer_cfg;
	struct st_dtc_extended_cfg tx_transfer_cfg_extend;
#endif
};
81
/*
 * Poll for a single received character.
 *
 * Returns 0 with the character stored in *c, -1 when nothing is pending,
 * or -EBUSY when async reception owns the receiver (RIE set).
 */
static int uart_ra_sci_poll_in(const struct device *dev, unsigned char *c)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

	if (IS_ENABLED(CONFIG_UART_ASYNC_API) && cfg->regs->SCR_b.RIE) {
		/* This function cannot be used if async reception was enabled */
		return -EBUSY;
	}

	/* FIFO channels report pending bytes in FDR.R, others in SSR.RDRF */
	if (IS_ENABLED(CONFIG_UART_RA_SCI_UART_FIFO_ENABLE) && data->sci.fifo_depth > 0
		    ? cfg->regs->FDR_b.R == 0U
		    : cfg->regs->SSR_b.RDRF == 0U) {
		/* There are no characters available to read. */
		return -1;
	}

	/* got a character */
	*c = IS_ENABLED(CONFIG_UART_RA_SCI_UART_FIFO_ENABLE) && data->sci.fifo_depth > 0
		     ? cfg->regs->FRDRL
		     : cfg->regs->RDR;

	return 0;
}
106
/*
 * Busy-wait until the transmitter can accept a byte, then write it.
 */
static void uart_ra_sci_poll_out(const struct device *dev, unsigned char c)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth > 0) {
		/*
		 * Wait until the TX FIFO has room below the 0x8 fill level.
		 * NOTE(review): 0x8 appears to match the FIFO trigger level set
		 * in uart_ra_sci_apply_config() — confirm against the SCI FDR
		 * register description.
		 */
		while (cfg->regs->FDR_b.T > 0x8) {
		}
		cfg->regs->FTDRL = c;
	} else
#endif
	{
		/* Wait for the transmit data register to be empty */
		while (cfg->regs->SSR_b.TDRE == 0U) {
		}
		cfg->regs->TDR = c;
	}
}
125
/*
 * Read and clear the receive error flags (overrun, parity, framing).
 *
 * Returns a bitmask of UART_ERROR_* values; clearing is done by writing
 * back the status register with only the observed error bits masked off,
 * per the SCI read-then-write-0 flag protocol.
 */
static int uart_ra_sci_err_check(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int errors = 0;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth > 0) {
		const uint8_t status = cfg->regs->SSR_FIFO;
		uint8_t ssr_fifo = 0;

		if (status & R_SCI0_SSR_FIFO_ORER_Msk) {
			errors |= UART_ERROR_OVERRUN;
			ssr_fifo |= R_SCI0_SSR_FIFO_ORER_Msk;
		}
		if (status & R_SCI0_SSR_FIFO_PER_Msk) {
			errors |= UART_ERROR_PARITY;
			ssr_fifo |= R_SCI0_SSR_FIFO_PER_Msk;
		}
		if (status & R_SCI0_SSR_FIFO_FER_Msk) {
			errors |= UART_ERROR_FRAMING;
			ssr_fifo |= R_SCI0_SSR_FIFO_FER_Msk;
		}
		/* Clear exactly the error flags that were observed set */
		cfg->regs->SSR_FIFO &= ~ssr_fifo;
	} else
#endif
	{
		const uint8_t status = cfg->regs->SSR;
		uint8_t ssr = 0;

		if (status & R_SCI0_SSR_ORER_Msk) {
			errors |= UART_ERROR_OVERRUN;
			ssr |= R_SCI0_SSR_ORER_Msk;
		}
		if (status & R_SCI0_SSR_PER_Msk) {
			errors |= UART_ERROR_PARITY;
			ssr |= R_SCI0_SSR_PER_Msk;
		}
		if (status & R_SCI0_SSR_FER_Msk) {
			errors |= UART_ERROR_FRAMING;
			ssr |= R_SCI0_SSR_FER_Msk;
		}
		/* Clear exactly the error flags that were observed set */
		cfg->regs->SSR &= ~ssr;
	}

	return errors;
}
173
uart_ra_sci_apply_config(const struct uart_config * config,struct st_uart_cfg * fsp_config,struct st_sci_uart_extended_cfg * fsp_config_extend,struct st_baud_setting_t * fsp_baud_setting)174 static int uart_ra_sci_apply_config(const struct uart_config *config,
175 struct st_uart_cfg *fsp_config,
176 struct st_sci_uart_extended_cfg *fsp_config_extend,
177 struct st_baud_setting_t *fsp_baud_setting)
178 {
179 fsp_err_t fsp_err;
180
181 fsp_err = R_SCI_UART_BaudCalculate(config->baudrate, true, 5000, fsp_baud_setting);
182 if (fsp_err != FSP_SUCCESS) {
183 LOG_DBG("drivers: uart: baud calculate error");
184 return -EINVAL;
185 }
186
187 switch (config->parity) {
188 case UART_CFG_PARITY_NONE:
189 fsp_config->parity = UART_PARITY_OFF;
190 break;
191 case UART_CFG_PARITY_ODD:
192 fsp_config->parity = UART_PARITY_ODD;
193 break;
194 case UART_CFG_PARITY_EVEN:
195 fsp_config->parity = UART_PARITY_EVEN;
196 break;
197 case UART_CFG_PARITY_MARK:
198 return -ENOTSUP;
199 case UART_CFG_PARITY_SPACE:
200 return -ENOTSUP;
201 default:
202 return -EINVAL;
203 }
204
205 switch (config->stop_bits) {
206 case UART_CFG_STOP_BITS_0_5:
207 return -ENOTSUP;
208 case UART_CFG_STOP_BITS_1:
209 fsp_config->stop_bits = UART_STOP_BITS_1;
210 break;
211 case UART_CFG_STOP_BITS_1_5:
212 return -ENOTSUP;
213 case UART_CFG_STOP_BITS_2:
214 fsp_config->stop_bits = UART_STOP_BITS_2;
215 break;
216 default:
217 return -EINVAL;
218 }
219
220 switch (config->data_bits) {
221 case UART_CFG_DATA_BITS_5:
222 return -ENOTSUP;
223 case UART_CFG_DATA_BITS_6:
224 return -ENOTSUP;
225 case UART_CFG_DATA_BITS_7:
226 fsp_config->data_bits = UART_DATA_BITS_7;
227 break;
228 case UART_CFG_DATA_BITS_8:
229 fsp_config->data_bits = UART_DATA_BITS_8;
230 break;
231 case UART_CFG_DATA_BITS_9:
232 fsp_config->data_bits = UART_DATA_BITS_9;
233 break;
234 default:
235 return -EINVAL;
236 }
237
238 #if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
239 fsp_config_extend->rx_fifo_trigger = 0x8;
240 #endif
241
242 switch (config->flow_ctrl) {
243 case UART_CFG_FLOW_CTRL_NONE:
244 fsp_config_extend->flow_control = 0;
245 fsp_config_extend->rs485_setting.enable = false;
246 break;
247 case UART_CFG_FLOW_CTRL_RTS_CTS:
248 fsp_config_extend->flow_control = SCI_UART_FLOW_CONTROL_HARDWARE_CTSRTS;
249 fsp_config_extend->rs485_setting.enable = false;
250 break;
251 case UART_CFG_FLOW_CTRL_DTR_DSR:
252 return -ENOTSUP;
253 case UART_CFG_FLOW_CTRL_RS485:
254 /* TODO: implement this config */
255 return -ENOTSUP;
256 default:
257 return -EINVAL;
258 }
259
260 return 0;
261 }
262
263 #ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
264
/*
 * Runtime reconfiguration: rebuild the FSP config from the requested Zephyr
 * settings, then cycle the FSP driver (close + open) to apply them.
 *
 * On success the new configuration is cached for uart_ra_sci_config_get().
 */
static int uart_ra_sci_configure(const struct device *dev, const struct uart_config *config)
{
	int err;
	fsp_err_t fsp_err;
	struct uart_ra_sci_data *data = dev->data;

	err = uart_ra_sci_apply_config(config, &data->fsp_config, &data->fsp_config_extend,
				       &data->fsp_baud_setting);
	if (err) {
		return err;
	}

	/* OR-combine the results: any non-zero code from either call fails */
	fsp_err = R_SCI_UART_Close(&data->sci);
	fsp_err |= R_SCI_UART_Open(&data->sci, &data->fsp_config);
	if (fsp_err != FSP_SUCCESS) {
		LOG_DBG("drivers: serial: uart configure failed");
		return -EIO;
	}
	memcpy(&data->uart_config, config, sizeof(*config));

	return 0;
}
287
uart_ra_sci_config_get(const struct device * dev,struct uart_config * cfg)288 static int uart_ra_sci_config_get(const struct device *dev, struct uart_config *cfg)
289 {
290 struct uart_ra_sci_data *data = dev->data;
291
292 memcpy(cfg, &data->uart_config, sizeof(*cfg));
293 return 0;
294 }
295
296 #endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */
297
298 #ifdef CONFIG_UART_INTERRUPT_DRIVEN
299
uart_ra_sci_fifo_fill(const struct device * dev,const uint8_t * tx_data,int size)300 static int uart_ra_sci_fifo_fill(const struct device *dev, const uint8_t *tx_data, int size)
301 {
302 struct uart_ra_sci_data *data = dev->data;
303 const struct uart_ra_sci_config *cfg = dev->config;
304 int num_tx = 0U;
305
306 #if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
307 if (data->sci.fifo_depth != 0) {
308 while ((size - num_tx > 0) && cfg->regs->FDR_b.T < data->sci.fifo_depth) {
309 /* Send a character (8bit , parity none) */
310 cfg->regs->FTDRL = tx_data[num_tx++];
311 }
312 cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_TDFE_TEND;
313 } else
314 #endif
315 {
316 if (size > 0 && cfg->regs->SSR_b.TDRE) {
317 /* Send a character (8bit , parity none) */
318 cfg->regs->TDR = tx_data[num_tx++];
319 }
320 };
321
322 return num_tx;
323 }
324
/*
 * Drain received bytes into rx_data, up to `size` bytes.
 *
 * Returns the number of bytes read; at most one byte on channels without
 * a FIFO. The corresponding "data available" status flags are cleared.
 */
static int uart_ra_sci_fifo_read(const struct device *dev, uint8_t *rx_data, const int size)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int num_rx = 0U;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		while ((size - num_rx > 0) && cfg->regs->FDR_b.R > 0) {
			/* Receive a character (8bit , parity none) */
			rx_data[num_rx++] = cfg->regs->FRDRL;
		}
		/* Clear the DR/RDF flags now that the FIFO has been drained */
		cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_DR_RDF;
	} else
#endif
	{
		if (size > 0 && cfg->regs->SSR_b.RDRF) {
			/* Receive a character (8bit , parity none) */
			rx_data[num_rx++] = cfg->regs->RDR;
		}
		/* NOTE(review): RDRF is cleared even when nothing was read
		 * (size == 0 or RDRF already clear) — confirm this is intended.
		 */
		cfg->regs->SSR &= (uint8_t)~R_SCI0_SSR_RDRF_Msk;
	}

	return num_rx;
}
350
/*
 * Enable the TX (data empty) and TX-end interrupt requests, first clearing
 * any stale TDFE/TEND status so a fresh interrupt is generated.
 */
static void uart_ra_sci_irq_tx_enable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_TDFE_TEND;
	} else
#endif
	{
		/*
		 * NOTE(review): this is a plain store (`=`) while the FIFO
		 * branch uses read-modify-write (`&=`) — verify against the
		 * SCI SSR flag-clearing protocol that the blanket write of 1s
		 * to the other bits is harmless.
		 */
		cfg->regs->SSR = (uint8_t)~SCI_UART_SSR_TDRE_TEND;
	}

	cfg->regs->SCR |= (R_SCI0_SCR_TIE_Msk | R_SCI0_SCR_TEIE_Msk);
}
366
uart_ra_sci_irq_tx_disable(const struct device * dev)367 static void uart_ra_sci_irq_tx_disable(const struct device *dev)
368 {
369 const struct uart_ra_sci_config *cfg = dev->config;
370
371 cfg->regs->SCR &= ~(R_SCI0_SCR_TIE_Msk | R_SCI0_SCR_TEIE_Msk);
372 }
373
/*
 * True when TX interrupts are enabled and the last irq_update() snapshot
 * showed the transmit data register / FIFO ready for more data.
 */
static int uart_ra_sci_irq_tx_ready(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int ret;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		ret = (cfg->regs->SCR_b.TIE == 1U) && (data->ssr & R_SCI0_SSR_FIFO_TDFE_Msk);
	} else
#endif
	{
		ret = (cfg->regs->SCR_b.TIE == 1U) && (data->ssr & R_SCI0_SSR_TDRE_Msk);
	}

	return ret;
}
391
/*
 * True when TX-end interrupts are enabled and the last irq_update()
 * snapshot showed transmission fully finished (TEND set).
 */
static int uart_ra_sci_irq_tx_complete(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

	return (cfg->regs->SCR_b.TEIE == 1U) && (data->ssr & BIT(R_SCI0_SSR_TEND_Pos));
}
399
/*
 * Enable the receive interrupt, first clearing stale "data available"
 * status so only new reception raises an interrupt.
 */
static void uart_ra_sci_irq_rx_enable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		cfg->regs->SSR_FIFO &= (uint8_t) ~(SCI_UART_SSR_FIFO_DR_RDF);
	} else
#endif
	{
		cfg->regs->SSR_b.RDRF = 0U;
	}
	cfg->regs->SCR_b.RIE = 1U;
}
415
uart_ra_sci_irq_rx_disable(const struct device * dev)416 static void uart_ra_sci_irq_rx_disable(const struct device *dev)
417 {
418 const struct uart_ra_sci_config *cfg = dev->config;
419
420 cfg->regs->SCR_b.RIE = 0U;
421 }
422
/*
 * True when RX interrupts are enabled and the last irq_update() snapshot
 * showed received data pending (FIFO DR/RDF or RDRF).
 */
static int uart_ra_sci_irq_rx_ready(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int ret;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		ret = (cfg->regs->SCR_b.RIE == 1U) && (data->ssr & SCI_UART_SSR_FIFO_DR_RDF);
	} else
#endif
	{
		ret = (cfg->regs->SCR_b.RIE == 1U) && (data->ssr & R_SCI0_SSR_RDRF_Msk);
	}

	return ret;
}
440
/*
 * Enable error (ERI) interrupts by unmasking the line at the NVIC; the
 * peripheral-side enable is managed by the FSP driver.
 */
static void uart_ra_sci_irq_err_enable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;

	NVIC_EnableIRQ(data->fsp_config.eri_irq);
}
447
/*
 * Disable error (ERI) interrupts by masking the line at the NVIC.
 */
static void uart_ra_sci_irq_err_disable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;

	NVIC_DisableIRQ(data->fsp_config.eri_irq);
}
454
/*
 * True when an enabled TX or RX interrupt condition is currently asserted
 * in the live status register (not the irq_update() snapshot).
 */
static int uart_ra_sci_irq_is_pending(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	uint8_t scr;
	uint8_t ssr;
	int ret;

#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		scr = cfg->regs->SCR;
		ssr = cfg->regs->SSR_FIFO;
		/* TX pending: TIE with TEND/TDFE; RX pending: RIE with data or error */
		ret = ((scr & R_SCI0_SCR_TIE_Msk) &&
		       (ssr & (R_SCI0_SSR_FIFO_TEND_Msk | R_SCI0_SSR_FIFO_TDFE_Msk))) ||
		      ((scr & R_SCI0_SCR_RIE_Msk) &&
		       ((ssr & (R_SCI0_SSR_FIFO_RDF_Msk | R_SCI0_SSR_FIFO_DR_Msk |
				R_SCI0_SSR_FIFO_FER_Msk | R_SCI0_SSR_FIFO_ORER_Msk |
				R_SCI0_SSR_FIFO_PER_Msk))));
	} else
#endif
	{
		scr = cfg->regs->SCR;
		ssr = cfg->regs->SSR;
		/* TX pending: TIE with TEND/TDRE; RX pending: RIE with data or error */
		ret = ((scr & R_SCI0_SCR_TIE_Msk) &&
		       (ssr & (R_SCI0_SSR_TEND_Msk | R_SCI0_SSR_TDRE_Msk))) ||
		      ((scr & R_SCI0_SCR_RIE_Msk) &&
		       (ssr & (R_SCI0_SSR_RDRF_Msk | R_SCI0_SSR_PER_Msk | R_SCI0_SSR_FER_Msk |
			       R_SCI0_SSR_ORER_Msk)));
	}

	return ret;
}
487
uart_ra_sci_irq_update(const struct device * dev)488 static int uart_ra_sci_irq_update(const struct device *dev)
489 {
490 struct uart_ra_sci_data *data = dev->data;
491 const struct uart_ra_sci_config *cfg = dev->config;
492
493 #if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
494 if (data->sci.fifo_depth != 0) {
495 data->ssr = cfg->regs->SSR_FIFO;
496 uint8_t ssr = data->ssr ^ (R_SCI0_SSR_FIFO_ORER_Msk | R_SCI0_SSR_FIFO_FER_Msk |
497 R_SCI0_SSR_FIFO_PER_Msk);
498 cfg->regs->SSR_FIFO &= ssr;
499 } else
500 #endif
501 {
502 data->ssr = cfg->regs->SSR;
503 uint8_t ssr =
504 data->ssr ^ (R_SCI0_SSR_ORER_Msk | R_SCI0_SSR_FER_Msk | R_SCI0_SSR_PER_Msk);
505 cfg->regs->SSR_FIFO &= ssr;
506 }
507
508 return 1;
509 }
510
/*
 * Register the interrupt-driven API callback; with exclusive-callback
 * builds this also revokes any previously set async callback.
 */
static void uart_ra_sci_irq_callback_set(const struct device *dev, uart_irq_callback_user_data_t cb,
					 void *cb_data)
{
	struct uart_ra_sci_data *data = dev->data;

	data->user_cb_data = cb_data;
	data->user_cb = cb;

#if CONFIG_UART_EXCLUSIVE_API_CALLBACKS
	/* Only one of the two callback APIs may be active at a time */
	data->async_user_cb_data = NULL;
	data->async_user_cb = NULL;
#endif
}
524
525 #endif /* CONFIG_UART_INTERRUPT_DRIVEN */
526
527 #ifdef CONFIG_UART_ASYNC_API
528
/*
 * Map an FSP HAL result code onto a POSIX-style negative errno value.
 * Unrecognized codes are reported as -EINVAL.
 */
static int fsp_err_to_errno(fsp_err_t fsp_err)
{
	if (fsp_err == 0) {
		return 0;
	}
	if (fsp_err == FSP_ERR_INVALID_ARGUMENT) {
		return -EINVAL;
	}
	if (fsp_err == FSP_ERR_NOT_OPEN) {
		return -EIO;
	}
	if (fsp_err == FSP_ERR_IN_USE) {
		return -EBUSY;
	}
	if (fsp_err == FSP_ERR_UNSUPPORTED) {
		return -ENOTSUP;
	}
	return -EINVAL;
}
546
/*
 * Register the async API callback; with exclusive-callback builds this
 * also revokes any previously set interrupt-driven callback.
 */
static int uart_ra_sci_async_callback_set(const struct device *dev, uart_callback_t cb,
					  void *cb_data)
{
	struct uart_ra_sci_data *data = dev->data;

	data->async_user_cb_data = cb_data;
	data->async_user_cb = cb;

#if CONFIG_UART_EXCLUSIVE_API_CALLBACKS
	/* Only one of the two callback APIs may be active at a time */
	data->user_cb_data = NULL;
	data->user_cb = NULL;
#endif
	return 0;
}
561
/*
 * Start an async transmission via the FSP driver (DTC-assisted) and arm
 * the TX watchdog unless the timeout is 0 or SYS_FOREVER_US.
 */
static int uart_ra_sci_async_tx(const struct device *dev, const uint8_t *buf, size_t len,
				int32_t timeout)
{
	struct uart_ra_sci_data *data = dev->data;
	int err;

	err = fsp_err_to_errno(R_SCI_UART_Write(&data->sci, buf, len));
	if (err) {
		return err;
	}
	/* Remembered so TX_DONE/TX_ABORTED events can report the length */
	data->tx_buf_cap = len;
	if (timeout != SYS_FOREVER_US && timeout != 0) {
		k_work_reschedule(&data->tx_timeout, Z_TIMEOUT_US(timeout));
	}

	return 0;
}
579
async_user_callback(const struct device * dev,struct uart_event * event)580 static inline void async_user_callback(const struct device *dev, struct uart_event *event)
581 {
582 struct uart_ra_sci_data *data = dev->data;
583
584 if (data->async_user_cb) {
585 data->async_user_cb(dev, event, data->async_user_cb_data);
586 }
587 }
588
async_rx_release_buf(const struct device * dev)589 static inline void async_rx_release_buf(const struct device *dev)
590 {
591 struct uart_ra_sci_data *data = dev->data;
592
593 struct uart_event event = {
594 .type = UART_RX_BUF_RELEASED,
595 .data.rx.buf = (uint8_t *)data->rx_buffer,
596 };
597 async_user_callback(dev, &event);
598 data->rx_buffer = NULL;
599 data->rx_buf_offset = 0;
600 data->rx_buf_len = 0;
601 data->rx_buf_cap = 0;
602 }
603
async_rx_release_next_buf(const struct device * dev)604 static inline void async_rx_release_next_buf(const struct device *dev)
605 {
606 struct uart_ra_sci_data *data = dev->data;
607 struct uart_event event = {
608 .type = UART_RX_BUF_RELEASED,
609 .data.rx.buf = (uint8_t *)data->rx_next_buf,
610 };
611 async_user_callback(dev, &event);
612 data->rx_next_buf = NULL;
613 }
614
async_rx_req_buf(const struct device * dev)615 static inline void async_rx_req_buf(const struct device *dev)
616 {
617 struct uart_event event = {
618 .type = UART_RX_BUF_REQUEST,
619 };
620
621 async_user_callback(dev, &event);
622 }
623
/*
 * Emit UART_RX_DISABLED, then disable the RXI request and clear the
 * pending "data available" status so the receiver is ready for the next
 * uart_rx_enable() call.
 */
static inline void async_rx_disable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	struct uart_event event = {
		.type = UART_RX_DISABLED,
	};
	async_user_callback(dev, &event);

	/* Disable the RXI request and clear the status flag to be ready for the next reception */
	cfg->regs->SCR_b.RIE = 0;
#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth != 0) {
		cfg->regs->SSR_FIFO &= (uint8_t)~SCI_UART_SSR_FIFO_DR_RDF;
	} else
#endif
	{
		cfg->regs->SSR_b.RDRF = 0;
	}
}
644
/*
 * Emit UART_RX_RDY for the bytes received since the last report, then
 * advance the offset. No-op when nothing new has arrived.
 */
static inline void async_rx_ready(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;

	if (!data->rx_buf_len) {
		return;
	}

	struct uart_event event = {
		.type = UART_RX_RDY,
		.data.rx.buf = (uint8_t *)data->rx_buffer,
		.data.rx.offset = data->rx_buf_offset,
		.data.rx.len = data->rx_buf_len,
	};
	/* data->dev and dev refer to the same instance here */
	async_user_callback(data->dev, &event);
	data->rx_buf_offset += data->rx_buf_len;
	data->rx_buf_len = 0;
}
663
/*
 * Safely stop the transmitter: mask TX interrupts, busy-wait for the
 * current frame to finish (TEND), then clear TE.
 */
static inline void disable_tx(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;

	/* Transmit interrupts must be disabled to start with. */
	cfg->regs->SCR &= (uint8_t) ~(R_SCI0_SCR_TIE_Msk | R_SCI0_SCR_TEIE_Msk);

	/*
	 * Make sure no transmission is in progress. Setting CCR0_b.TE to 0 when CSR_b.TEND
	 * is 0 causes SCI peripheral to work abnormally.
	 */
	while (IS_ENABLED(CONFIG_UART_RA_SCI_UART_FIFO_ENABLE) && data->sci.fifo_depth
		       ? cfg->regs->SSR_FIFO_b.TEND != 1U
		       : cfg->regs->SSR_b.TEND != 1U) {
	}

	cfg->regs->SCR_b.TE = 0;
}
683
enable_tx(const struct device * dev)684 static inline void enable_tx(const struct device *dev)
685 {
686 const struct uart_ra_sci_config *cfg = dev->config;
687
688 cfg->regs->SCR_b.TE = 1;
689 }
690
/*
 * Abort an in-flight async transmission.
 *
 * Stops the transmitter, cancels the FSP/DTC transfer, reports the number
 * of bytes already sent via UART_TX_ABORTED, and re-enables the
 * transmitter before returning.
 *
 * Returns -EFAULT when no transmission is in progress, -EIO on HAL
 * failure, 0 on success.
 */
static int uart_ra_sci_async_tx_abort(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	int err = 0;

	if (!data->sci.p_tx_src) {
		return -EFAULT;
	}

	disable_tx(dev);

	if (FSP_SUCCESS != R_SCI_UART_Abort(&data->sci, UART_DIR_TX)) {
		LOG_DBG("drivers: serial: uart abort tx failed");
		err = -EIO;
		goto unlock;
	}
	transfer_properties_t tx_properties = {0};

	/* Query the DTC for how many bytes were still pending */
	if (FSP_SUCCESS != R_DTC_InfoGet(data->tx_transfer.p_ctrl, &tx_properties)) {
		LOG_DBG("drivers: serial: uart abort tx failed");
		err = -EIO;
		goto unlock;
	}
	struct uart_event event = {
		.type = UART_TX_ABORTED,
		.data.tx.buf = (uint8_t *)data->sci.p_tx_src,
		.data.tx.len = data->tx_buf_cap - tx_properties.transfer_length_remaining,
	};
	async_user_callback(dev, &event);
	k_work_cancel_delayable(&data->tx_timeout);

unlock:
	/* Always bring the transmitter back up, even on error paths */
	enable_tx(dev);
	return err;
}
726
/*
 * Start async reception into `buf`: clear stale error flags, kick off the
 * FSP/DTC read, record the bookkeeping and enable the RX interrupt.
 *
 * Returns -EAGAIN when reception is already active.
 */
static int uart_ra_sci_async_rx_enable(const struct device *dev, uint8_t *buf, size_t len,
				       int32_t timeout)
{
	struct uart_ra_sci_data *data = dev->data;
	const struct uart_ra_sci_config *cfg = dev->config;
	int err = 0;
	unsigned int key = irq_lock();

	if (data->rx_buffer) {
		err = -EAGAIN;
		goto unlock;
	}

	/* Clear any latched error flags before (re)starting reception */
#if CONFIG_UART_RA_SCI_UART_FIFO_ENABLE
	if (data->sci.fifo_depth) {
		cfg->regs->SSR_FIFO &= (uint8_t) ~(SCI_UART_SSR_FIFO_ERR_MSK);
	} else
#endif
	{
		/*
		 * NOTE(review): plain store (`=`) here vs read-modify-write
		 * (`&=`) in the FIFO branch — verify against the SSR flag
		 * clearing protocol.
		 */
		cfg->regs->SSR = (uint8_t)~SCI_UART_SSR_ERR_MSK;
	}

	err = fsp_err_to_errno(R_SCI_UART_Read(&data->sci, buf, len));
	if (err) {
		goto unlock;
	}

	data->rx_timeout = timeout;
	data->rx_buffer = buf;
	data->rx_buf_cap = len;
	data->rx_buf_len = 0;
	data->rx_buf_offset = 0;

	/* Call buffer request user callback */
	async_rx_req_buf(dev);
	cfg->regs->SCR_b.RIE = 1;

unlock:
	irq_unlock(key);
	return err;
}
768
/*
 * Accept the application's next RX buffer; it becomes active when the
 * current buffer fills up (see async_evt_rx_complete()).
 */
static int uart_ra_sci_async_rx_buf_rsp(const struct device *dev, uint8_t *buf, size_t len)
{
	struct uart_ra_sci_data *data = dev->data;

	data->rx_next_buf_cap = len;
	data->rx_next_buf = buf;

	return 0;
}
778
/*
 * Stop async reception: cancel the inactivity timeout, halt the FSP/DTC
 * read, flush pending bytes (RX_RDY), release both buffers and emit
 * UART_RX_DISABLED.
 *
 * Returns -EAGAIN when reception is not active, -EIO on HAL failure.
 */
static int uart_ra_sci_async_rx_disable(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	uint32_t remaining_byte = 0;
	int err = 0;
	unsigned int key = irq_lock();

	if (!data->rx_buffer) {
		err = -EAGAIN;
		goto unlock;
	}

	k_work_cancel_delayable(&data->rx_timeout_work);
	if (FSP_SUCCESS != R_SCI_UART_ReadStop(&data->sci, &remaining_byte)) {
		LOG_DBG("drivers: serial: uart stop reading failed");
		err = -EIO;
		goto unlock;
	}

	/* Event order: RX_RDY, RX_BUF_RELEASED (x2), RX_DISABLED */
	async_rx_ready(dev);
	async_rx_release_buf(dev);
	async_rx_release_next_buf(dev);
	async_rx_disable(dev);

unlock:
	irq_unlock(key);
	return err;
}
807
/*
 * Report an RX stop condition (parity/framing/overrun/break) to the
 * application via UART_RX_STOPPED, cancelling the inactivity timeout.
 */
static inline void async_evt_rx_err(const struct device *dev, enum uart_rx_stop_reason reason)
{
	struct uart_ra_sci_data *data = dev->data;

	k_work_cancel_delayable(&data->rx_timeout_work);
	struct uart_event event = {
		.type = UART_RX_STOPPED,
		.data.rx_stop.reason = reason,
		.data.rx_stop.data.buf = (uint8_t *)data->sci.p_rx_dest,
		.data.rx_stop.data.offset = 0,
		/* Bytes received into the buffer before the error occurred */
		.data.rx_stop.data.len =
			data->rx_buf_cap - data->rx_buf_offset - data->sci.rx_dest_bytes,
	};
	async_user_callback(dev, &event);
}
823
/*
 * Current RX buffer is full: flush it to the application, release it, and
 * either swap in the queued next buffer (restarting the FSP read) or
 * disable reception when none was provided.
 */
static inline void async_evt_rx_complete(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
	unsigned int key = irq_lock();

	async_rx_ready(dev);
	async_rx_release_buf(dev);
	if (data->rx_next_buf) {
		data->rx_buffer = data->rx_next_buf;
		data->rx_buf_offset = 0;
		data->rx_buf_cap = data->rx_next_buf_cap;
		data->rx_next_buf = NULL;
		R_SCI_UART_Read(&data->sci, data->rx_buffer, data->rx_buf_cap);
		/* Immediately ask for the buffer after this one */
		async_rx_req_buf(dev);
	} else {
		async_rx_disable(dev);
	}
	irq_unlock(key);
}
843
async_evt_tx_done(const struct device * dev)844 static inline void async_evt_tx_done(const struct device *dev)
845 {
846 struct uart_ra_sci_data *data = dev->data;
847
848 k_work_cancel_delayable(&data->tx_timeout);
849 struct uart_event event = {
850 .type = UART_TX_DONE,
851 .data.tx.buf = (uint8_t *)data->sci.p_tx_src,
852 .data.tx.len = data->tx_buf_cap,
853 };
854 async_user_callback(dev, &event);
855 }
856
uart_ra_sci_callback_adapter(struct st_uart_callback_arg * fsp_args)857 static void uart_ra_sci_callback_adapter(struct st_uart_callback_arg *fsp_args)
858 {
859 const struct device *dev = fsp_args->p_context;
860
861 switch (fsp_args->event) {
862 case UART_EVENT_TX_COMPLETE:
863 async_evt_tx_done(dev);
864 break;
865 case UART_EVENT_RX_COMPLETE:
866 async_evt_rx_complete(dev);
867 break;
868 case UART_EVENT_ERR_PARITY:
869 async_evt_rx_err(dev, UART_ERROR_PARITY);
870 break;
871 case UART_EVENT_ERR_FRAMING:
872 async_evt_rx_err(dev, UART_ERROR_FRAMING);
873 break;
874 case UART_EVENT_ERR_OVERFLOW:
875 async_evt_rx_err(dev, UART_ERROR_OVERRUN);
876 break;
877 case UART_EVENT_BREAK_DETECT:
878 async_evt_rx_err(dev, UART_BREAK);
879 break;
880 case UART_EVENT_TX_DATA_EMPTY:
881 case UART_EVENT_RX_CHAR:
882 break;
883 }
884 }
885
uart_ra_sci_rx_timeout_handler(struct k_work * work)886 static void uart_ra_sci_rx_timeout_handler(struct k_work *work)
887 {
888 struct k_work_delayable *dwork = k_work_delayable_from_work(work);
889 struct uart_ra_sci_data *data =
890 CONTAINER_OF(dwork, struct uart_ra_sci_data, rx_timeout_work);
891 unsigned int key = irq_lock();
892
893 async_rx_ready(data->dev);
894 irq_unlock(key);
895 }
896
uart_ra_sci_tx_timeout_handler(struct k_work * work)897 static void uart_ra_sci_tx_timeout_handler(struct k_work *work)
898 {
899 struct k_work_delayable *dwork = k_work_delayable_from_work(work);
900 struct uart_ra_sci_data *data = CONTAINER_OF(dwork, struct uart_ra_sci_data, tx_timeout);
901
902 uart_ra_sci_async_tx_abort(data->dev);
903 }
904
905 #endif /* CONFIG_UART_ASYNC_API */
906
/* Zephyr UART driver API bindings for the RA SCI peripheral */
static DEVICE_API(uart, uart_ra_sci_driver_api) = {
	.poll_in = uart_ra_sci_poll_in,
	.poll_out = uart_ra_sci_poll_out,
	.err_check = uart_ra_sci_err_check,
#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
	.configure = uart_ra_sci_configure,
	.config_get = uart_ra_sci_config_get,
#endif
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	.fifo_fill = uart_ra_sci_fifo_fill,
	.fifo_read = uart_ra_sci_fifo_read,
	.irq_tx_enable = uart_ra_sci_irq_tx_enable,
	.irq_tx_disable = uart_ra_sci_irq_tx_disable,
	.irq_tx_ready = uart_ra_sci_irq_tx_ready,
	.irq_rx_enable = uart_ra_sci_irq_rx_enable,
	.irq_rx_disable = uart_ra_sci_irq_rx_disable,
	.irq_tx_complete = uart_ra_sci_irq_tx_complete,
	.irq_rx_ready = uart_ra_sci_irq_rx_ready,
	.irq_err_enable = uart_ra_sci_irq_err_enable,
	.irq_err_disable = uart_ra_sci_irq_err_disable,
	.irq_is_pending = uart_ra_sci_irq_is_pending,
	.irq_update = uart_ra_sci_irq_update,
	.irq_callback_set = uart_ra_sci_irq_callback_set,
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
#if CONFIG_UART_ASYNC_API
	.callback_set = uart_ra_sci_async_callback_set,
	.tx = uart_ra_sci_async_tx,
	.tx_abort = uart_ra_sci_async_tx_abort,
	.rx_enable = uart_ra_sci_async_rx_enable,
	.rx_buf_rsp = uart_ra_sci_async_rx_buf_rsp,
	.rx_disable = uart_ra_sci_async_rx_disable,
#endif /* CONFIG_UART_ASYNC_API */
};
940
/*
 * Device init: apply pinctrl, translate the devicetree-provided default
 * uart_config into FSP structures, wire up the async callback and work
 * items, and open the FSP sci_uart driver.
 */
static int uart_ra_sci_init(const struct device *dev)
{
	const struct uart_ra_sci_config *config = dev->config;
	struct uart_ra_sci_data *data = dev->data;
	int ret;
	fsp_err_t fsp_err;

	/* Configure dt provided device signals when available */
	ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
	if (ret < 0) {
		return ret;
	}

	/* Setup fsp sci_uart setting */
	ret = uart_ra_sci_apply_config(&data->uart_config, &data->fsp_config,
				       &data->fsp_config_extend, &data->fsp_baud_setting);
	if (ret != 0) {
		return ret;
	}

	data->fsp_config_extend.p_baud_setting = &data->fsp_baud_setting;
#if defined(CONFIG_UART_ASYNC_API)
	data->fsp_config.p_callback = uart_ra_sci_callback_adapter;
	data->fsp_config.p_context = dev;
	k_work_init_delayable(&data->tx_timeout, uart_ra_sci_tx_timeout_handler);
	k_work_init_delayable(&data->rx_timeout_work, uart_ra_sci_rx_timeout_handler);
#endif /* defined(CONFIG_UART_ASYNC_API) */
	data->fsp_config.p_extend = &data->fsp_config_extend;

	fsp_err = R_SCI_UART_Open(&data->sci, &data->fsp_config);
	if (fsp_err != FSP_SUCCESS) {
		LOG_DBG("drivers: uart: initialize failed");
		return -EIO;
	}
	/* Error interrupts stay masked until uart_ra_sci_irq_err_enable() */
	irq_disable(data->fsp_config.eri_irq);
	return 0;
}
978
979 #if CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API
/*
 * RXI (receive-data-full) ISR wrapper.
 *
 * Interrupt-driven mode: delegate to the user callback.
 * Async mode: re-arm the inactivity timeout per byte and invoke the FSP
 * ISR only once the current buffer is completely filled (the DTC moves
 * the data itself). The ICU interrupt flag is always acknowledged.
 */
static void uart_ra_sci_rxi_isr(const struct device *dev)
{
	struct uart_ra_sci_data *data = dev->data;
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		goto out;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	/* Each received byte restarts the RX inactivity timeout */
	if (data->rx_timeout != SYS_FOREVER_US && data->rx_timeout != 0) {
		k_work_reschedule(&data->rx_timeout_work, Z_TIMEOUT_US(data->rx_timeout));
	}
	data->rx_buf_len++;
	if (data->rx_buf_len + data->rx_buf_offset == data->rx_buf_cap) {
		sci_uart_rxi_isr();
	} else {
		goto out;
	}
#endif
out:
	/* Acknowledge the interrupt in the ICU */
	R_ICU->IELSR_b[data->fsp_config.rxi_irq].IR = 0U;
}
1004
/*
 * TXI (transmit-data-empty) ISR wrapper: user callback in interrupt-driven
 * mode (with manual ICU acknowledge), otherwise the FSP ISR (which
 * performs its own acknowledge).
 */
static void uart_ra_sci_txi_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		R_ICU->IELSR_b[data->fsp_config.txi_irq].IR = 0U;
		return;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_uart_txi_isr();
#endif
}
1021
/*
 * TEI (transmit-end) ISR wrapper: user callback in interrupt-driven mode
 * (with manual ICU acknowledge), otherwise the FSP ISR.
 */
static void uart_ra_sci_tei_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		R_ICU->IELSR_b[data->fsp_config.tei_irq].IR = 0U;
		return;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_uart_tei_isr();
#endif
}
1038
/*
 * ERI (receive error) interrupt handler.  Based on the SSR error masks used
 * by this driver, this fires on overrun/framing/parity error conditions —
 * confirm against the SCI hardware manual.
 *
 * Interrupt-driven mode: forward to the application callback (which is
 * expected to read and clear the error status), then acknowledge the
 * interrupt in the ICU.
 * Async mode: delegate to the FSP ERI ISR.  Note that uart_ra_sci_init()
 * disables this IRQ line right after R_SCI_UART_Open(); it is re-enabled by
 * the driver only when needed.
 */
static void uart_ra_sci_eri_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
		/* Acknowledge the interrupt in the ICU. */
		R_ICU->IELSR_b[data->fsp_config.eri_irq].IR = 0U;
		return;
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_uart_eri_isr();
#endif
}
1055 #endif
1056
/*
 * Build the ELC event identifier (e.g. ELC_EVENT_SCI0_RXI) for a given SCI
 * channel number.  The two-level macro indirection forces the "channel"
 * argument (typically a DT_INST_PROP() expression) to be fully expanded
 * before ## token pasting takes place.
 */
#define _ELC_EVENT_SCI_RXI(channel) ELC_EVENT_SCI##channel##_RXI
#define _ELC_EVENT_SCI_TXI(channel) ELC_EVENT_SCI##channel##_TXI
#define _ELC_EVENT_SCI_TEI(channel) ELC_EVENT_SCI##channel##_TEI
#define _ELC_EVENT_SCI_ERI(channel) ELC_EVENT_SCI##channel##_ERI

#define ELC_EVENT_SCI_RXI(channel) _ELC_EVENT_SCI_RXI(channel)
#define ELC_EVENT_SCI_TXI(channel) _ELC_EVENT_SCI_TXI(channel)
#define ELC_EVENT_SCI_TEI(channel) _ELC_EVENT_SCI_TEI(channel)
#define ELC_EVENT_SCI_ERI(channel) _ELC_EVENT_SCI_ERI(channel)
1066
#if CONFIG_UART_ASYNC_API
/*
 * Per-instance static initializers for the DTC transfer descriptors used by
 * the async API.
 *
 * RX: peripheral -> memory, destination address incremented, one interrupt
 * per transferred byte (TRANSFER_IRQ_EACH) so uart_ra_sci_rxi_isr() can
 * count bytes and re-arm the inactivity timeout.
 * TX: memory -> peripheral, source address incremented, a single interrupt
 * at the end of the block (TRANSFER_IRQ_END).
 *
 * The DTC activation source is the instance's RXI/TXI interrupt line taken
 * from devicetree.  p_dest/p_src/num_blocks/length are placeholders that are
 * filled in at run time when a transfer is started.
 */
#define UART_RA_SCI_ASYNC_INIT(index)                                                              \
	.rx_transfer_info =                                                                        \
		{                                                                                  \
			.transfer_settings_word_b.dest_addr_mode = TRANSFER_ADDR_MODE_INCREMENTED,\
			.transfer_settings_word_b.repeat_area = TRANSFER_REPEAT_AREA_DESTINATION,  \
			.transfer_settings_word_b.irq = TRANSFER_IRQ_EACH,                         \
			.transfer_settings_word_b.chain_mode = TRANSFER_CHAIN_MODE_DISABLED,       \
			.transfer_settings_word_b.src_addr_mode = TRANSFER_ADDR_MODE_FIXED,        \
			.transfer_settings_word_b.size = TRANSFER_SIZE_1_BYTE,                     \
			.transfer_settings_word_b.mode = TRANSFER_MODE_NORMAL,                     \
			.p_dest = (void *)NULL,                                                    \
			.p_src = (void const *)NULL,                                               \
			.num_blocks = 0,                                                           \
			.length = 0,                                                               \
		},                                                                                 \
	.rx_transfer_cfg_extend = {.activation_source =                                            \
					   DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq)},       \
	.rx_transfer_cfg =                                                                         \
		{                                                                                  \
			.p_info = &uart_ra_sci_data_##index.rx_transfer_info,                      \
			.p_extend = &uart_ra_sci_data_##index.rx_transfer_cfg_extend,              \
		},                                                                                 \
	.rx_transfer =                                                                             \
		{                                                                                  \
			.p_ctrl = &uart_ra_sci_data_##index.rx_transfer_ctrl,                      \
			.p_cfg = &uart_ra_sci_data_##index.rx_transfer_cfg,                        \
			.p_api = &g_transfer_on_dtc,                                               \
		},                                                                                 \
	.tx_transfer_info =                                                                        \
		{                                                                                  \
			.transfer_settings_word_b.dest_addr_mode = TRANSFER_ADDR_MODE_FIXED,       \
			.transfer_settings_word_b.repeat_area = TRANSFER_REPEAT_AREA_SOURCE,       \
			.transfer_settings_word_b.irq = TRANSFER_IRQ_END,                          \
			.transfer_settings_word_b.chain_mode = TRANSFER_CHAIN_MODE_DISABLED,       \
			.transfer_settings_word_b.src_addr_mode = TRANSFER_ADDR_MODE_INCREMENTED,  \
			.transfer_settings_word_b.size = TRANSFER_SIZE_1_BYTE,                     \
			.transfer_settings_word_b.mode = TRANSFER_MODE_NORMAL,                     \
			.p_dest = (void *)NULL,                                                    \
			.p_src = (void const *)NULL,                                               \
			.num_blocks = 0,                                                           \
			.length = 0,                                                               \
		},                                                                                 \
	.tx_transfer_cfg_extend = {.activation_source =                                            \
					   DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq)},       \
	.tx_transfer_cfg =                                                                         \
		{                                                                                  \
			.p_info = &uart_ra_sci_data_##index.tx_transfer_info,                      \
			.p_extend = &uart_ra_sci_data_##index.tx_transfer_cfg_extend,              \
		},                                                                                 \
	.tx_transfer = {                                                                           \
		.p_ctrl = &uart_ra_sci_data_##index.tx_transfer_ctrl,                              \
		.p_cfg = &uart_ra_sci_data_##index.tx_transfer_cfg,                                \
		.p_api = &g_transfer_on_dtc,                                                       \
	},

/* Hook the DTC transfer instances into the FSP UART configuration before
 * R_SCI_UART_Open() is called.
 */
#define UART_RA_SCI_DTC_INIT(index)                                                                \
	{                                                                                          \
		uart_ra_sci_data_##index.fsp_config.p_transfer_rx =                                \
			&uart_ra_sci_data_##index.rx_transfer;                                     \
		uart_ra_sci_data_##index.fsp_config.p_transfer_tx =                                \
			&uart_ra_sci_data_##index.tx_transfer;                                     \
	}

#else
/* Async API disabled: no DTC descriptors are needed. */
#define UART_RA_SCI_ASYNC_INIT(index)
#define UART_RA_SCI_DTC_INIT(index)
#endif
1135
#if CONFIG_UART_INTERRUPT_DRIVEN || CONFIG_UART_ASYNC_API
/*
 * Route this instance's SCI events through the ICU and install the ISRs.
 *
 * Writing an ELC event code into R_ICU->IELSR[n] selects which peripheral
 * event drives interrupt line n; the line numbers and priorities come from
 * the parent SCI node in devicetree.  RXI, TXI and TEI are enabled here.
 * ERI is connected but deliberately NOT enabled: uart_ra_sci_init()
 * disables it again right after R_SCI_UART_Open() (which presumably enables
 * it), so its enablement is managed by the driver at run time.
 */
#define UART_RA_SCI_IRQ_INIT(index)                                                                \
	{                                                                                          \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq)] =                    \
			ELC_EVENT_SCI_RXI(DT_INST_PROP(index, channel));                           \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq)] =                    \
			ELC_EVENT_SCI_TXI(DT_INST_PROP(index, channel));                           \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq)] =                    \
			ELC_EVENT_SCI_TEI(DT_INST_PROP(index, channel));                           \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq)] =                    \
			ELC_EVENT_SCI_ERI(DT_INST_PROP(index, channel));                           \
                                                                                                   \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, priority),                  \
			    uart_ra_sci_rxi_isr, DEVICE_DT_INST_GET(index), 0);                    \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, priority),                  \
			    uart_ra_sci_txi_isr, DEVICE_DT_INST_GET(index), 0);                    \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, priority),                  \
			    uart_ra_sci_tei_isr, DEVICE_DT_INST_GET(index), 0);                    \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, priority),                  \
			    uart_ra_sci_eri_isr, DEVICE_DT_INST_GET(index), 0);                    \
                                                                                                   \
		irq_enable(DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq));                       \
		irq_enable(DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq));                       \
		irq_enable(DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq));                       \
	}
#else
#define UART_RA_SCI_IRQ_INIT(index)
#endif
1168
1169 #define UART_RA_SCI_INIT(index) \
1170 PINCTRL_DT_DEFINE(DT_INST_PARENT(index)); \
1171 static const struct uart_ra_sci_config uart_ra_sci_config_##index = { \
1172 .pcfg = PINCTRL_DT_DEV_CONFIG_GET(DT_INST_PARENT(index)), \
1173 .regs = (R_SCI0_Type *)DT_REG_ADDR(DT_INST_PARENT(index)), \
1174 }; \
1175 \
1176 static struct uart_ra_sci_data uart_ra_sci_data_##index = { \
1177 .uart_config = \
1178 { \
1179 .baudrate = DT_INST_PROP(index, current_speed), \
1180 .parity = UART_CFG_PARITY_NONE, \
1181 .stop_bits = UART_CFG_STOP_BITS_1, \
1182 .data_bits = UART_CFG_DATA_BITS_8, \
1183 .flow_ctrl = COND_CODE_1(DT_INST_PROP(index, hw_flow_control), \
1184 (UART_CFG_FLOW_CTRL_RTS_CTS), \
1185 (UART_CFG_FLOW_CTRL_NONE)), \
1186 }, \
1187 .fsp_config = \
1188 { \
1189 .channel = DT_INST_PROP(index, channel), \
1190 .rxi_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, priority), \
1191 .rxi_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq), \
1192 .txi_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, priority), \
1193 .txi_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq), \
1194 .tei_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, priority), \
1195 .tei_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq), \
1196 .eri_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, priority), \
1197 .eri_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq), \
1198 }, \
1199 .fsp_config_extend = {}, \
1200 .fsp_baud_setting = {}, \
1201 .dev = DEVICE_DT_INST_GET(index), \
1202 UART_RA_SCI_ASYNC_INIT(index)}; \
1203 \
1204 static int uart_ra_sci_init##index(const struct device *dev) \
1205 { \
1206 UART_RA_SCI_IRQ_INIT(index); \
1207 UART_RA_SCI_DTC_INIT(index); \
1208 int err = uart_ra_sci_init(dev); \
1209 if (err != 0) { \
1210 return err; \
1211 } \
1212 return 0; \
1213 } \
1214 DEVICE_DT_INST_DEFINE(index, uart_ra_sci_init##index, NULL, &uart_ra_sci_data_##index, \
1215 &uart_ra_sci_config_##index, PRE_KERNEL_1, \
1216 CONFIG_SERIAL_INIT_PRIORITY, &uart_ra_sci_driver_api);
1217
1218 DT_INST_FOREACH_STATUS_OKAY(UART_RA_SCI_INIT)
1219