1 /*
2 * Copyright (c) 2024 Renesas Electronics Corporation
3 * SPDX-License-Identifier: Apache-2.0
4 */
5
6 #define DT_DRV_COMPAT renesas_ra8_uart_sci_b
7
8 #include <zephyr/kernel.h>
9 #include <zephyr/drivers/uart.h>
10 #include <zephyr/drivers/pinctrl.h>
11 #include <zephyr/sys/util.h>
12 #include <zephyr/irq.h>
13 #include <soc.h>
14 #include "r_sci_b_uart.h"
15 #include "r_dtc.h"
16 #include "r_transfer_api.h"
17
18 #include <zephyr/logging/log.h>
19 LOG_MODULE_REGISTER(ra8_uart_sci_b);
20
21 #if defined(CONFIG_UART_ASYNC_API)
22 void sci_b_uart_rxi_isr(void);
23 void sci_b_uart_txi_isr(void);
24 void sci_b_uart_tei_isr(void);
25 void sci_b_uart_eri_isr(void);
26 #endif
27
/* Constant (ROM-able) per-instance configuration, taken from devicetree. */
struct uart_ra_sci_b_config {
	R_SCI_B0_Type * const regs;            /* SCI_B peripheral register block base */
	const struct pinctrl_dev_config *pcfg; /* pinctrl state for the UART signals */
};
32
/* Mutable per-instance state, including the FSP driver control blocks. */
struct uart_ra_sci_b_data {
	const struct device *dev;                       /* back-pointer for work handlers */
	struct st_sci_b_uart_instance_ctrl sci;         /* FSP SCI_B UART control block */
	struct uart_config uart_config;                 /* last applied Zephyr UART config */
	struct st_uart_cfg fsp_config;                  /* FSP UART configuration */
	struct st_sci_b_uart_extended_cfg fsp_config_extend;
	struct st_sci_b_baud_setting_t fsp_baud_setting;
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	uart_irq_callback_user_data_t user_cb;          /* IRQ-driven API user callback */
	void *user_cb_data;
	uint32_t csr;                                   /* CSR snapshot taken by irq_update() */
#endif
#if defined(CONFIG_UART_ASYNC_API)
	/* RX */
	struct st_transfer_instance rx_transfer;        /* DTC transfer instance for RX */
	struct st_dtc_instance_ctrl rx_transfer_ctrl;
	struct st_transfer_info rx_transfer_info;
	struct st_transfer_cfg rx_transfer_cfg;
	struct st_dtc_extended_cfg rx_transfer_cfg_extend;
	struct k_work_delayable rx_timeout_work;        /* fires UART_RX_RDY on idle timeout */
	size_t rx_timeout;                              /* per-byte RX timeout in us */
	uint8_t *rx_buffer;                             /* active RX buffer */
	size_t rx_buffer_len;                           /* bytes not yet reported via RX_RDY */
	size_t rx_buffer_cap;                           /* total capacity of rx_buffer */
	size_t rx_buffer_offset;                        /* bytes already reported */
	uint8_t *rx_next_buffer;                        /* buffer supplied via rx_buf_rsp() */
	size_t rx_next_buffer_cap;

	/* TX */
	struct st_transfer_instance tx_transfer;        /* DTC transfer instance for TX */
	struct st_dtc_instance_ctrl tx_transfer_ctrl;
	struct st_transfer_info tx_transfer_info;
	struct st_transfer_cfg tx_transfer_cfg;
	struct st_dtc_extended_cfg tx_transfer_cfg_extend;
	struct k_work_delayable tx_timeout_work;        /* aborts TX on timeout */
	size_t tx_timeout;
	uint8_t *tx_buffer;                             /* active TX buffer */
	size_t tx_buffer_len;                           /* bytes actually sent (set on done/abort) */
	size_t tx_buffer_cap;                           /* requested TX length */

	uart_callback_t async_user_cb;                  /* async API user callback */
	void *async_user_cb_data;
#endif
};
77
/*
 * Poll for one received byte.
 *
 * @return 0 with *c filled on success, -1 when no data is pending,
 *         -EBUSY while async reception owns the receiver (RIE set).
 */
static int uart_ra_sci_b_poll_in(const struct device *dev, unsigned char *c)
{
	const struct uart_ra_sci_b_config *cfg = dev->config;

	/* Check if async reception was enabled */
	if (IS_ENABLED(CONFIG_UART_ASYNC_API) && cfg->regs->CCR0_b.RIE) {
		return -EBUSY;
	}

	/* With the HW FIFO, FRSR.R is the RX fill level; otherwise RDRF flags one byte */
	if (IS_ENABLED(CONFIG_UART_RA_SCI_B_UART_FIFO_ENABLE) ? cfg->regs->FRSR_b.R == 0U
							      : cfg->regs->CSR_b.RDRF == 0U) {
		/* There are no characters available to read. */
		return -1;
	}

	/* got a character */
	*c = (unsigned char)cfg->regs->RDR;

	return 0;
}
98
/* Blocking single-byte transmit: wait for transmitter idle, then write TDR. */
static void uart_ra_sci_b_poll_out(const struct device *dev, unsigned char c)
{
	const struct uart_ra_sci_b_config *cfg = dev->config;

	/* Busy-wait until the previous frame has fully left the shifter (TEND) */
	while (cfg->regs->CSR_b.TEND == 0U) {
	}

	cfg->regs->TDR_BY = c;
}
108
uart_ra_sci_b_err_check(const struct device * dev)109 static int uart_ra_sci_b_err_check(const struct device *dev)
110 {
111 const struct uart_ra_sci_b_config *cfg = dev->config;
112
113 const uint32_t status = cfg->regs->CSR;
114 int errors = 0;
115
116 if ((status & BIT(R_SCI_B0_CSR_ORER_Pos)) != 0) {
117 errors |= UART_ERROR_OVERRUN;
118 }
119 if ((status & BIT(R_SCI_B0_CSR_PER_Pos)) != 0) {
120 errors |= UART_ERROR_PARITY;
121 }
122 if ((status & BIT(R_SCI_B0_CSR_FER_Pos)) != 0) {
123 errors |= UART_ERROR_FRAMING;
124 }
125
126 return errors;
127 }
128
uart_ra_sci_b_apply_config(const struct uart_config * config,struct st_uart_cfg * fsp_config,struct st_sci_b_uart_extended_cfg * fsp_config_extend,struct st_sci_b_baud_setting_t * fsp_baud_setting)129 static int uart_ra_sci_b_apply_config(const struct uart_config *config,
130 struct st_uart_cfg *fsp_config,
131 struct st_sci_b_uart_extended_cfg *fsp_config_extend,
132 struct st_sci_b_baud_setting_t *fsp_baud_setting)
133 {
134 fsp_err_t fsp_err;
135
136 fsp_err = R_SCI_B_UART_BaudCalculate(config->baudrate, false, 5000, fsp_baud_setting);
137 __ASSERT(fsp_err == 0, "sci_uart: baud calculate error");
138
139 switch (config->parity) {
140 case UART_CFG_PARITY_NONE:
141 fsp_config->parity = UART_PARITY_OFF;
142 break;
143 case UART_CFG_PARITY_ODD:
144 fsp_config->parity = UART_PARITY_ODD;
145 break;
146 case UART_CFG_PARITY_EVEN:
147 fsp_config->parity = UART_PARITY_EVEN;
148 break;
149 case UART_CFG_PARITY_MARK:
150 return -ENOTSUP;
151 case UART_CFG_PARITY_SPACE:
152 return -ENOTSUP;
153 default:
154 return -EINVAL;
155 }
156
157 switch (config->stop_bits) {
158 case UART_CFG_STOP_BITS_0_5:
159 return -ENOTSUP;
160 case UART_CFG_STOP_BITS_1:
161 fsp_config->stop_bits = UART_STOP_BITS_1;
162 break;
163 case UART_CFG_STOP_BITS_1_5:
164 return -ENOTSUP;
165 case UART_CFG_STOP_BITS_2:
166 fsp_config->stop_bits = UART_STOP_BITS_2;
167 break;
168 default:
169 return -EINVAL;
170 }
171
172 switch (config->data_bits) {
173 case UART_CFG_DATA_BITS_5:
174 return -ENOTSUP;
175 case UART_CFG_DATA_BITS_6:
176 return -ENOTSUP;
177 case UART_CFG_DATA_BITS_7:
178 fsp_config->data_bits = UART_DATA_BITS_7;
179 break;
180 case UART_CFG_DATA_BITS_8:
181 fsp_config->data_bits = UART_DATA_BITS_8;
182 break;
183 case UART_CFG_DATA_BITS_9:
184 fsp_config->data_bits = UART_DATA_BITS_9;
185 break;
186 default:
187 return -EINVAL;
188 }
189
190 fsp_config_extend->clock = SCI_B_UART_CLOCK_INT;
191 fsp_config_extend->rx_edge_start = SCI_B_UART_START_BIT_FALLING_EDGE;
192 fsp_config_extend->noise_cancel = SCI_B_UART_NOISE_CANCELLATION_DISABLE;
193 fsp_config_extend->flow_control_pin = UINT16_MAX;
194 #if CONFIG_UART_RA_SCI_B_UART_FIFO_ENABLE
195 fsp_config_extend->rx_fifo_trigger = 0x8;
196 #endif /* CONFIG_UART_RA_SCI_B_UART_FIFO_ENABLE */
197
198 switch (config->flow_ctrl) {
199 case UART_CFG_FLOW_CTRL_NONE:
200 fsp_config_extend->flow_control = 0;
201 fsp_config_extend->rs485_setting.enable = false;
202 break;
203 case UART_CFG_FLOW_CTRL_RTS_CTS:
204 fsp_config_extend->flow_control = SCI_B_UART_FLOW_CONTROL_HARDWARE_CTSRTS;
205 fsp_config_extend->rs485_setting.enable = false;
206 break;
207 case UART_CFG_FLOW_CTRL_DTR_DSR:
208 return -ENOTSUP;
209 case UART_CFG_FLOW_CTRL_RS485:
210 /* TODO: implement this config */
211 return -ENOTSUP;
212 default:
213 return -EINVAL;
214 }
215
216 return 0;
217 }
218
219 #ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
220
uart_ra_sci_b_configure(const struct device * dev,const struct uart_config * cfg)221 static int uart_ra_sci_b_configure(const struct device *dev, const struct uart_config *cfg)
222 {
223 int err;
224 fsp_err_t fsp_err;
225 struct uart_ra_sci_b_data *data = dev->data;
226
227 err = uart_ra_sci_b_apply_config(cfg, &data->fsp_config, &data->fsp_config_extend,
228 &data->fsp_baud_setting);
229 if (err) {
230 return err;
231 }
232
233 fsp_err = R_SCI_B_UART_Close(&data->sci);
234 __ASSERT(fsp_err == 0, "sci_uart: configure: fsp close failed");
235
236 fsp_err = R_SCI_B_UART_Open(&data->sci, &data->fsp_config);
237 __ASSERT(fsp_err == 0, "sci_uart: configure: fsp open failed");
238 memcpy(&data->uart_config, cfg, sizeof(struct uart_config));
239
240 return err;
241 }
242
uart_ra_sci_b_config_get(const struct device * dev,struct uart_config * cfg)243 static int uart_ra_sci_b_config_get(const struct device *dev, struct uart_config *cfg)
244 {
245 struct uart_ra_sci_b_data *data = dev->data;
246
247 memcpy(cfg, &data->uart_config, sizeof(*cfg));
248 return 0;
249 }
250
251 #endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */
252
253 #ifdef CONFIG_UART_INTERRUPT_DRIVEN
254
/*
 * Fill the transmitter from tx_data; returns the number of bytes queued.
 * With the HW FIFO, writes until the FIFO fills; otherwise at most one byte.
 */
static int uart_ra_sci_b_fifo_fill(const struct device *dev, const uint8_t *tx_data, int size)
{
	struct uart_ra_sci_b_data *data = dev->data;
	const struct uart_ra_sci_b_config *cfg = dev->config;
	uint8_t num_tx = 0U;

	if (IS_ENABLED(CONFIG_UART_RA_SCI_B_UART_FIFO_ENABLE) && data->sci.fifo_depth > 0) {
		/* NOTE(review): FTSR == 0x10 presumably means the 16-stage TX
		 * FIFO is full — confirm against the RA8 hardware manual.
		 */
		while ((size - num_tx > 0) && cfg->regs->FTSR != 0x10U) {
			/* FTSR flag will be cleared with byte write to TDR register */

			/* Send a character (8bit , parity none) */
			cfg->regs->TDR_BY = tx_data[num_tx++];
		}
	} else {
		if (size > 0 && cfg->regs->CSR_b.TDRE) {
			/* TEND flag will be cleared with byte write to TDR register */

			/* Send a character (8bit , parity none) */
			cfg->regs->TDR_BY = tx_data[num_tx++];
		}
	}

	return num_tx;
}
279
uart_ra_sci_b_fifo_read(const struct device * dev,uint8_t * rx_data,const int size)280 static int uart_ra_sci_b_fifo_read(const struct device *dev, uint8_t *rx_data, const int size)
281 {
282 struct uart_ra_sci_b_data *data = dev->data;
283 const struct uart_ra_sci_b_config *cfg = dev->config;
284 uint8_t num_rx = 0U;
285
286 if (IS_ENABLED(CONFIG_UART_RA_SCI_B_UART_FIFO_ENABLE) && data->sci.fifo_depth > 0) {
287 while ((size - num_rx > 0) && cfg->regs->FRSR_b.R > 0U) {
288 /* FRSR.DR flag will be cleared with byte write to RDR register */
289
290 /* Receive a character (8bit , parity none) */
291 rx_data[num_rx++] = cfg->regs->RDR;
292 }
293 if (cfg->regs->FRSR_b.R == 0U) {
294 cfg->regs->CFCLR_b.RDRFC = 1U;
295 cfg->regs->FFCLR_b.DRC = 1U;
296 }
297 } else {
298 if (size > 0 && cfg->regs->CSR_b.RDRF) {
299 /* Receive a character (8bit , parity none) */
300 rx_data[num_rx++] = cfg->regs->RDR;
301 }
302 }
303
304 /* Clear overrun error flag */
305 cfg->regs->CFCLR_b.ORERC = 0U;
306
307 return num_rx;
308 }
309
/* Enable TX-data-empty (TIE) and TX-end (TEIE) interrupts. */
static void uart_ra_sci_b_irq_tx_enable(const struct device *dev)
{
	const struct uart_ra_sci_b_config *cfg = dev->config;

	cfg->regs->CCR0 |= (BIT(R_SCI_B0_CCR0_TIE_Pos) | BIT(R_SCI_B0_CCR0_TEIE_Pos));
}
316
/* Disable TX-data-empty (TIE) and TX-end (TEIE) interrupts. */
static void uart_ra_sci_b_irq_tx_disable(const struct device *dev)
{
	const struct uart_ra_sci_b_config *cfg = dev->config;

	cfg->regs->CCR0 &= ~(BIT(R_SCI_B0_CCR0_TIE_Pos) | BIT(R_SCI_B0_CCR0_TEIE_Pos));
}
323
/*
 * TX ready: TX interrupts enabled AND the CSR snapshot (taken by
 * irq_update()) shows data-register-empty or transmit-end.
 */
static int uart_ra_sci_b_irq_tx_ready(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;
	const struct uart_ra_sci_b_config *cfg = dev->config;

	return (cfg->regs->CCR0_b.TIE == 1U) &&
	       (data->csr & (BIT(R_SCI_B0_CSR_TDRE_Pos) | BIT(R_SCI_B0_CSR_TEND_Pos)));
}
332
/* TX complete: TEI interrupt enabled AND snapshot shows transmit-end (TEND). */
static int uart_ra_sci_b_irq_tx_complete(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;
	const struct uart_ra_sci_b_config *cfg = dev->config;

	return (cfg->regs->CCR0_b.TEIE == 1U) && (data->csr & BIT(R_SCI_B0_CSR_TEND_Pos));
}
340
/* Enable the receive-data-full interrupt (RIE). */
static void uart_ra_sci_b_irq_rx_enable(const struct device *dev)
{
	const struct uart_ra_sci_b_config *cfg = dev->config;

	cfg->regs->CCR0_b.RIE = 1U;
}
347
/* Disable the receive-data-full interrupt (RIE). */
static void uart_ra_sci_b_irq_rx_disable(const struct device *dev)
{
	const struct uart_ra_sci_b_config *cfg = dev->config;

	cfg->regs->CCR0_b.RIE = 0U;
}
354
/*
 * RX ready: RX interrupt enabled AND either the CSR snapshot shows RDRF
 * or (with the HW FIFO) the live FIFO data-ready flag is set.
 */
static int uart_ra_sci_b_irq_rx_ready(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;
	const struct uart_ra_sci_b_config *cfg = dev->config;

	return (cfg->regs->CCR0_b.RIE == 1U) &&
	       ((data->csr & BIT(R_SCI_B0_CSR_RDRF_Pos)) ||
		(IS_ENABLED(CONFIG_UART_RA_SCI_B_UART_FIFO_ENABLE) && cfg->regs->FRSR_b.DR == 1U));
}
364
/* Unmask the error (ERI) interrupt line at the NVIC. */
static void uart_ra_sci_b_irq_err_enable(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;

	NVIC_EnableIRQ(data->fsp_config.eri_irq);
}
371
/* Mask the error (ERI) interrupt line at the NVIC. */
static void uart_ra_sci_b_irq_err_disable(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;

	NVIC_DisableIRQ(data->fsp_config.eri_irq);
}
378
uart_ra_sci_b_irq_is_pending(const struct device * dev)379 static int uart_ra_sci_b_irq_is_pending(const struct device *dev)
380 {
381 const struct uart_ra_sci_b_config *cfg = dev->config;
382
383 const uint32_t ccr0 = cfg->regs->CCR0;
384 const uint32_t csr = cfg->regs->CSR;
385
386 const bool tx_pending = ((ccr0 & BIT(R_SCI_B0_CCR0_TIE_Pos)) &&
387 (csr & (BIT(R_SCI_B0_CSR_TEND_Pos) | BIT(R_SCI_B0_CSR_TDRE_Pos))));
388 const bool rx_pending =
389 ((ccr0 & BIT(R_SCI_B0_CCR0_RIE_Pos)) &&
390 ((csr & (BIT(R_SCI_B0_CSR_RDRF_Pos) | BIT(R_SCI_B0_CSR_PER_Pos) |
391 BIT(R_SCI_B0_CSR_FER_Pos) | BIT(R_SCI_B0_CSR_ORER_Pos))) ||
392 (IS_ENABLED(CONFIG_UART_RA_SCI_B_UART_FIFO_ENABLE) &&
393 cfg->regs->FRSR_b.DR == 1U)));
394
395 return tx_pending || rx_pending;
396 }
397
/*
 * Snapshot CSR for the irq_*_ready()/complete() accessors, then acknowledge
 * any latched error flags via the write-1-to-clear CFCLR register.
 * Always returns 1 per the Zephyr irq_update contract.
 */
static int uart_ra_sci_b_irq_update(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;
	const struct uart_ra_sci_b_config *cfg = dev->config;
	uint32_t cfclr = 0;

	/* Snapshot must be taken before clearing, so accessors see the flags */
	data->csr = cfg->regs->CSR;

	if (data->csr & BIT(R_SCI_B0_CSR_PER_Pos)) {
		cfclr |= BIT(R_SCI_B0_CFCLR_PERC_Pos);
	}
	if (data->csr & BIT(R_SCI_B0_CSR_FER_Pos)) {
		cfclr |= BIT(R_SCI_B0_CFCLR_FERC_Pos);
	}
	if (data->csr & BIT(R_SCI_B0_CSR_ORER_Pos)) {
		cfclr |= BIT(R_SCI_B0_CFCLR_ORERC_Pos);
	}

	cfg->regs->CFCLR = cfclr;

	return 1;
}
420
/* Register the interrupt-driven API callback invoked from the ISRs. */
static void uart_ra_sci_b_irq_callback_set(const struct device *dev,
					   uart_irq_callback_user_data_t cb, void *cb_data)
{
	struct uart_ra_sci_b_data *data = dev->data;

	data->user_cb = cb;
	data->user_cb_data = cb_data;
}
429
430 #endif /* CONFIG_UART_INTERRUPT_DRIVEN */
431
432 #ifdef CONFIG_UART_ASYNC_API
433
async_user_callback(const struct device * dev,struct uart_event * event)434 static inline void async_user_callback(const struct device *dev, struct uart_event *event)
435 {
436 struct uart_ra_sci_b_data *data = dev->data;
437
438 if (data->async_user_cb) {
439 data->async_user_cb(dev, event, data->async_user_cb_data);
440 }
441 }
442
async_rx_error(const struct device * dev,enum uart_rx_stop_reason reason)443 static inline void async_rx_error(const struct device *dev, enum uart_rx_stop_reason reason)
444 {
445 struct uart_ra_sci_b_data *data = dev->data;
446 struct uart_event event = {
447 .type = UART_RX_STOPPED,
448 .data.rx_stop.reason = reason,
449 .data.rx_stop.data.buf = (uint8_t *)data->rx_buffer,
450 .data.rx_stop.data.offset = data->rx_buffer_offset,
451 .data.rx_stop.data.len = data->rx_buffer_len,
452 };
453 async_user_callback(dev, &event);
454 }
455
async_rx_disabled(const struct device * dev)456 static inline void async_rx_disabled(const struct device *dev)
457 {
458 struct uart_event event = {
459 .type = UART_RX_DISABLED,
460 };
461 return async_user_callback(dev, &event);
462 }
463
async_request_rx_buffer(const struct device * dev)464 static inline void async_request_rx_buffer(const struct device *dev)
465 {
466 struct uart_event event = {
467 .type = UART_RX_BUF_REQUEST,
468 };
469 return async_user_callback(dev, &event);
470 }
471
/*
 * Emit UART_RX_RDY for the bytes received since the last report, then
 * advance the delivered-offset bookkeeping. No-op when nothing is pending.
 */
static inline void async_rx_ready(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;

	if (data->rx_buffer_len == 0) {
		return;
	}

	struct uart_event event = {
		.type = UART_RX_RDY,
		.data.rx.buf = (uint8_t *)data->rx_buffer,
		.data.rx.offset = data->rx_buffer_offset,
		.data.rx.len = data->rx_buffer_len,
	};
	async_user_callback(dev, &event);

	/* Bytes just reported become part of the delivered offset */
	data->rx_buffer_offset += data->rx_buffer_len;
	data->rx_buffer_len = 0;
}
491
/*
 * Swap in the application-supplied next RX buffer and restart reception
 * into it; if none was provided, reception stops (UART_RX_DISABLED).
 */
static inline void async_replace_rx_buffer(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;

	if (data->rx_next_buffer != NULL) {
		data->rx_buffer = data->rx_next_buffer;
		data->rx_buffer_cap = data->rx_next_buffer_cap;

		/* NOTE(review): return value of R_SCI_B_UART_Read is ignored;
		 * a failed restart would go unnoticed here.
		 */
		R_SCI_B_UART_Read(&data->sci, data->rx_buffer, data->rx_buffer_cap);

		data->rx_next_buffer = NULL;
		data->rx_next_buffer_cap = 0;
		/* Immediately solicit the following buffer from the app */
		async_request_rx_buffer(dev);
	} else {
		async_rx_disabled(dev);
	}
}
509
async_release_rx_buffer(const struct device * dev)510 static inline void async_release_rx_buffer(const struct device *dev)
511 {
512 struct uart_ra_sci_b_data *data = dev->data;
513
514 if (data->rx_buffer == NULL) {
515 return;
516 }
517
518 struct uart_event event = {
519 .type = UART_RX_BUF_RELEASED,
520 .data.rx.buf = (uint8_t *)data->rx_buffer,
521 };
522 async_user_callback(dev, &event);
523
524 data->rx_buffer = NULL;
525 data->rx_buffer_cap = 0;
526 data->rx_buffer_len = 0;
527 data->rx_buffer_offset = 0;
528 }
529
async_release_rx_next_buffer(const struct device * dev)530 static inline void async_release_rx_next_buffer(const struct device *dev)
531 {
532 struct uart_ra_sci_b_data *data = dev->data;
533
534 if (data->rx_next_buffer == NULL) {
535 return;
536 }
537
538 struct uart_event event = {
539 .type = UART_RX_BUF_RELEASED,
540 .data.rx.buf = (uint8_t *)data->rx_next_buffer,
541 };
542 async_user_callback(dev, &event);
543
544 data->rx_next_buffer = NULL;
545 data->rx_next_buffer_cap = 0;
546 }
547
/*
 * Emit UART_TX_DONE for the full requested length and release ownership
 * of the TX buffer back to the application.
 */
static inline void async_update_tx_buffer(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;
	struct uart_event event = {
		.type = UART_TX_DONE,
		.data.tx.buf = (uint8_t *)data->tx_buffer,
		.data.tx.len = data->tx_buffer_cap,
	};
	async_user_callback(dev, &event);

	data->tx_buffer = NULL;
	data->tx_buffer_cap = 0;
}
561
/*
 * Emit UART_TX_ABORTED with the partially-sent length, then drop the TX
 * buffer. The event is skipped if the transfer had already completed
 * (tx_buffer_len == tx_buffer_cap).
 */
static inline void async_tx_abort(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;

	if (data->tx_buffer_len < data->tx_buffer_cap) {
		struct uart_event event = {
			.type = UART_TX_ABORTED,
			.data.tx.buf = (uint8_t *)data->tx_buffer,
			.data.tx.len = data->tx_buffer_len,
		};
		async_user_callback(dev, &event);
	}

	data->tx_buffer = NULL;
	data->tx_buffer_cap = 0;
}
578
uart_ra_sci_b_async_timer_start(struct k_work_delayable * work,size_t timeout)579 static inline void uart_ra_sci_b_async_timer_start(struct k_work_delayable *work, size_t timeout)
580 {
581 if (timeout != SYS_FOREVER_US && timeout != 0) {
582 LOG_DBG("Async timer started for %d us", timeout);
583 k_work_reschedule(work, K_USEC(timeout));
584 }
585 }
586
/*
 * Map an FSP error code onto a negative errno value.
 * 0 (FSP success) maps to 0; unknown codes collapse to -EINVAL.
 */
static inline int fsp_err_to_errno(fsp_err_t fsp_err)
{
	int err;

	switch (fsp_err) {
	case 0:
		err = 0;
		break;
	case FSP_ERR_IN_USE:
		err = -EBUSY;
		break;
	case FSP_ERR_NOT_OPEN:
		err = -EIO;
		break;
	case FSP_ERR_UNSUPPORTED:
		err = -ENOTSUP;
		break;
	case FSP_ERR_INVALID_ARGUMENT:
	default:
		err = -EINVAL;
		break;
	}

	return err;
}
604
/*
 * Register the async API callback. Both fields are updated under an IRQ
 * lock so ISRs never observe a callback/context mismatch.
 */
static int uart_ra_sci_b_async_callback_set(const struct device *dev, uart_callback_t cb,
					    void *cb_data)

{
	struct uart_ra_sci_b_data *data = dev->data;
	unsigned int key = irq_lock();

	data->async_user_cb = cb;
	data->async_user_cb_data = cb_data;

	irq_unlock(key);
	return 0;
}
618
/*
 * Start an asynchronous transmission of len bytes from buf.
 *
 * @return 0 on success, -EBUSY if a transmission is still in flight,
 *         or a negative errno translated from the FSP write call.
 */
static int uart_ra_sci_b_async_tx(const struct device *dev, const uint8_t *buf, size_t len,
				  int32_t timeout)
{
	struct uart_ra_sci_b_data *data = dev->data;
	int err = 0;

	unsigned int key = irq_lock();

	/* tx_buffer_len < tx_buffer_cap means a previous TX is incomplete */
	if (data->tx_buffer_len < data->tx_buffer_cap) {
		err = -EBUSY;
		goto unlock;
	}

	err = fsp_err_to_errno(R_SCI_B_UART_Write(&data->sci, buf, len));
	if (err != 0) {
		goto unlock;
	}

	/* Record ownership only after the FSP accepted the transfer */
	data->tx_buffer = (uint8_t *)buf;
	data->tx_buffer_cap = len;

	uart_ra_sci_b_async_timer_start(&data->tx_timeout_work, timeout);

unlock:
	irq_unlock(key);
	return err;
}
646
/*
 * Safely shut down the transmitter: mask TX interrupts, wait for the
 * in-flight frame to finish, then clear TE and wait for the internal
 * transmit state machine to go idle.
 */
static inline void disable_tx(const struct device *dev)
{
	const struct uart_ra_sci_b_config *cfg = dev->config;

	/* Transmit interrupts must be disabled to start with. */
	cfg->regs->CCR0 &= (uint32_t) ~(R_SCI_B0_CCR0_TIE_Msk | R_SCI_B0_CCR0_TEIE_Msk);

	/*
	 * Make sure no transmission is in progress. Setting CCR0_b.TE to 0 when CSR_b.TEND
	 * is 0 causes SCI peripheral to work abnormally.
	 */
	while (cfg->regs->CSR_b.TEND != 1U) {
	}

	cfg->regs->CCR0 &= (uint32_t) ~(R_SCI_B0_CCR0_TE_Msk);
	/* Wait for the internal transmit-in-progress status (CESR.TIST) to drop */
	while (cfg->regs->CESR_b.TIST != 0U) {
	}
}
665
/*
 * Abort the current asynchronous transmission.
 *
 * Computes how many bytes actually went out (from the DTC remaining count
 * when TX uses DTC, else from the FSP byte counter), aborts the FSP
 * transfer, and emits UART_TX_ABORTED via async_tx_abort().
 */
static int uart_ra_sci_b_async_tx_abort(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;
	int err = 0;

	disable_tx(dev);
	k_work_cancel_delayable(&data->tx_timeout_work);

	if (data->fsp_config.p_transfer_tx) {
		transfer_properties_t transfer_info;

		err = fsp_err_to_errno(R_DTC_InfoGet(&data->tx_transfer_ctrl, &transfer_info));
		if (err != 0) {
			return err;
		}
		data->tx_buffer_len = data->tx_buffer_cap - transfer_info.transfer_length_remaining;
	} else {
		data->tx_buffer_len = data->tx_buffer_cap - data->sci.tx_src_bytes;
	}

	/* NOTE(review): R_SCI_B_UART_Abort return value is ignored here */
	R_SCI_B_UART_Abort(&data->sci, UART_DIR_TX);

	async_tx_abort(dev);

	return 0;
}
692
/* Delayed-work handler: TX deadline elapsed, abort the transmission. */
static void uart_ra_sci_b_async_tx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct uart_ra_sci_b_data *data =
		CONTAINER_OF(dwork, struct uart_ra_sci_b_data, tx_timeout_work);

	uart_ra_sci_b_async_tx_abort(data->dev);
}
701
/*
 * Start asynchronous reception into buf.
 *
 * @param timeout idle timeout in microseconds, re-armed on each RXI.
 * @return 0 on success, -EBUSY if reception is already active, or a
 *         negative errno translated from the FSP read call.
 */
static int uart_ra_sci_b_async_rx_enable(const struct device *dev, uint8_t *buf, size_t len,
					 int32_t timeout)
{
	struct uart_ra_sci_b_data *data = dev->data;
	const struct uart_ra_sci_b_config *cfg = dev->config;
	int err = 0;

	k_work_cancel_delayable(&data->rx_timeout_work);

	unsigned int key = irq_lock();

	/* A non-NULL rx_buffer means reception is already running */
	if (data->rx_buffer) {
		err = -EBUSY;
		goto unlock;
	}

	err = fsp_err_to_errno(R_SCI_B_UART_Read(&data->sci, buf, len));
	if (err != 0) {
		goto unlock;
	}

	data->rx_timeout = timeout;
	data->rx_buffer = buf;
	data->rx_buffer_cap = len;
	data->rx_buffer_len = 0;
	data->rx_buffer_offset = 0;

	/* Enable the receive interrupt so RXI drives the async machinery */
	cfg->regs->CCR0_b.RIE = 1U;

	/* Pre-request the follow-up buffer from the application */
	async_request_rx_buffer(dev);

unlock:
	irq_unlock(key);
	return err;
}
737
uart_ra_sci_b_async_rx_buf_rsp(const struct device * dev,uint8_t * buf,size_t len)738 static int uart_ra_sci_b_async_rx_buf_rsp(const struct device *dev, uint8_t *buf, size_t len)
739 {
740 struct uart_ra_sci_b_data *data = dev->data;
741
742 data->rx_next_buffer = buf;
743 data->rx_next_buffer_cap = len;
744
745 return 0;
746 }
747
/*
 * Stop asynchronous reception: halt the FSP read, flush any undelivered
 * bytes as UART_RX_RDY, release both buffers, and emit UART_RX_DISABLED.
 */
static int uart_ra_sci_b_async_rx_disable(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;
	const struct uart_ra_sci_b_config *cfg = dev->config;
	uint32_t remaining_byte = 0;
	int err = 0;
	unsigned int key = irq_lock();

	k_work_cancel_delayable(&data->rx_timeout_work);

	err = fsp_err_to_errno(R_SCI_B_UART_ReadStop(&data->sci, &remaining_byte));
	if (err != 0) {
		goto unlock;
	}

	/* Without DTC, derive the undelivered count from the FSP remainder */
	if (!data->fsp_config.p_transfer_rx) {
		data->rx_buffer_len = data->rx_buffer_cap - data->rx_buffer_offset - remaining_byte;
	}
	async_rx_ready(dev);
	async_release_rx_buffer(dev);
	async_release_rx_next_buffer(dev);
	async_rx_disabled(dev);

	/* Clear the RDRF bit so that the next reception can be raised correctly */
	cfg->regs->CFCLR_b.RDRFC = 1U;

unlock:
	irq_unlock(key);
	return err;
}
778
/*
 * Delayed-work handler: RX idle timeout elapsed, flush what has been
 * received so far to the application as UART_RX_RDY.
 */
static void uart_ra_sci_b_async_rx_timeout(struct k_work *work)
{
	struct k_work_delayable *dwork = k_work_delayable_from_work(work);
	struct uart_ra_sci_b_data *data =
		CONTAINER_OF(dwork, struct uart_ra_sci_b_data, rx_timeout_work);
	const struct device *dev = data->dev;

	unsigned int key = irq_lock();

	/* Without DTC, derive the pending count from the FSP byte counter */
	if (!data->fsp_config.p_transfer_rx) {
		data->rx_buffer_len =
			data->rx_buffer_cap - data->rx_buffer_offset - data->sci.rx_dest_bytes;
	}
	async_rx_ready(dev);

	irq_unlock(key);
}
796
uart_ra_sci_b_callback_adapter(struct st_uart_callback_arg * fsp_args)797 static void uart_ra_sci_b_callback_adapter(struct st_uart_callback_arg *fsp_args)
798 {
799 const struct device *dev = fsp_args->p_context;
800 struct uart_ra_sci_b_data *data = dev->data;
801
802 switch (fsp_args->event) {
803 case UART_EVENT_TX_COMPLETE: {
804 data->tx_buffer_len = data->tx_buffer_cap;
805 async_update_tx_buffer(dev);
806 return;
807 }
808 case UART_EVENT_RX_COMPLETE: {
809 data->rx_buffer_len =
810 data->rx_buffer_cap - data->rx_buffer_offset - data->sci.rx_dest_bytes;
811 async_rx_ready(dev);
812 async_release_rx_buffer(dev);
813 async_replace_rx_buffer(dev);
814 return;
815 }
816 case UART_EVENT_ERR_PARITY:
817 return async_rx_error(dev, UART_ERROR_PARITY);
818 case UART_EVENT_ERR_FRAMING:
819 return async_rx_error(dev, UART_ERROR_FRAMING);
820 case UART_EVENT_ERR_OVERFLOW:
821 return async_rx_error(dev, UART_ERROR_OVERRUN);
822 case UART_EVENT_BREAK_DETECT:
823 return async_rx_error(dev, UART_BREAK);
824 case UART_EVENT_TX_DATA_EMPTY:
825 case UART_EVENT_RX_CHAR:
826 break;
827 }
828 }
829
830 #endif /* CONFIG_UART_ASYNC_API */
831
832 static const struct uart_driver_api uart_ra_sci_b_driver_api = {
833 .poll_in = uart_ra_sci_b_poll_in,
834 .poll_out = uart_ra_sci_b_poll_out,
835 .err_check = uart_ra_sci_b_err_check,
836 #ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
837 .configure = uart_ra_sci_b_configure,
838 .config_get = uart_ra_sci_b_config_get,
839 #endif
840 #ifdef CONFIG_UART_INTERRUPT_DRIVEN
841 .fifo_fill = uart_ra_sci_b_fifo_fill,
842 .fifo_read = uart_ra_sci_b_fifo_read,
843 .irq_tx_enable = uart_ra_sci_b_irq_tx_enable,
844 .irq_tx_disable = uart_ra_sci_b_irq_tx_disable,
845 .irq_tx_ready = uart_ra_sci_b_irq_tx_ready,
846 .irq_rx_enable = uart_ra_sci_b_irq_rx_enable,
847 .irq_rx_disable = uart_ra_sci_b_irq_rx_disable,
848 .irq_tx_complete = uart_ra_sci_b_irq_tx_complete,
849 .irq_rx_ready = uart_ra_sci_b_irq_rx_ready,
850 .irq_err_enable = uart_ra_sci_b_irq_err_enable,
851 .irq_err_disable = uart_ra_sci_b_irq_err_disable,
852 .irq_is_pending = uart_ra_sci_b_irq_is_pending,
853 .irq_update = uart_ra_sci_b_irq_update,
854 .irq_callback_set = uart_ra_sci_b_irq_callback_set,
855 #endif /* CONFIG_UART_INTERRUPT_DRIVEN */
856 #if CONFIG_UART_ASYNC_API
857 .callback_set = uart_ra_sci_b_async_callback_set,
858 .tx = uart_ra_sci_b_async_tx,
859 .tx_abort = uart_ra_sci_b_async_tx_abort,
860 .rx_enable = uart_ra_sci_b_async_rx_enable,
861 .rx_buf_rsp = uart_ra_sci_b_async_rx_buf_rsp,
862 .rx_disable = uart_ra_sci_b_async_rx_disable,
863 #endif /* CONFIG_UART_ASYNC_API */
864 };
865
/*
 * Device init: apply pinctrl, build the FSP configuration from the
 * devicetree-provided defaults, wire up async work items, and open the
 * FSP UART driver.
 */
static int uart_ra_sci_b_init(const struct device *dev)
{
	const struct uart_ra_sci_b_config *config = dev->config;
	struct uart_ra_sci_b_data *data = dev->data;
	int ret;
	fsp_err_t fsp_err;

	/* Configure dt provided device signals when available */
	ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
	if (ret < 0) {
		return ret;
	}

	/* Setup fsp sci_uart setting */
	ret = uart_ra_sci_b_apply_config(&data->uart_config, &data->fsp_config,
					 &data->fsp_config_extend, &data->fsp_baud_setting);
	if (ret != 0) {
		return ret;
	}

	data->fsp_config_extend.p_baud_setting = &data->fsp_baud_setting;
	data->fsp_config.p_extend = &data->fsp_config_extend;

#if defined(CONFIG_UART_ASYNC_API)
	data->fsp_config.p_callback = uart_ra_sci_b_callback_adapter;
	data->fsp_config.p_context = dev;

	k_work_init_delayable(&data->tx_timeout_work, uart_ra_sci_b_async_tx_timeout);
	k_work_init_delayable(&data->rx_timeout_work, uart_ra_sci_b_async_rx_timeout);
#endif /* defined(CONFIG_UART_ASYNC_API) */

	/* NOTE(review): fsp_err is only consumed by __ASSERT; with asserts
	 * compiled out an open failure is not propagated to the caller.
	 */
	fsp_err = R_SCI_B_UART_Open(&data->sci, &data->fsp_config);
	__ASSERT(fsp_err == 0, "sci_uart: initialization: open failed");

	return 0;
}
902
903 #if defined(CONFIG_UART_INTERRUPT_DRIVEN) || defined(CONFIG_UART_ASYNC_API)
904
/*
 * RXI (receive data full) interrupt handler.
 *
 * Interrupt-driven mode: forward to the user callback. Async mode:
 * re-arm the idle timeout and either count DTC-received bytes (see the
 * inline workaround comment) or delegate to the FSP RXI ISR.
 */
static void uart_ra_sci_b_rxi_isr(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;

#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	uart_ra_sci_b_async_timer_start(&data->rx_timeout_work, data->rx_timeout);

	if (data->fsp_config.p_transfer_rx) {
		/*
		 * The RX DTC is set to TRANSFER_IRQ_EACH, triggering an interrupt for each received
		 * byte. However, the sci_b_uart_rxi_isr function currently only handles the
		 * TRANSFER_IRQ_END case, which assumes the transfer is complete. To address this,
		 * we need to add some code to simulate the TRANSFER_IRQ_END case by counting the
		 * received length.
		 */
		data->rx_buffer_len++;
		if (data->rx_buffer_offset + data->rx_buffer_len == data->rx_buffer_cap) {
			sci_b_uart_rxi_isr();
		} else {
			/* Not done yet: just acknowledge the ICU interrupt */
			R_ICU->IELSR_b[data->fsp_config.rxi_irq].IR = 0U;
		}
	} else {
		sci_b_uart_rxi_isr();
	}
#else
	/* No async path: acknowledge the ICU interrupt ourselves */
	R_ICU->IELSR_b[data->fsp_config.rxi_irq].IR = 0U;
#endif
}
939
/*
 * TXI (transmit data empty) interrupt handler: notify the IRQ-driven
 * callback, then either delegate to the FSP TXI ISR (async) or just
 * acknowledge the ICU interrupt.
 */
static void uart_ra_sci_b_txi_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_b_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_b_uart_txi_isr();
#else
	R_ICU->IELSR_b[data->fsp_config.txi_irq].IR = 0U;
#endif
}
956
/*
 * TEI (transmit end) interrupt handler: notify the IRQ-driven callback;
 * in async mode also cancel the TX timeout before delegating to the FSP
 * TEI ISR (which completes the transfer and fires TX_COMPLETE).
 */
static void uart_ra_sci_b_tei_isr(const struct device *dev)
{
	struct uart_ra_sci_b_data *data = dev->data;

#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	k_work_cancel_delayable(&data->tx_timeout_work);
	sci_b_uart_tei_isr();
#else
	R_ICU->IELSR_b[data->fsp_config.tei_irq].IR = 0U;
#endif
}
974
/*
 * ERI (receive error) interrupt handler: notify the IRQ-driven callback,
 * then either delegate to the FSP ERI ISR (async error events) or just
 * acknowledge the ICU interrupt.
 */
static void uart_ra_sci_b_eri_isr(const struct device *dev)
{
#if defined(CONFIG_UART_INTERRUPT_DRIVEN)
	struct uart_ra_sci_b_data *data = dev->data;

	if (data->user_cb != NULL) {
		data->user_cb(dev, data->user_cb_data);
	}
#endif

#if defined(CONFIG_UART_ASYNC_API)
	sci_b_uart_eri_isr();
#else
	R_ICU->IELSR_b[data->fsp_config.eri_irq].IR = 0U;
#endif
}
991
992 #endif /* defined(CONFIG_UART_INTERRUPT_DRIVEN) || defined(CONFIG_UART_ASYNC_API) */
993
/*
 * Two-level macros that paste the SCI channel number into the ELC event
 * name (e.g. channel 9 -> ELC_EVENT_SCI9_RXI). The inner _ELC_* layer
 * forces expansion of the channel argument before pasting.
 */
#define _ELC_EVENT_SCI_RXI(channel) ELC_EVENT_SCI##channel##_RXI
#define _ELC_EVENT_SCI_TXI(channel) ELC_EVENT_SCI##channel##_TXI
#define _ELC_EVENT_SCI_TEI(channel) ELC_EVENT_SCI##channel##_TEI
#define _ELC_EVENT_SCI_ERI(channel) ELC_EVENT_SCI##channel##_ERI

#define ELC_EVENT_SCI_RXI(channel) _ELC_EVENT_SCI_RXI(channel)
#define ELC_EVENT_SCI_TXI(channel) _ELC_EVENT_SCI_TXI(channel)
#define ELC_EVENT_SCI_TEI(channel) _ELC_EVENT_SCI_TEI(channel)
#define ELC_EVENT_SCI_ERI(channel) _ELC_EVENT_SCI_ERI(channel)
1003
#if defined(CONFIG_UART_INTERRUPT_DRIVEN) || defined(CONFIG_UART_ASYNC_API)

/*
 * Per-instance IRQ wiring, run at init time:
 * 1. Program the ICU event-link registers (IELSR) so that this channel's
 *    SCI RXI/TXI/TEI/ERI peripheral events drive the NVIC lines declared
 *    in the devicetree (on the parent SCI node).
 * 2. Connect the Zephyr ISR trampolines above to those NVIC lines, passing
 *    the device instance as the ISR argument.
 * Expands to nothing when neither interrupt-driven nor async API is built.
 */
#define UART_RA_SCI_B_IRQ_CONFIG_INIT(index)                                                       \
	do {                                                                                       \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq)] =                    \
			ELC_EVENT_SCI_RXI(DT_INST_PROP(index, channel));                           \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq)] =                    \
			ELC_EVENT_SCI_TXI(DT_INST_PROP(index, channel));                           \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq)] =                    \
			ELC_EVENT_SCI_TEI(DT_INST_PROP(index, channel));                           \
		R_ICU->IELSR[DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq)] =                    \
			ELC_EVENT_SCI_ERI(DT_INST_PROP(index, channel));                           \
                                                                                                   \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, priority),                  \
			    uart_ra_sci_b_rxi_isr, DEVICE_DT_INST_GET(index), 0);                  \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, priority),                  \
			    uart_ra_sci_b_txi_isr, DEVICE_DT_INST_GET(index), 0);                  \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, priority),                  \
			    uart_ra_sci_b_tei_isr, DEVICE_DT_INST_GET(index), 0);                  \
		IRQ_CONNECT(DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq),                       \
			    DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, priority),                  \
			    uart_ra_sci_b_eri_isr, DEVICE_DT_INST_GET(index), 0);                  \
	} while (0)

#else

#define UART_RA_SCI_B_IRQ_CONFIG_INIT(index)

#endif
1036
#if defined(CONFIG_UART_ASYNC_API)

/*
 * Hook the DTC transfer instances into the FSP UART configuration at init
 * time, but only when the corresponding devicetree boolean (rx-dtc/tx-dtc)
 * is set. Leaving p_transfer_rx/tx NULL makes the FSP driver fall back to
 * per-byte interrupt transfers.
 */
#define UART_RA_SCI_B_DTC_INIT(index)                                                              \
	do {                                                                                       \
		if (DT_INST_PROP_OR(index, rx_dtc, false)) {                                       \
			uart_ra_sci_b_data_##index.fsp_config.p_transfer_rx =                      \
				&uart_ra_sci_b_data_##index.rx_transfer;                           \
		}                                                                                  \
		if (DT_INST_PROP_OR(index, tx_dtc, false)) {                                       \
			uart_ra_sci_b_data_##index.fsp_config.p_transfer_tx =                      \
				&uart_ra_sci_b_data_##index.tx_transfer;                           \
		}                                                                                  \
	} while (0)

/*
 * Static initializers for the async-API DTC descriptors, spliced into the
 * per-instance data struct.
 *
 * RX: fixed source (the SCI receive data register), incrementing
 * destination (the user buffer), interrupt after EACH unit so the RXI ISR
 * can track progress and run timeouts. Activated by the channel's RXI IRQ.
 *
 * TX: incrementing source (the user buffer), fixed destination (the SCI
 * transmit data register), interrupt only at END of the transfer.
 * Activated by the channel's TXI IRQ.
 *
 * Source/destination addresses and lengths are filled in at transfer setup
 * time; they start out NULL/0 here.
 */
#define UART_RA_SCI_B_ASYNC_INIT(index)                                                            \
	.rx_transfer_info =                                                                        \
		{                                                                                  \
			.transfer_settings_word_b.dest_addr_mode = TRANSFER_ADDR_MODE_INCREMENTED, \
			.transfer_settings_word_b.repeat_area = TRANSFER_REPEAT_AREA_DESTINATION,  \
			.transfer_settings_word_b.irq = TRANSFER_IRQ_EACH,                         \
			.transfer_settings_word_b.chain_mode = TRANSFER_CHAIN_MODE_DISABLED,       \
			.transfer_settings_word_b.src_addr_mode = TRANSFER_ADDR_MODE_FIXED,        \
			.transfer_settings_word_b.size = TRANSFER_SIZE_1_BYTE,                     \
			.transfer_settings_word_b.mode = TRANSFER_MODE_NORMAL,                     \
			.p_dest = (void *)NULL,                                                    \
			.p_src = (void const *)NULL,                                               \
			.num_blocks = 0,                                                           \
			.length = 0,                                                               \
		},                                                                                 \
	.rx_transfer_cfg_extend = {.activation_source =                                            \
					   DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq)},       \
	.rx_transfer_cfg =                                                                         \
		{                                                                                  \
			.p_info = &uart_ra_sci_b_data_##index.rx_transfer_info,                    \
			.p_extend = &uart_ra_sci_b_data_##index.rx_transfer_cfg_extend,            \
		},                                                                                 \
	.rx_transfer =                                                                             \
		{                                                                                  \
			.p_ctrl = &uart_ra_sci_b_data_##index.rx_transfer_ctrl,                    \
			.p_cfg = &uart_ra_sci_b_data_##index.rx_transfer_cfg,                      \
			.p_api = &g_transfer_on_dtc,                                               \
		},                                                                                 \
	.tx_transfer_info =                                                                        \
		{                                                                                  \
			.transfer_settings_word_b.dest_addr_mode = TRANSFER_ADDR_MODE_FIXED,       \
			.transfer_settings_word_b.repeat_area = TRANSFER_REPEAT_AREA_SOURCE,       \
			.transfer_settings_word_b.irq = TRANSFER_IRQ_END,                          \
			.transfer_settings_word_b.chain_mode = TRANSFER_CHAIN_MODE_DISABLED,       \
			.transfer_settings_word_b.src_addr_mode = TRANSFER_ADDR_MODE_INCREMENTED,  \
			.transfer_settings_word_b.size = TRANSFER_SIZE_1_BYTE,                     \
			.transfer_settings_word_b.mode = TRANSFER_MODE_NORMAL,                     \
			.p_dest = (void *)NULL,                                                    \
			.p_src = (void const *)NULL,                                               \
			.num_blocks = 0,                                                           \
			.length = 0,                                                               \
		},                                                                                 \
	.tx_transfer_cfg_extend = {.activation_source =                                            \
					   DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq)},       \
	.tx_transfer_cfg =                                                                         \
		{                                                                                  \
			.p_info = &uart_ra_sci_b_data_##index.tx_transfer_info,                    \
			.p_extend = &uart_ra_sci_b_data_##index.tx_transfer_cfg_extend,            \
		},                                                                                 \
	.tx_transfer = {                                                                           \
		.p_ctrl = &uart_ra_sci_b_data_##index.tx_transfer_ctrl,                            \
		.p_cfg = &uart_ra_sci_b_data_##index.tx_transfer_cfg,                              \
		.p_api = &g_transfer_on_dtc,                                                       \
	},

#else
#define UART_RA_SCI_B_ASYNC_INIT(index)
#define UART_RA_SCI_B_DTC_INIT(index)
#endif
1110
/*
 * Per-instance instantiation: defines pinctrl state, the const config, the
 * mutable data struct (UART defaults from devicetree, FSP IRQ wiring from
 * the parent SCI node, optional async DTC descriptors), an init wrapper
 * that installs DTC transfers and IRQs before the common init, and finally
 * registers the device with Zephyr.
 */
#define UART_RA_SCI_B_INIT(index)                                                                  \
	PINCTRL_DT_DEFINE(DT_INST_PARENT(index));                                                  \
                                                                                                   \
	static const struct uart_ra_sci_b_config uart_ra_sci_b_config_##index = {                  \
		.pcfg = PINCTRL_DT_DEV_CONFIG_GET(DT_INST_PARENT(index)),                          \
		.regs = (R_SCI_B0_Type *)DT_REG_ADDR(DT_INST_PARENT(index)),                       \
	};                                                                                         \
                                                                                                   \
	static struct uart_ra_sci_b_data uart_ra_sci_b_data_##index = {                            \
		.uart_config =                                                                     \
			{                                                                          \
				.baudrate = DT_INST_PROP(index, current_speed),                    \
				.parity = UART_CFG_PARITY_NONE,                                    \
				.stop_bits = UART_CFG_STOP_BITS_1,                                 \
				.data_bits = UART_CFG_DATA_BITS_8,                                 \
				/* Fix: was DT_NODE_HAS_PROP(idx, ...) — `idx` is not the macro    \
				 * parameter (which is `index`) nor a node id, so flow control     \
				 * could never be enabled from devicetree.                         \
				 */                                                                \
				.flow_ctrl = COND_CODE_1(                                          \
					DT_INST_NODE_HAS_PROP(index, hw_flow_control),             \
					(UART_CFG_FLOW_CTRL_RTS_CTS), (UART_CFG_FLOW_CTRL_NONE)),  \
			},                                                                         \
		.fsp_config =                                                                      \
			{                                                                          \
				.channel = DT_INST_PROP(index, channel),                           \
				.rxi_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, priority),   \
				.rxi_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), rxi, irq),        \
				.txi_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, priority),   \
				.txi_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), txi, irq),        \
				.tei_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, priority),   \
				.tei_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), tei, irq),        \
				.eri_ipl = DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, priority),   \
				.eri_irq = DT_IRQ_BY_NAME(DT_INST_PARENT(index), eri, irq),        \
			},                                                                         \
		.fsp_config_extend = {},                                                           \
		.fsp_baud_setting = {},                                                            \
		.dev = DEVICE_DT_GET(DT_DRV_INST(index)),                                          \
		UART_RA_SCI_B_ASYNC_INIT(index)};                                                  \
                                                                                                   \
	static int uart_ra_sci_b_init_##index(const struct device *dev)                            \
	{                                                                                          \
		UART_RA_SCI_B_DTC_INIT(index);                                                     \
		UART_RA_SCI_B_IRQ_CONFIG_INIT(index);                                              \
		return uart_ra_sci_b_init(dev);                                                    \
	}                                                                                          \
                                                                                                   \
	DEVICE_DT_INST_DEFINE(index, uart_ra_sci_b_init_##index, NULL,                             \
			      &uart_ra_sci_b_data_##index, &uart_ra_sci_b_config_##index,          \
			      PRE_KERNEL_1, CONFIG_SERIAL_INIT_PRIORITY,                           \
			      &uart_ra_sci_b_driver_api);
1162
/* Instantiate the driver for every enabled (status = "okay") compatible node. */
DT_INST_FOREACH_STATUS_OKAY(UART_RA_SCI_B_INIT)
1164