/*
 * Copyright (c) 2021, ATL Electronics
 * SPDX-License-Identifier: Apache-2.0
 */

#define DT_DRV_COMPAT gd_gd32_usart

#include <errno.h>

#include <zephyr/drivers/clock_control.h>
#include <zephyr/drivers/clock_control/gd32.h>
#include <zephyr/drivers/pinctrl.h>
#include <zephyr/drivers/reset.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/irq.h>

#include <gd32_usart.h>

/* Unify the GD32 HAL USART status register name to USART_STAT */
#ifndef USART_STAT
#define USART_STAT USART_STAT0
#endif

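/* Constant, build-time configuration; filled from devicetree in GD32_USART_INIT() */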
struct gd32_usart_config {
	uint32_t reg;
	uint16_t clkid;
	struct reset_dt_spec reset;
	const struct pinctrl_dev_config *pcfg;
	uint32_t parity;
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	uart_irq_config_func_t irq_config_func;
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
};

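/* Per-instance runtime state */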
struct gd32_usart_data {
	uint32_t baud_rate;
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	uart_irq_callback_user_data_t user_cb;
	void *user_data;
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
};

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
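/* Shared ISR: invoke the user callback registered via the irq_callback_set API, if any */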
static void usart_gd32_isr(const struct device *dev)
{
	struct gd32_usart_data *const data = dev->data;

	if (data->user_cb) {
		data->user_cb(dev, data->user_data);
	}
}
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

static int usart_gd32_init(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;
	struct gd32_usart_data *const data = dev->data;
	uint32_t word_length;
	uint32_t parity;
	int ret;

	ret = pinctrl_apply_state(cfg->pcfg, PINCTRL_STATE_DEFAULT);
	if (ret < 0) {
		return ret;
	}

	/**
	 * To keep the transferred data size at 8 bits (1 byte), extend the
	 * word length to 9 bits when a parity bit is enabled.
	 */
	switch (cfg->parity) {
	case UART_CFG_PARITY_NONE:
		parity = USART_PM_NONE;
		word_length = USART_WL_8BIT;
		break;
	case UART_CFG_PARITY_ODD:
		parity = USART_PM_ODD;
		word_length = USART_WL_9BIT;
		break;
	case UART_CFG_PARITY_EVEN:
		parity = USART_PM_EVEN;
		word_length = USART_WL_9BIT;
		break;
	default:
		return -ENOTSUP;
	}

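	/* Enable the peripheral clock and pulse the reset line before touching registers */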
	(void)clock_control_on(GD32_CLOCK_CONTROLLER,
			       (clock_control_subsys_t)&cfg->clkid);

	(void)reset_line_toggle_dt(&cfg->reset);

	usart_baudrate_set(cfg->reg, data->baud_rate);
	usart_parity_config(cfg->reg, parity);
	usart_word_length_set(cfg->reg, word_length);
	/* Default to 1 stop bit */
	usart_stop_bit_set(cfg->reg, USART_STB_1BIT);
	usart_receive_config(cfg->reg, USART_RECEIVE_ENABLE);
	usart_transmit_config(cfg->reg, USART_TRANSMIT_ENABLE);
	usart_enable(cfg->reg);

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	cfg->irq_config_func(dev);
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

	return 0;
}

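/* Note: -EPERM equals -1, uart_poll_in()'s return value when no character is available */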
static int usart_gd32_poll_in(const struct device *dev, unsigned char *c)
{
	const struct gd32_usart_config *const cfg = dev->config;
	uint32_t status;

	status = usart_flag_get(cfg->reg, USART_FLAG_RBNE);

	if (!status) {
		return -EPERM;
	}

	*c = usart_data_receive(cfg->reg);

	return 0;
}

static void usart_gd32_poll_out(const struct device *dev, unsigned char c)
{
	const struct gd32_usart_config *const cfg = dev->config;

	usart_data_transmit(cfg->reg, c);

	while (usart_flag_get(cfg->reg, USART_FLAG_TBE) == RESET) {
		;
	}
}

static int usart_gd32_err_check(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;
	uint32_t status = USART_STAT(cfg->reg);
	int errors = 0;

	if (status & USART_FLAG_ORERR) {
		usart_flag_clear(cfg->reg, USART_FLAG_ORERR);

		errors |= UART_ERROR_OVERRUN;
	}

	if (status & USART_FLAG_PERR) {
		usart_flag_clear(cfg->reg, USART_FLAG_PERR);

		errors |= UART_ERROR_PARITY;
	}

	if (status & USART_FLAG_FERR) {
		usart_flag_clear(cfg->reg, USART_FLAG_FERR);

		errors |= UART_ERROR_FRAMING;
	}

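	/* Noise errors are cleared but not reported; the Zephyr UART API has no matching error bit */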
	usart_flag_clear(cfg->reg, USART_FLAG_NERR);

	return errors;
}

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
int usart_gd32_fifo_fill(const struct device *dev, const uint8_t *tx_data,
			 int len)
{
	const struct gd32_usart_config *const cfg = dev->config;
	int num_tx = 0U;

	while ((len - num_tx > 0) &&
	       usart_flag_get(cfg->reg, USART_FLAG_TBE)) {
		usart_data_transmit(cfg->reg, tx_data[num_tx++]);
	}

	return num_tx;
}

int usart_gd32_fifo_read(const struct device *dev, uint8_t *rx_data,
			 const int size)
{
	const struct gd32_usart_config *const cfg = dev->config;
	int num_rx = 0U;

	while ((size - num_rx > 0) &&
	       usart_flag_get(cfg->reg, USART_FLAG_RBNE)) {
		rx_data[num_rx++] = usart_data_receive(cfg->reg);
	}

	return num_rx;
}

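/* TX interrupts in this driver are driven by the transmission complete (TC) flag */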
void usart_gd32_irq_tx_enable(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	usart_interrupt_enable(cfg->reg, USART_INT_TC);
}

void usart_gd32_irq_tx_disable(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	usart_interrupt_disable(cfg->reg, USART_INT_TC);
}

int usart_gd32_irq_tx_ready(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	return usart_flag_get(cfg->reg, USART_FLAG_TBE) &&
	       usart_interrupt_flag_get(cfg->reg, USART_INT_FLAG_TC);
}

int usart_gd32_irq_tx_complete(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	return usart_flag_get(cfg->reg, USART_FLAG_TC);
}

void usart_gd32_irq_rx_enable(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	usart_interrupt_enable(cfg->reg, USART_INT_RBNE);
}

void usart_gd32_irq_rx_disable(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	usart_interrupt_disable(cfg->reg, USART_INT_RBNE);
}

int usart_gd32_irq_rx_ready(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	return usart_flag_get(cfg->reg, USART_FLAG_RBNE);
}

void usart_gd32_irq_err_enable(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	usart_interrupt_enable(cfg->reg, USART_INT_ERR);
	usart_interrupt_enable(cfg->reg, USART_INT_PERR);
}

void usart_gd32_irq_err_disable(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	usart_interrupt_disable(cfg->reg, USART_INT_ERR);
	usart_interrupt_disable(cfg->reg, USART_INT_PERR);
}

int usart_gd32_irq_is_pending(const struct device *dev)
{
	const struct gd32_usart_config *const cfg = dev->config;

	return ((usart_flag_get(cfg->reg, USART_FLAG_RBNE) &&
		 usart_interrupt_flag_get(cfg->reg, USART_INT_FLAG_RBNE)) ||
		(usart_flag_get(cfg->reg, USART_FLAG_TC) &&
		 usart_interrupt_flag_get(cfg->reg, USART_INT_FLAG_TC)));
}

int usart_gd32_irq_update(const struct device *dev)
{
	ARG_UNUSED(dev);
	return 1;
}

void usart_gd32_irq_callback_set(const struct device *dev,
				 uart_irq_callback_user_data_t cb,
				 void *user_data)
{
	struct gd32_usart_data *const data = dev->data;

	data->user_cb = cb;
	data->user_data = user_data;
}
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

static DEVICE_API(uart, usart_gd32_driver_api) = {
	.poll_in = usart_gd32_poll_in,
	.poll_out = usart_gd32_poll_out,
	.err_check = usart_gd32_err_check,
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	.fifo_fill = usart_gd32_fifo_fill,
	.fifo_read = usart_gd32_fifo_read,
	.irq_tx_enable = usart_gd32_irq_tx_enable,
	.irq_tx_disable = usart_gd32_irq_tx_disable,
	.irq_tx_ready = usart_gd32_irq_tx_ready,
	.irq_tx_complete = usart_gd32_irq_tx_complete,
	.irq_rx_enable = usart_gd32_irq_rx_enable,
	.irq_rx_disable = usart_gd32_irq_rx_disable,
	.irq_rx_ready = usart_gd32_irq_rx_ready,
	.irq_err_enable = usart_gd32_irq_err_enable,
	.irq_err_disable = usart_gd32_irq_err_disable,
	.irq_is_pending = usart_gd32_irq_is_pending,
	.irq_update = usart_gd32_irq_update,
	.irq_callback_set = usart_gd32_irq_callback_set,
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
};

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
#define GD32_USART_IRQ_HANDLER(n)					\
	static void usart_gd32_config_func_##n(const struct device *dev) \
	{								\
		IRQ_CONNECT(DT_INST_IRQN(n),				\
			    DT_INST_IRQ(n, priority),			\
			    usart_gd32_isr,				\
			    DEVICE_DT_INST_GET(n),			\
			    0);						\
		irq_enable(DT_INST_IRQN(n));				\
	}
#define GD32_USART_IRQ_HANDLER_FUNC_INIT(n)				\
	.irq_config_func = usart_gd32_config_func_##n
#else /* CONFIG_UART_INTERRUPT_DRIVEN */
#define GD32_USART_IRQ_HANDLER(n)
#define GD32_USART_IRQ_HANDLER_FUNC_INIT(n)
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

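/* Define one driver instance per enabled gd,gd32-usart devicetree node */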
#define GD32_USART_INIT(n)						\
	PINCTRL_DT_INST_DEFINE(n);					\
	GD32_USART_IRQ_HANDLER(n)					\
	static struct gd32_usart_data usart_gd32_data_##n = {		\
		.baud_rate = DT_INST_PROP(n, current_speed),		\
	};								\
	static const struct gd32_usart_config usart_gd32_config_##n = { \
		.reg = DT_INST_REG_ADDR(n),				\
		.clkid = DT_INST_CLOCKS_CELL(n, id),			\
		.reset = RESET_DT_SPEC_INST_GET(n),			\
		.pcfg = PINCTRL_DT_INST_DEV_CONFIG_GET(n),		\
		.parity = DT_INST_ENUM_IDX_OR(n, parity, UART_CFG_PARITY_NONE), \
		GD32_USART_IRQ_HANDLER_FUNC_INIT(n)			\
	};								\
	DEVICE_DT_INST_DEFINE(n, usart_gd32_init,			\
			      NULL,					\
			      &usart_gd32_data_##n,			\
			      &usart_gd32_config_##n, PRE_KERNEL_1,	\
			      CONFIG_SERIAL_INIT_PRIORITY,		\
			      &usart_gd32_driver_api);

DT_INST_FOREACH_STATUS_OKAY(GD32_USART_INIT)