/*
 * Copyright (c) 2021, Thomas Stranger
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/*
 * This is not a real serial driver. It is used to instantiate struct
 * devices for the "vnd,serial" devicetree compatible used in test code.
 */
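/*
 * Example (a sketch, not part of the driver): test code typically talks to an
 * instance of this driver through the helpers declared in
 * <zephyr/drivers/uart/serial_test.h>. The `test_uart` node label below is
 * hypothetical and depends on the test's devicetree overlay.
 *
 *	const struct device *dev = DEVICE_DT_GET(DT_NODELABEL(test_uart));
 *	unsigned char out;
 *
 *	serial_vnd_queue_in_data(dev, (const unsigned char *)"ab", 2); // bytes the UART will "receive"
 *	uart_poll_out(dev, 'x');                // driver stores TX data in its ring buffer
 *	serial_vnd_read_out_data(dev, &out, 1); // test reads back what was "transmitted"
 */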

#include <stdbool.h>

#include <zephyr/sys/__assert.h>
#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/drivers/uart/serial_test.h>
#include <zephyr/kernel.h>
#include <zephyr/logging/log.h>
#include <zephyr/sys/ring_buffer.h>

LOG_MODULE_REGISTER(mock_serial, CONFIG_LOG_DEFAULT_LEVEL);

#define DT_DRV_COMPAT vnd_serial

struct serial_vnd_data {
#ifdef CONFIG_RING_BUFFER
	struct ring_buf *written;
	struct ring_buf *read_queue;
#endif
	serial_vnd_write_cb_t callback;
	void *callback_data;
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	uart_irq_callback_user_data_t irq_isr;
	bool irq_rx_enabled;
	bool irq_tx_enabled;
#endif
#ifdef CONFIG_UART_ASYNC_API
	uart_callback_t async_cb;
	void *async_cb_user_data;
	uint8_t *read_buf;
	size_t read_size;
	size_t read_position;
#endif
};

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
static bool is_irq_rx_pending(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	return !ring_buf_is_empty(data->read_queue);
}

static bool is_irq_tx_pending(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	return ring_buf_space_get(data->written) != 0;
}

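/*
 * Emulates the interrupt controller: while RX data is queued or TX space is
 * available, and the corresponding interrupt is enabled, keep invoking the
 * registered ISR, much as real hardware would keep the interrupt asserted.
 */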
static void irq_process(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	for (;;) {
		bool rx_rdy = is_irq_rx_pending(dev);
		bool tx_rdy = is_irq_tx_pending(dev);
		bool rx_int = rx_rdy && data->irq_rx_enabled;
		bool tx_int = tx_rdy && data->irq_tx_enabled;

		LOG_DBG("rx_rdy %d tx_rdy %d", rx_rdy, tx_rdy);
		LOG_DBG("rx_int %d tx_int %d", rx_int, tx_int);

		if (!(rx_int || tx_int)) {
			break;
		}

		LOG_DBG("isr");
		if (!data->irq_isr) {
			LOG_ERR("no isr registered");
			break;
		}
		data->irq_isr(dev, NULL);
	}
}

static void irq_rx_enable(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	data->irq_rx_enabled = true;
	LOG_DBG("rx enabled");
	irq_process(dev);
}

static void irq_rx_disable(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	data->irq_rx_enabled = false;
	LOG_DBG("rx disabled");
}

static int irq_rx_ready(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;
	bool ready = !ring_buf_is_empty(data->read_queue);

	LOG_DBG("rx ready: %d", ready);
	return ready;
}

static void irq_tx_enable(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	LOG_DBG("tx enabled");
	data->irq_tx_enabled = true;
	irq_process(dev);
}

static void irq_tx_disable(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	data->irq_tx_enabled = false;
	LOG_DBG("tx disabled");
}

static int irq_tx_ready(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;
	bool ready = (ring_buf_space_get(data->written) != 0);

	LOG_DBG("tx ready: %d", ready);
	return ready;
}

static void irq_callback_set(const struct device *dev, uart_irq_callback_user_data_t cb,
			     void *user_data)
{
	struct serial_vnd_data *data = dev->data;

	/* Not implemented. Ok because `user_data` is always NULL in the current
	 * implementation of core UART API.
	 */
	__ASSERT_NO_MSG(user_data == NULL);

#if defined(CONFIG_UART_EXCLUSIVE_API_CALLBACKS) && defined(CONFIG_UART_ASYNC_API)
	if (data->read_buf) {
		LOG_ERR("Setting callback to NULL while asynchronous API is in use.");
	}
	data->async_cb = NULL;
	data->async_cb_user_data = NULL;
#endif

	data->irq_isr = cb;
	LOG_DBG("callback set");
}

static int fifo_fill(const struct device *dev, const uint8_t *tx_data, int size)
{
	struct serial_vnd_data *data = dev->data;
	uint32_t write_len = ring_buf_put(data->written, tx_data, size);

	if (data->callback) {
		data->callback(dev, data->callback_data);
	}
	return write_len;
}

static int fifo_read(const struct device *dev, uint8_t *rx_data, const int size)
{
	struct serial_vnd_data *data = dev->data;
	int read_len = ring_buf_get(data->read_queue, rx_data, size);

	LOG_HEXDUMP_DBG(rx_data, read_len, "");
	return read_len;
}
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */

static int serial_vnd_poll_in(const struct device *dev, unsigned char *c)
{
#ifdef CONFIG_RING_BUFFER
	struct serial_vnd_data *data = dev->data;
	uint32_t bytes_read;

	if (data == NULL || data->read_queue == NULL) {
		return -ENOTSUP;
	}
	bytes_read = ring_buf_get(data->read_queue, c, 1);
	if (bytes_read == 1) {
		return 0;
	}
	return -1;
#else
	return -ENOTSUP;
#endif
}

static void serial_vnd_poll_out(const struct device *dev, unsigned char c)
{
	struct serial_vnd_data *data = dev->data;

#ifdef CONFIG_RING_BUFFER
	if (data == NULL || data->written == NULL) {
		return;
	}
	ring_buf_put(data->written, &c, 1);
#endif
	if (data->callback) {
		data->callback(dev, data->callback_data);
	}
}

#ifdef CONFIG_UART_ASYNC_API
static void async_rx_run(const struct device *dev);
#endif

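/*
 * Test-facing helpers (see <zephyr/drivers/uart/serial_test.h>): tests can
 * queue bytes for the driver to "receive" and can inspect or consume bytes
 * the driver has "transmitted" into its ring buffer.
 */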
#ifdef CONFIG_RING_BUFFER
int serial_vnd_queue_in_data(const struct device *dev, const unsigned char *c, uint32_t size)
{
	struct serial_vnd_data *data = dev->data;
	int write_size;

	if (data == NULL || data->read_queue == NULL) {
		return -ENOTSUP;
	}
	write_size = ring_buf_put(data->read_queue, c, size);

	LOG_DBG("size %u write_size %u", size, write_size);
	LOG_HEXDUMP_DBG(c, write_size, "");

#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	if (write_size > 0) {
		irq_process(dev);
	}
#endif

#ifdef CONFIG_UART_ASYNC_API
	async_rx_run(dev);
#endif

	return write_size;
}

uint32_t serial_vnd_out_data_size_get(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;

	if (data == NULL || data->written == NULL) {
		return -ENOTSUP;
	}
	return ring_buf_size_get(data->written);
}

uint32_t serial_vnd_read_out_data(const struct device *dev, unsigned char *out_data, uint32_t size)
{
	struct serial_vnd_data *data = dev->data;

	if (data == NULL || data->written == NULL) {
		return -ENOTSUP;
	}
	return ring_buf_get(data->written, out_data, size);
}

uint32_t serial_vnd_peek_out_data(const struct device *dev, unsigned char *out_data, uint32_t size)
{
	struct serial_vnd_data *data = dev->data;

	if (data == NULL || data->written == NULL) {
		return -ENOTSUP;
	}
	return ring_buf_peek(data->written, out_data, size);
}
#endif

void serial_vnd_set_callback(const struct device *dev, serial_vnd_write_cb_t callback,
			     void *user_data)
{
	struct serial_vnd_data *data = dev->data;

	if (data == NULL) {
		return;
	}
	data->callback = callback;
	data->callback_data = user_data;
}

static int serial_vnd_err_check(const struct device *dev)
{
	return -ENOTSUP;
}

#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
static int serial_vnd_configure(const struct device *dev, const struct uart_config *cfg)
{
	return -ENOTSUP;
}

static int serial_vnd_config_get(const struct device *dev, struct uart_config *cfg)
{
	return -ENOTSUP;
}
#endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */

#ifdef CONFIG_UART_ASYNC_API
static int serial_vnd_callback_set(const struct device *dev, uart_callback_t callback,
				   void *user_data)
{
	struct serial_vnd_data *data = dev->data;

	if (data == NULL) {
		return -ENOTSUP;
	}

#if defined(CONFIG_UART_EXCLUSIVE_API_CALLBACKS) && defined(CONFIG_UART_INTERRUPT_DRIVEN)
	data->irq_isr = NULL;
#endif

	if (callback == NULL && data->read_buf) {
		LOG_ERR("Setting callback to NULL while asynchronous API is in use.");
	}

	data->async_cb = callback;
	data->async_cb_user_data = user_data;

	return 0;
}

static int serial_vnd_api_tx(const struct device *dev, const uint8_t *tx_data, size_t len,
			     int32_t timeout)
{
	struct serial_vnd_data *data = dev->data;
	struct uart_event evt;
	uint32_t write_len;

	if (data == NULL) {
		return -ENOTSUP;
	}

	if (data->async_cb == NULL) {
		return -EINVAL;
	}

	write_len = ring_buf_put(data->written, tx_data, len);
	if (data->callback) {
		data->callback(dev, data->callback_data);
	}

	__ASSERT(write_len == len, "Ring buffer full. Async wait not implemented.");

	evt = (struct uart_event){
		.type = UART_TX_DONE,
		.data.tx.buf = tx_data,
		.data.tx.len = len,
	};
	data->async_cb(dev, &evt, data->async_cb_user_data);

	return 0;
}

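/*
 * Drains the RX queue into the buffer supplied via uart_rx_enable(): emits
 * UART_RX_RDY for any bytes copied and, once the buffer is full, releases it
 * and emits UART_RX_DISABLED. Buffer chaining is not implemented.
 */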
static void async_rx_run(const struct device *dev)
{
	struct serial_vnd_data *data = dev->data;
	struct uart_event evt;
	uint32_t read_len;
	uint32_t read_remaining;

	if (!data->read_buf) {
		return;
	}

	__ASSERT_NO_MSG(data->async_cb);

	read_remaining = data->read_size - data->read_position;

	read_len = ring_buf_get(data->read_queue, &data->read_buf[data->read_position],
				read_remaining);

	if (read_len != 0) {
		evt = (struct uart_event){
			.type = UART_RX_RDY,
			.data.rx.buf = data->read_buf,
			.data.rx.len = read_len,
			.data.rx.offset = data->read_position,
		};
		data->async_cb(dev, &evt, data->async_cb_user_data);
	}

	data->read_position += read_len;

	if (data->read_position == data->read_size) {
		data->read_buf = NULL;
		evt = (struct uart_event){
			.type = UART_RX_DISABLED,
		};
		data->async_cb(dev, &evt, data->async_cb_user_data);
	}
}

static int serial_vnd_rx_enable(const struct device *dev, uint8_t *read_buf, size_t read_size,
				int32_t timeout)
{
	struct serial_vnd_data *data = dev->data;

	LOG_WRN("read_size %zu", read_size);

	if (data == NULL) {
		return -ENOTSUP;
	}

	if (data->async_cb == NULL) {
		return -EINVAL;
	}

	__ASSERT(timeout == SYS_FOREVER_MS, "Async timeout not implemented.");

	data->read_buf = read_buf;
	data->read_size = read_size;
	data->read_position = 0;

	async_rx_run(dev);

	return 0;
}
#endif /* CONFIG_UART_ASYNC_API */

static const struct uart_driver_api serial_vnd_api = {
	.poll_in = serial_vnd_poll_in,
	.poll_out = serial_vnd_poll_out,
	.err_check = serial_vnd_err_check,
#ifdef CONFIG_UART_USE_RUNTIME_CONFIGURE
	.configure = serial_vnd_configure,
	.config_get = serial_vnd_config_get,
#endif /* CONFIG_UART_USE_RUNTIME_CONFIGURE */
#ifdef CONFIG_UART_INTERRUPT_DRIVEN
	.irq_callback_set = irq_callback_set,
	.irq_rx_enable = irq_rx_enable,
	.irq_rx_disable = irq_rx_disable,
	.irq_rx_ready = irq_rx_ready,
	.irq_tx_enable = irq_tx_enable,
	.irq_tx_disable = irq_tx_disable,
	.irq_tx_ready = irq_tx_ready,
	.fifo_read = fifo_read,
	.fifo_fill = fifo_fill,
#endif /* CONFIG_UART_INTERRUPT_DRIVEN */
#ifdef CONFIG_UART_ASYNC_API
	.callback_set = serial_vnd_callback_set,
	.tx = serial_vnd_api_tx,
	.rx_enable = serial_vnd_rx_enable,
#endif /* CONFIG_UART_ASYNC_API */
};

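/*
 * Per-instance data: instances whose devicetree node has a `buffer-size`
 * property get TX ("written") and RX ("read_queue") ring buffers of that
 * size; instances without it get an empty data struct and no buffering.
 */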
#define VND_SERIAL_DATA_BUFFER(n)                                                          \
	RING_BUF_DECLARE(written_data_##n, DT_INST_PROP(n, buffer_size));                  \
	RING_BUF_DECLARE(read_queue_##n, DT_INST_PROP(n, buffer_size));                    \
	static struct serial_vnd_data serial_vnd_data_##n = {                              \
		.written = &written_data_##n,                                               \
		.read_queue = &read_queue_##n,                                              \
	};
#define VND_SERIAL_DATA(n) static struct serial_vnd_data serial_vnd_data_##n = {};
#define VND_SERIAL_INIT(n)                                                                  \
	COND_CODE_1(DT_INST_NODE_HAS_PROP(n, buffer_size), (VND_SERIAL_DATA_BUFFER(n)),    \
		    (VND_SERIAL_DATA(n)))                                                   \
	DEVICE_DT_INST_DEFINE(n, NULL, NULL, &serial_vnd_data_##n, NULL, POST_KERNEL,      \
			      CONFIG_SERIAL_INIT_PRIORITY, &serial_vnd_api);

DT_INST_FOREACH_STATUS_OKAY(VND_SERIAL_INIT)