Lines matching full:data in the Zephyr UART emulator serial driver

32 /* Device run time data */
112 struct uart_emul_data *data = dev->data; in uart_emul_tx_data_ready() local
115 if (data->tx_data_ready_cb) { in uart_emul_tx_data_ready()
116 (data->tx_data_ready_cb)(dev, ring_buf_size_get(data->tx_rb), data->user_data); in uart_emul_tx_data_ready()
118 SYS_SLIST_FOR_EACH_NODE(&data->emuls, node) { in uart_emul_tx_data_ready()
124 emul->api->tx_data_ready(dev, ring_buf_size_get(data->tx_rb), emul->target); in uart_emul_tx_data_ready()
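
The lines above show the driver invoking an optional "TX data ready" callback and notifying each registered emulated peripheral on the emuls list whenever bytes land in the TX ring buffer. Below is a minimal sketch of how a test might hook that callback through the emulator helper API; the device node label euart0 and the buffer size are assumptions, not part of the listing.

	#include <zephyr/device.h>
	#include <zephyr/sys/util.h>
	#include <zephyr/drivers/serial/uart_emul.h>

	/* Hypothetical node label for the emulated UART instance. */
	static const struct device *const uart_dev = DEVICE_DT_GET(DT_NODELABEL(euart0));

	/* Invoked by the driver whenever data is appended to the TX ring buffer. */
	static void on_tx_data_ready(const struct device *dev, size_t size, void *user_data)
	{
		uint8_t buf[64];
		uint32_t n;

		ARG_UNUSED(user_data);

		/* Drain whatever the application just "transmitted". */
		n = uart_emul_get_tx_data(dev, buf, MIN(size, sizeof(buf)));
		ARG_UNUSED(n);
	}

	void install_tx_ready_hook(void)
	{
		uart_emul_callback_tx_data_ready_set(uart_dev, on_tx_data_ready, NULL);
	}
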
130 struct uart_emul_data *drv_data = dev->data; in uart_emul_poll_in()
148 struct uart_emul_data *drv_data = dev->data; in uart_emul_poll_out()
171 struct uart_emul_data *drv_data = dev->data; in uart_emul_err_check()
181 struct uart_emul_data *drv_data = dev->data; in uart_emul_configure()
189 const struct uart_emul_data *drv_data = dev->data; in uart_emul_config_get()
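
The poll_in/poll_out, err_check, and configure entry points above operate directly on the driver data and its RX/TX ring buffers, so a polled round trip is easy to exercise from a test. A sketch, assuming an already-resolved emulated UART device pointer:

	#include <zephyr/drivers/uart.h>
	#include <zephyr/drivers/serial/uart_emul.h>

	void polled_round_trip(const struct device *dev)
	{
		uint8_t in = 0x55;
		uint8_t out;
		unsigned char c;

		/* Inject a byte into the RX ring buffer, then read it back via the UART API. */
		uart_emul_put_rx_data(dev, &in, 1);
		if (uart_poll_in(dev, &c) == 0) {
			/* c now holds 0x55 */
		}

		/* Transmit a byte and fetch it from the TX ring buffer. */
		uart_poll_out(dev, 'A');
		uart_emul_get_tx_data(dev, &out, 1);
	}
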
200 struct uart_emul_data *data = dev->data; in uart_emul_fifo_fill() local
204 K_SPINLOCK(&data->tx_lock) { in uart_emul_fifo_fill()
205 ret = ring_buf_put(data->tx_rb, tx_data, put_size); in uart_emul_fifo_fill()
219 struct uart_emul_data *data = dev->data; in uart_emul_fifo_read() local
223 K_SPINLOCK(&data->rx_lock) { in uart_emul_fifo_read()
224 bytes_to_read = MIN(config->latch_buffer_size, ring_buf_size_get(data->rx_rb)); in uart_emul_fifo_read()
226 ring_buf_get(data->rx_rb, rx_data, bytes_to_read); in uart_emul_fifo_read()
235 struct uart_emul_data *data = dev->data; in uart_emul_irq_tx_ready() local
237 K_SPINLOCK(&data->tx_lock) { in uart_emul_irq_tx_ready()
238 if (!data->tx_irq_en) { in uart_emul_irq_tx_ready()
242 available = ring_buf_space_get(data->tx_rb); in uart_emul_irq_tx_ready()
251 struct uart_emul_data *data = dev->data; in uart_emul_irq_rx_ready() local
253 K_SPINLOCK(&data->rx_lock) { in uart_emul_irq_rx_ready()
254 if (!data->rx_irq_en) { in uart_emul_irq_rx_ready()
258 ready = !ring_buf_is_empty(data->rx_rb); in uart_emul_irq_rx_ready()
266 struct uart_emul_data *data = CONTAINER_OF(work, struct uart_emul_data, irq_work); in uart_emul_irq_handler() local
267 const struct device *dev = data->dev; in uart_emul_irq_handler()
268 uart_irq_callback_user_data_t cb = data->irq_cb; in uart_emul_irq_handler()
269 void *udata = data->irq_cb_udata; in uart_emul_irq_handler()
279 K_SPINLOCK(&data->tx_lock) { in uart_emul_irq_handler()
280 if (!data->tx_irq_en) { in uart_emul_irq_handler()
284 have_work = have_work || ring_buf_space_get(data->tx_rb) > 0; in uart_emul_irq_handler()
287 K_SPINLOCK(&data->rx_lock) { in uart_emul_irq_handler()
288 if (!data->rx_irq_en) { in uart_emul_irq_handler()
292 have_work = have_work || !ring_buf_is_empty(data->rx_rb); in uart_emul_irq_handler()
311 struct uart_emul_data *const data = dev->data; in uart_emul_irq_tx_enable() local
313 K_SPINLOCK(&data->tx_lock) { in uart_emul_irq_tx_enable()
314 data->tx_irq_en = true; in uart_emul_irq_tx_enable()
315 submit_irq_work = ring_buf_space_get(data->tx_rb) > 0; in uart_emul_irq_tx_enable()
319 (void)k_work_submit_to_queue(&uart_emul_work_q, &data->irq_work); in uart_emul_irq_tx_enable()
326 struct uart_emul_data *const data = dev->data; in uart_emul_irq_rx_enable() local
328 K_SPINLOCK(&data->rx_lock) { in uart_emul_irq_rx_enable()
329 data->rx_irq_en = true; in uart_emul_irq_rx_enable()
330 submit_irq_work = !ring_buf_is_empty(data->rx_rb); in uart_emul_irq_rx_enable()
334 (void)k_work_submit_to_queue(&uart_emul_work_q, &data->irq_work); in uart_emul_irq_rx_enable()
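
These fifo_fill/fifo_read, irq_*_ready, irq_handler, and irq_*_enable lines implement the interrupt-driven API: bytes move through the ring buffers under spinlocks (reads bounded by latch_buffer_size), and enabling the TX or RX interrupt submits irq_work to the emulator work queue, which invokes the registered callback while there is space to send or data to read. A consumer-side sketch of the standard Zephyr interrupt-driven pattern this serves (not specific to this driver; the buffer size is an assumption):

	#include <zephyr/drivers/uart.h>

	static void uart_isr_cb(const struct device *dev, void *user_data)
	{
		uint8_t buf[16];
		int n;

		ARG_UNUSED(user_data);

		if (!uart_irq_update(dev)) {
			return;
		}

		if (uart_irq_rx_ready(dev)) {
			do {
				n = uart_fifo_read(dev, buf, sizeof(buf));
				/* process n received bytes here */
			} while (n > 0);
		}

		if (uart_irq_tx_ready(dev)) {
			/* Nothing queued in this sketch; silence further TX interrupts. */
			uart_irq_tx_disable(dev);
		}
	}

	void start_interrupt_driven_rx(const struct device *dev)
	{
		uart_irq_callback_user_data_set(dev, uart_isr_cb, NULL);
		uart_irq_rx_enable(dev);
	}
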
340 struct uart_emul_data *const data = dev->data; in uart_emul_irq_tx_disable() local
342 K_SPINLOCK(&data->tx_lock) { in uart_emul_irq_tx_disable()
343 data->tx_irq_en = false; in uart_emul_irq_tx_disable()
349 struct uart_emul_data *const data = dev->data; in uart_emul_irq_rx_disable() local
351 K_SPINLOCK(&data->rx_lock) { in uart_emul_irq_rx_disable()
352 data->rx_irq_en = false; in uart_emul_irq_rx_disable()
359 struct uart_emul_data *const data = dev->data; in uart_emul_irq_tx_complete() local
361 K_SPINLOCK(&data->tx_lock) { in uart_emul_irq_tx_complete()
362 tx_complete = ring_buf_is_empty(data->tx_rb); in uart_emul_irq_tx_complete()
371 struct uart_emul_data *const data = dev->data; in uart_emul_irq_callback_set() local
373 data->irq_cb = cb; in uart_emul_irq_callback_set()
374 data->irq_cb_udata = user_data; in uart_emul_irq_callback_set()
386 struct uart_emul_data *data = dev->data; in uart_emul_post_event() local
388 if (!data->uart_callback) { in uart_emul_post_event()
392 data->uart_callback(dev, evt, data->callback_user_data); in uart_emul_post_event()
400 static void uart_emul_async_switch_buf_nolock(struct uart_emul_data *data) in uart_emul_async_switch_buf_nolock() argument
402 data->rx_buf = data->rx_buf_next; in uart_emul_async_switch_buf_nolock()
403 data->rx_buf_len = data->rx_buf_next_len; in uart_emul_async_switch_buf_nolock()
404 data->rx_buf_offset = 0; in uart_emul_async_switch_buf_nolock()
405 data->rx_buf_data_len = 0; in uart_emul_async_switch_buf_nolock()
406 data->rx_buf_next = NULL; in uart_emul_async_switch_buf_nolock()
407 data->rx_buf_next_len = 0; in uart_emul_async_switch_buf_nolock()
413 struct uart_emul_data *data = CONTAINER_OF(work, struct uart_emul_data, rx_timeout_work); in uart_emul_async_rx_timeout_handler() local
414 const struct device *dev = data->dev; in uart_emul_async_rx_timeout_handler()
424 K_SPINLOCK(&data->rx_lock) { in uart_emul_async_rx_timeout_handler()
425 rx_en = data->rx_async_en; in uart_emul_async_rx_timeout_handler()
426 rx_buf = data->rx_buf; in uart_emul_async_rx_timeout_handler()
427 rx_buf_len = data->rx_buf_len; in uart_emul_async_rx_timeout_handler()
428 rx_buf_offset = data->rx_buf_offset; in uart_emul_async_rx_timeout_handler()
429 rx_buf_data_len = data->rx_buf_data_len; in uart_emul_async_rx_timeout_handler()
431 data->rx_buf_offset += rx_buf_data_len; in uart_emul_async_rx_timeout_handler()
432 data->rx_buf_data_len = 0; in uart_emul_async_rx_timeout_handler()
434 if (data->rx_buf_offset >= rx_buf_len || in uart_emul_async_rx_timeout_handler()
435 (rx_buf_data_len > 0 && data->rx_release_on_timeout)) { in uart_emul_async_rx_timeout_handler()
437 uart_emul_async_switch_buf_nolock(data); in uart_emul_async_rx_timeout_handler()
438 if (data->rx_buf == NULL) { in uart_emul_async_rx_timeout_handler()
441 data->rx_async_en = false; in uart_emul_async_rx_timeout_handler()
452 .data.rx = { in uart_emul_async_rx_timeout_handler()
464 .data.rx_buf.buf = rx_buf, in uart_emul_async_rx_timeout_handler()
476 struct uart_emul_data *data = CONTAINER_OF(work, struct uart_emul_data, rx_work); in uart_emul_async_rx_handler() local
477 const struct device *dev = data->dev; in uart_emul_async_rx_handler()
491 K_SPINLOCK(&data->rx_lock) { in uart_emul_async_rx_handler()
492 rx_en = data->rx_async_en; in uart_emul_async_rx_handler()
493 rx_buf = data->rx_buf; in uart_emul_async_rx_handler()
494 buf_len = data->rx_buf_len; in uart_emul_async_rx_handler()
495 offset = data->rx_buf_offset; in uart_emul_async_rx_handler()
496 data_len = data->rx_buf_data_len; in uart_emul_async_rx_handler()
497 empty = ring_buf_is_empty(data->rx_rb); in uart_emul_async_rx_handler()
504 uart_emul_async_switch_buf_nolock(data); in uart_emul_async_rx_handler()
505 rx_buf = data->rx_buf; in uart_emul_async_rx_handler()
506 buf_len = data->rx_buf_len; in uart_emul_async_rx_handler()
507 offset = data->rx_buf_offset; in uart_emul_async_rx_handler()
508 data_len = data->rx_buf_data_len; in uart_emul_async_rx_handler()
515 data->rx_async_en = false; in uart_emul_async_rx_handler()
523 buf_request = data_len == 0 && data->rx_buf_next == NULL; in uart_emul_async_rx_handler()
525 uint32_t read = ring_buf_get(data->rx_rb, &rx_buf[offset + data_len], in uart_emul_async_rx_handler()
528 data->rx_buf_data_len = data_len; in uart_emul_async_rx_handler()
530 if (offset + data_len >= data->rx_buf_len) { in uart_emul_async_rx_handler()
532 data->rx_buf = NULL; in uart_emul_async_rx_handler()
533 data->rx_buf_len = 0; in uart_emul_async_rx_handler()
534 data->rx_buf_offset = 0; in uart_emul_async_rx_handler()
535 data->rx_buf_data_len = 0; in uart_emul_async_rx_handler()
548 if (empty && data->rx_buf_timeout != SYS_FOREVER_US) { in uart_emul_async_rx_handler()
549 (void)k_work_reschedule_for_queue(&uart_emul_work_q, &data->rx_timeout_work, in uart_emul_async_rx_handler()
550 K_USEC(data->rx_buf_timeout)); in uart_emul_async_rx_handler()
560 .data.rx = { in uart_emul_async_rx_handler()
571 .data.rx_buf.buf = rx_buf, in uart_emul_async_rx_handler()
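
The RX work handler above drains the RX ring buffer into the active receive buffer, posts UART_RX_RDY, UART_RX_BUF_REQUEST, and UART_RX_BUF_RELEASED events through uart_emul_post_event, switches to the next buffer when the current one fills, and clears rx_async_en once no buffer remains. A sketch of an application callback servicing those events; the buffer array, its size, and the double-buffer rotation are assumptions for illustration.

	#include <zephyr/drivers/uart.h>

	static uint8_t rx_bufs[2][64];

	static void async_cb(const struct device *dev, struct uart_event *evt, void *user_data)
	{
		static int next;

		ARG_UNUSED(user_data);

		switch (evt->type) {
		case UART_RX_RDY:
			/* evt->data.rx.buf + evt->data.rx.offset holds evt->data.rx.len new bytes. */
			break;
		case UART_RX_BUF_REQUEST:
			/* Hand the driver the next buffer so reception can continue seamlessly. */
			next ^= 1;
			(void)uart_rx_buf_rsp(dev, rx_bufs[next], sizeof(rx_bufs[next]));
			break;
		case UART_RX_BUF_RELEASED:
			/* evt->data.rx_buf.buf may be recycled now. */
			break;
		case UART_RX_DISABLED:
			/* Reception stopped: no buffers left or uart_rx_disable() was called. */
			break;
		default:
			break;
		}
	}
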
581 struct uart_emul_data *data = CONTAINER_OF(work, struct uart_emul_data, tx_work); in uart_emul_async_tx_handler() local
582 const struct device *dev = data->dev; in uart_emul_async_tx_handler()
592 K_SPINLOCK(&data->tx_lock) { in uart_emul_async_tx_handler()
593 tx_buf = data->tx_buf; in uart_emul_async_tx_handler()
594 tx_buf_len = data->tx_buf_len; in uart_emul_async_tx_handler()
595 tx_buf_offset = data->tx_buf_offset; in uart_emul_async_tx_handler()
601 written = ring_buf_put(data->tx_rb, &data->tx_buf[tx_buf_offset], in uart_emul_async_tx_handler()
605 data->tx_buf_offset += written; in uart_emul_async_tx_handler()
608 data->tx_buf = NULL; in uart_emul_async_tx_handler()
609 data->tx_buf_len = 0; in uart_emul_async_tx_handler()
610 data->tx_buf_offset = 0; in uart_emul_async_tx_handler()
653 k_work_submit_to_queue(&uart_emul_work_q, &data->tx_work); in uart_emul_async_tx_handler()
659 .data.tx = { in uart_emul_async_tx_handler()
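
The TX work handler copies the caller's buffer into the TX ring buffer a chunk at a time, advancing tx_buf_offset and resubmitting tx_work until everything is queued, then posts a TX completion event. A sketch of the matching caller side; the semaphore and its signalling from the event callback are assumptions layered on the async_cb sketch above.

	#include <zephyr/drivers/uart.h>
	#include <zephyr/kernel.h>

	static K_SEM_DEFINE(tx_done_sem, 0, 1);

	/* Assumed: the event callback does k_sem_give(&tx_done_sem) on UART_TX_DONE. */

	int send_async(const struct device *dev, const uint8_t *msg, size_t len)
	{
		int err = uart_tx(dev, msg, len, SYS_FOREVER_US);

		if (err) {
			return err;
		}

		/* Wait until the driver reports the buffer has been fully queued. */
		return k_sem_take(&tx_done_sem, K_SECONDS(1));
	}
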
668 static void uart_emul_rx_stop(const struct device *dev, struct uart_emul_data *data) in uart_emul_rx_stop() argument
674 k_work_cancel_delayable(&data->rx_timeout_work); in uart_emul_rx_stop()
676 K_SPINLOCK(&data->rx_lock) { in uart_emul_rx_stop()
677 if (!data->rx_async_en) { in uart_emul_rx_stop()
680 rx_buf = data->rx_buf; in uart_emul_rx_stop()
681 rx_buf_offset = data->rx_buf_offset; in uart_emul_rx_stop()
682 rx_buf_data_len = data->rx_buf_data_len; in uart_emul_rx_stop()
684 data->rx_buf = NULL; in uart_emul_rx_stop()
685 data->rx_buf_len = 0; in uart_emul_rx_stop()
686 data->rx_buf_offset = 0; in uart_emul_rx_stop()
687 data->rx_buf_data_len = 0; in uart_emul_rx_stop()
688 data->rx_buf_next = NULL; in uart_emul_rx_stop()
689 data->rx_buf_next_len = 0; in uart_emul_rx_stop()
690 data->rx_async_en = false; in uart_emul_rx_stop()
691 data->rx_stopping = false; in uart_emul_rx_stop()
701 .data.rx = { in uart_emul_rx_stop()
713 .data.rx_buf.buf = rx_buf, in uart_emul_rx_stop()
722 struct uart_emul_data *data = CONTAINER_OF(work, struct uart_emul_data, rx_disable_work); in uart_emul_async_rx_disable_handler() local
723 const struct device *dev = data->dev; in uart_emul_async_rx_disable_handler()
725 uart_emul_rx_stop(dev, data); in uart_emul_async_rx_disable_handler()
731 struct uart_emul_data *data = dev->data; in uart_emul_callback_set() local
733 data->uart_callback = callback; in uart_emul_callback_set()
734 data->callback_user_data = user_data; in uart_emul_callback_set()
741 struct uart_emul_data *data = dev->data; in uart_emul_tx() local
744 K_SPINLOCK(&data->tx_lock) { in uart_emul_tx()
745 if (data->tx_buf) { in uart_emul_tx()
750 data->tx_buf = buf; in uart_emul_tx()
751 data->tx_buf_len = len; in uart_emul_tx()
752 data->tx_buf_offset = 0; in uart_emul_tx()
754 k_work_submit_to_queue(&uart_emul_work_q, &data->tx_work); in uart_emul_tx()
762 struct uart_emul_data *data = dev->data; in uart_emul_tx_abort() local
766 K_SPINLOCK(&data->tx_lock) { in uart_emul_tx_abort()
767 tx_buf = data->tx_buf; in uart_emul_tx_abort()
768 tx_buf_sent = data->tx_buf_offset; in uart_emul_tx_abort()
770 data->tx_buf = NULL; in uart_emul_tx_abort()
771 data->tx_buf_len = 0; in uart_emul_tx_abort()
772 data->tx_buf_offset = 0; in uart_emul_tx_abort()
774 k_work_cancel(&data->tx_work); in uart_emul_tx_abort()
783 .data.tx = { in uart_emul_tx_abort()
796 struct uart_emul_data *data = dev->data; in uart_emul_rx_buf_rsp() local
799 K_SPINLOCK(&data->rx_lock) { in uart_emul_rx_buf_rsp()
800 if (!data->rx_async_en) { in uart_emul_rx_buf_rsp()
805 if (data->rx_buf_next != NULL) { in uart_emul_rx_buf_rsp()
810 data->rx_buf_next = buf; in uart_emul_rx_buf_rsp()
811 data->rx_buf_next_len = len; in uart_emul_rx_buf_rsp()
819 struct uart_emul_data *data = dev->data; in uart_emul_rx_enable() local
823 K_SPINLOCK(&data->rx_lock) { in uart_emul_rx_enable()
824 rx_stopping = data->rx_stopping; in uart_emul_rx_enable()
825 k_work_cancel(&data->rx_disable_work); in uart_emul_rx_enable()
829 uart_emul_rx_stop(dev, data); in uart_emul_rx_enable()
832 K_SPINLOCK(&data->rx_lock) { in uart_emul_rx_enable()
833 if (data->rx_async_en) { in uart_emul_rx_enable()
838 data->rx_async_en = true; in uart_emul_rx_enable()
839 data->rx_buf = buf; in uart_emul_rx_enable()
840 data->rx_buf_len = len; in uart_emul_rx_enable()
841 data->rx_buf_timeout = timeout; in uart_emul_rx_enable()
842 data->rx_buf_offset = 0; in uart_emul_rx_enable()
843 data->rx_buf_data_len = 0; in uart_emul_rx_enable()
844 data->rx_buf_next = NULL; in uart_emul_rx_enable()
845 data->rx_buf_next_len = 0; in uart_emul_rx_enable()
847 if (!ring_buf_is_empty(data->rx_rb)) { in uart_emul_rx_enable()
848 (void)k_work_submit_to_queue(&uart_emul_work_q, &data->rx_work); in uart_emul_rx_enable()
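
uart_emul_rx_enable() cancels any pending rx_disable_work, records the first buffer and inactivity timeout, and immediately submits rx_work if bytes are already waiting in the RX ring buffer. A self-contained sketch of starting async reception against the emulator and feeding it data from a test; rx_evt_cb stands in for a full handler like the async_cb sketch above, and the buffer size, timeout, and payload are assumptions.

	#include <zephyr/drivers/uart.h>
	#include <zephyr/drivers/serial/uart_emul.h>

	static uint8_t first_rx_buf[64];

	static void rx_evt_cb(const struct device *dev, struct uart_event *evt, void *user_data)
	{
		ARG_UNUSED(dev);
		ARG_UNUSED(evt);
		ARG_UNUSED(user_data);
	}

	int start_async_rx(const struct device *dev)
	{
		static const uint8_t msg[] = "hello";
		int err;

		err = uart_callback_set(dev, rx_evt_cb, NULL);
		if (err) {
			return err;
		}

		/* 1000 us of RX inactivity before a partially filled buffer is reported. */
		err = uart_rx_enable(dev, first_rx_buf, sizeof(first_rx_buf), 1000);
		if (err) {
			return err;
		}

		/* Feeding the emulator's RX ring buffer makes the driver submit rx_work. */
		uart_emul_put_rx_data(dev, msg, sizeof(msg) - 1);

		return 0;
	}
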
857 struct uart_emul_data *data = dev->data; in uart_emul_rx_disable() local
860 K_SPINLOCK(&data->rx_lock) { in uart_emul_rx_disable()
861 if (!data->rx_async_en) { in uart_emul_rx_disable()
865 data->rx_stopping = true; in uart_emul_rx_disable()
866 k_work_submit_to_queue(&uart_emul_work_q, &data->rx_disable_work); in uart_emul_rx_disable()
908 struct uart_emul_data *drv_data = dev->data; in uart_emul_callback_tx_data_ready_set()
914 uint32_t uart_emul_put_rx_data(const struct device *dev, const uint8_t *data, size_t size) in uart_emul_put_rx_data() argument
916 struct uart_emul_data *drv_data = dev->data; in uart_emul_put_rx_data()
923 count = ring_buf_put(drv_data->rx_rb, data, size); in uart_emul_put_rx_data()
947 uint32_t uart_emul_get_tx_data(const struct device *dev, uint8_t *data, size_t size) in uart_emul_get_tx_data() argument
949 struct uart_emul_data *drv_data = dev->data; in uart_emul_get_tx_data()
954 count = ring_buf_get(drv_data->tx_rb, data, size); in uart_emul_get_tx_data()
961 struct uart_emul_data *drv_data = dev->data; in uart_emul_flush_rx_data()
974 struct uart_emul_data *drv_data = dev->data; in uart_emul_flush_tx_data()
987 struct uart_emul_data *drv_data = dev->data; in uart_emul_set_errors()
994 __unused struct uart_emul_data *drv_data = dev->data; in uart_emul_set_release_buffer_on_timeout()
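
The lines above are the test backdoor: a test can inject RX bytes, harvest TX bytes, flush either ring buffer, and fake error flags. A ztest-style sketch; the suite name, test name, node label, and expected flag are illustrative assumptions.

	#include <zephyr/ztest.h>
	#include <zephyr/drivers/uart.h>
	#include <zephyr/drivers/serial/uart_emul.h>

	ZTEST_SUITE(uart_emul_suite, NULL, NULL, NULL, NULL, NULL);

	ZTEST(uart_emul_suite, test_error_injection)
	{
		const struct device *dev = DEVICE_DT_GET(DT_NODELABEL(euart0)); /* assumed label */

		/* Start from a clean slate. */
		uart_emul_flush_rx_data(dev);
		uart_emul_flush_tx_data(dev);

		/* Fake an overrun and check that the driver reports it. */
		uart_emul_set_errors(dev, UART_ERROR_OVERRUN);
		zassert_equal(uart_err_check(dev), UART_ERROR_OVERRUN, "expected injected overrun");
	}
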
1001 struct uart_emul_data *data = dev->data; in uart_emul_register() local
1003 sys_slist_append(&data->emuls, &emul->node); in uart_emul_register()