Lines matching the full-text search terms "tx", "inst", and "mode" in the NPCX UART driver (uart_npcx.c)

/* SPDX-License-Identifier: Apache-2.0 */
In struct uart_npcx_config:
    struct uart_reg *inst;            /* member */
    /* int-mux configuration */
In uart_npcx_pm_policy_state_lock_get():
    if (atomic_test_and_set_bit(data->pm_policy_state_flag, flag) == 0) {

In uart_npcx_pm_policy_state_lock_put():
    if (atomic_test_and_clear_bit(data->pm_policy_state_flag, flag) == 1) {
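These two fragments gate Zephyr's PM policy lock behind an atomic flag so the lock is acquired and released exactly once per state. A minimal sketch of the same pattern, assuming the lock targets PM_STATE_SUSPEND_TO_IDLE and a single hypothetical TX-busy flag (the actual states and flags used by the driver are not shown in the matched lines):

    #include <zephyr/pm/policy.h>
    #include <zephyr/sys/atomic.h>

    static ATOMIC_DEFINE(uart_pm_flags, 1);   /* hypothetical flag bitmap */
    #define UART_PM_TX_BUSY 0

    static void uart_pm_lock_get_once(void)
    {
            /* Acquire the PM lock only on the 0 -> 1 transition of the flag. */
            if (atomic_test_and_set_bit(uart_pm_flags, UART_PM_TX_BUSY) == 0) {
                    pm_policy_state_lock_get(PM_STATE_SUSPEND_TO_IDLE, PM_ALL_SUBSTATES);
            }
    }

    static void uart_pm_lock_put_once(void)
    {
            /* Release the PM lock only on the 1 -> 0 transition of the flag. */
            if (atomic_test_and_clear_bit(uart_pm_flags, UART_PM_TX_BUSY) == 1) {
                    pm_policy_state_lock_put(PM_STATE_SUSPEND_TO_IDLE, PM_ALL_SUBSTATES);
            }
    }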
In uart_set_npcx_baud_rate():
    static int uart_set_npcx_baud_rate(struct uart_reg *const inst, int baud_rate, int src_clk)
    /* Supported baud rates:
     * - 115200
     * - 3000000
     */
    /* Prescaler/divisor register pairs, one per supported source clock: */
    inst->UPSR = 0x38;
    inst->UBAUD = 0x01;

    inst->UPSR = 0x08;
    inst->UBAUD = 0x0a;

    inst->UPSR = 0x10;
    inst->UBAUD = 0x08;

    inst->UPSR = 0x10;
    inst->UBAUD = 0x0a;

    inst->UPSR = 0x08;
    inst->UBAUD = 0x19;

    inst->UPSR = 0x08;
    inst->UBAUD = 0x1a;

    return -EINVAL;

    inst->UPSR = 0x08;
    inst->UBAUD = 0x0;

    return -EINVAL;

    return -EINVAL;
In uart_npcx_rx_fifo_available():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    return IS_BIT_SET(inst->UFRSTS, NPCX_UFRSTS_RFIFO_NEMPTY_STS);

In uart_npcx_dis_all_tx_interrupts():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    /* Disable all Tx interrupts */
    inst->UFTCTL &= ~(BIT(NPCX_UFTCTL_TEMPTY_LVL_EN) | BIT(NPCX_UFTCTL_TEMPTY_EN) |

In uart_npcx_clear_rx_fifo():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    scratch = inst->URBUF;

In uart_npcx_tx_fifo_ready():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    /* True if the Tx FIFO is not completely full */
    return !(GET_FIELD(inst->UFTSTS, NPCX_UFTSTS_TEMPTY_LVL) == 0);
In uart_npcx_fifo_fill():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    struct uart_npcx_data *data = dev->data;
    k_spinlock_key_t key = k_spin_lock(&data->lock);
    /* While there is data left and the Tx FIFO can still accept it */
    while ((size - tx_bytes > 0) && uart_npcx_tx_fifo_ready(dev)) {
    /* Put a character into the Tx FIFO */
    inst->UTBUF = tx_data[tx_bytes++];
    inst->UFTCTL |= BIT(NPCX_UFTCTL_NXMIP_EN);
    k_spin_unlock(&data->lock, key);
In uart_npcx_fifo_read():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    while ((size - rx_bytes > 0) && uart_npcx_rx_fifo_available(dev)) {
    rx_data[rx_bytes++] = inst->URBUF;
In uart_npcx_irq_tx_enable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    struct uart_npcx_data *data = dev->data;
    k_spinlock_key_t key = k_spin_lock(&data->lock);
    inst->UFTCTL |= BIT(NPCX_UFTCTL_TEMPTY_EN);
    k_spin_unlock(&data->lock, key);

In uart_npcx_irq_tx_disable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    struct uart_npcx_data *data = dev->data;
    k_spinlock_key_t key = k_spin_lock(&data->lock);
    inst->UFTCTL &= ~(BIT(NPCX_UFTCTL_TEMPTY_EN));
    k_spin_unlock(&data->lock, key);

In uart_npcx_irq_tx_is_enabled():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    return IS_BIT_SET(inst->UFTCTL, NPCX_UFTCTL_TEMPTY_EN);

In uart_npcx_irq_tx_complete():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    /* True when the Tx FIFO and shift register are both empty (no transmission in progress) */
    return IS_BIT_SET(inst->UFTSTS, NPCX_UFTSTS_NXMIP);
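uart_npcx_irq_tx_complete() is the backend of Zephyr's uart_irq_tx_complete(), which only reports completion once the FIFO and the shift register are both empty (NXMIP). A short sketch of the usual caller pattern inside an application's UART interrupt callback; the function name here is illustrative:

    #include <zephyr/drivers/uart.h>

    static void stop_tx_when_drained(const struct device *uart)
    {
            /* Only stop Tx interrupts once the last bit has physically left the transmitter. */
            if (uart_irq_tx_complete(uart)) {
                    uart_irq_tx_disable(uart);
            }
    }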
In uart_npcx_irq_rx_enable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    inst->UFRCTL |= BIT(NPCX_UFRCTL_RNEMPTY_EN);

In uart_npcx_irq_rx_disable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    inst->UFRCTL &= ~(BIT(NPCX_UFRCTL_RNEMPTY_EN));

In uart_npcx_irq_rx_is_enabled():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    return IS_BIT_SET(inst->UFRCTL, NPCX_UFRCTL_RNEMPTY_EN);

In uart_npcx_irq_err_enable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    inst->UICTRL |= BIT(NPCX_UICTRL_EEI);

In uart_npcx_irq_err_disable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    inst->UICTRL &= ~(BIT(NPCX_UICTRL_EEI));
In uart_npcx_irq_callback_set():
    struct uart_npcx_data *data = dev->data;
    data->user_cb = cb;
    data->user_data = cb_data;
    data->async.user_callback = NULL;
    data->async.user_data = NULL;
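uart_npcx_irq_callback_set() stores the hook used by Zephyr's interrupt-driven UART API (and clears the async callback, since the two modes are mutually exclusive). A minimal echo sketch against that API; the node label uart1 and the 64-byte scratch buffer are illustrative choices, not taken from the driver:

    #include <zephyr/device.h>
    #include <zephyr/drivers/uart.h>

    static void echo_cb(const struct device *dev, void *user_data)
    {
            uint8_t buf[64];

            ARG_UNUSED(user_data);

            if (!uart_irq_update(dev) || !uart_irq_rx_ready(dev)) {
                    return;
            }

            /* Drain whatever is in the Rx FIFO and push it straight back out. */
            int n = uart_fifo_read(dev, buf, sizeof(buf));

            if (n > 0) {
                    uart_fifo_fill(dev, buf, n);
            }
    }

    void echo_start(void)
    {
            const struct device *uart = DEVICE_DT_GET(DT_NODELABEL(uart1));

            uart_irq_callback_user_data_set(uart, echo_cb, NULL);
            uart_irq_rx_enable(uart);
    }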
In uart_npcx_poll_in() (interrupt-driven configuration):
    /* Poll-in implementation for the interrupt-driven config: forward the call to uart_npcx_fifo_read(). */
    return uart_npcx_fifo_read(dev, c, 1) ? 0 : -1;

In uart_npcx_poll_out() (interrupt-driven configuration):
    /* Poll-out implementation for the interrupt-driven config: forward the call to the FIFO fill helper. */
In uart_npcx_poll_in() (byte-mode configuration):
    /* Poll-in implementation for the byte-mode config: read a byte from URBUF if one is available. */
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    if (!IS_BIT_SET(inst->UICTRL, NPCX_UICTRL_RBF)) {
    return -1;
    *c = inst->URBUF;
In uart_npcx_poll_out() (byte-mode configuration):
    /* Poll-out implementation for the byte-mode config: write the byte to UTBUF once it is empty. */
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    /* Wait until the Tx single-byte buffer is empty and ready to accept a new byte */
    while (!IS_BIT_SET(inst->UICTRL, NPCX_UICTRL_TBE)) {
    inst->UTBUF = c;
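The byte-mode routines above back Zephyr's polling API: uart_poll_out() busy-waits on TBE before writing UTBUF, and uart_poll_in() returns -1 when RBF is clear. A minimal polled loop-back sketch; the node label uart1 is an illustrative assumption:

    #include <zephyr/device.h>
    #include <zephyr/drivers/uart.h>
    #include <zephyr/kernel.h>

    void poll_demo(void)
    {
            const struct device *uart = DEVICE_DT_GET(DT_NODELABEL(uart1));
            unsigned char c;

            /* Blocking transmit: spins until the single-byte Tx buffer is free. */
            uart_poll_out(uart, '?');

            /* Non-blocking receive: 0 means a byte was stored in c, -1 means nothing pending. */
            while (uart_poll_in(uart, &c) < 0) {
                    k_msleep(1);
            }
            uart_poll_out(uart, c);
    }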
In async_user_callback():
    const struct uart_npcx_data *data = dev->data;
    if (data->async.user_callback) {
    data->async.user_callback(dev, evt, data->async.user_data);
In async_evt_rx_rdy():
    struct uart_npcx_data *data = dev->data;
    struct uart_npcx_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
    .data.rx.buf = rx_dma_params->buf,
    .data.rx.len = rx_dma_params->counter - rx_dma_params->offset,
    .data.rx.offset = rx_dma_params->offset};
    rx_dma_params->offset = rx_dma_params->counter;
In async_evt_tx_done():
    struct uart_npcx_data *data = dev->data;
    (void)k_work_cancel_delayable(&data->async.tx_dma_params.timeout_work);
    LOG_DBG("TX done: %d", data->async.tx_dma_params.buf_len);
    .data.tx.buf = data->async.tx_dma_params.buf,
    .data.tx.len = data->async.tx_dma_params.buf_len};
    /* Reset the TX buffer */
    data->async.tx_dma_params.buf = NULL;
    data->async.tx_dma_params.buf_len = 0U;
In uart_npcx_async_rx_dma_get_status():
    const struct uart_npcx_config *const config = dev->config;
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    if (IS_BIT_SET(mdma_reg_base->MDMA_CTL0, NPCX_MDMA_CTL_MDMAEN)) {
    *pending_length = mdma_reg_base->MDMA_CTCNT0;
In uart_npcx_async_rx_flush():
    struct uart_npcx_data *data = dev->data;
    struct uart_npcx_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
    curr_rcv_len = rx_dma_params->buf_len - dma_pending_len;
    if (curr_rcv_len > rx_dma_params->offset) {
    rx_dma_params->counter = curr_rcv_len;
    k_work_reschedule(&data->rx_refresh_timeout_work, delay);
In uart_npcx_async_callback_set():
    struct uart_npcx_data *data = dev->data;
    data->async.user_callback = callback;
    data->async.user_data = user_data;
    data->user_cb = NULL;
    data->user_data = NULL;
In uart_npcx_async_tx_dma_get_status():
    const struct uart_npcx_config *const config = dev->config;
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    if (IS_BIT_SET(mdma_reg_base->MDMA_CTL1, NPCX_MDMA_CTL_MDMAEN)) {
    *pending_length = mdma_reg_base->MDMA_CTCNT1;
    return -EBUSY;
In uart_npcx_async_tx():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    struct uart_npcx_data *data = dev->data;
    struct uart_npcx_tx_dma_params *tx_dma_params = &data->async.tx_dma_params;
    return -EINVAL;
    if (tx_dma_params->buf) {
    return -EBUSY;
    data->async.tx_in_progress = true;
    data->async.tx_dma_params.buf = buf;
    data->async.tx_dma_params.buf_len = len;
    data->async.tx_dma_params.timeout_us = timeout;
    mdma_reg_base->MDMA_SRCB1 = (uint32_t)buf;
    mdma_reg_base->MDMA_TCNT1 = len;
    async_timer_start(&data->async.tx_dma_params.timeout_work, timeout);
    mdma_reg_base->MDMA_CTL1 |= BIT(NPCX_MDMA_CTL_MDMAEN) | BIT(NPCX_MDMA_CTL_SIEN);
    inst->UMDSL |= BIT(NPCX_UMDSL_ETD);
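uart_npcx_async_tx() records the buffer, programs MDMA channel 1 (MDMA_SRCB1/MDMA_TCNT1), arms the timeout work, and then enables the DMA channel and the UART's ETD path. From an application this is reached through Zephyr's asynchronous API; a hedged usage sketch, with the node label uart1 and the 100 ms timeout as illustrative values:

    #include <zephyr/device.h>
    #include <zephyr/drivers/uart.h>
    #include <zephyr/kernel.h>

    static void tx_cb(const struct device *dev, struct uart_event *evt, void *user_data)
    {
            ARG_UNUSED(dev);
            ARG_UNUSED(user_data);

            if (evt->type == UART_TX_DONE) {
                    printk("sent %u bytes\n", (unsigned int)evt->data.tx.len);
            } else if (evt->type == UART_TX_ABORTED) {
                    printk("tx aborted after %u bytes\n", (unsigned int)evt->data.tx.len);
            }
    }

    void async_tx_demo(void)
    {
            static const uint8_t msg[] = "hello";
            const struct device *uart = DEVICE_DT_GET(DT_NODELABEL(uart1));

            uart_callback_set(uart, tx_cb, NULL);
            /* msg must stay valid until UART_TX_DONE or UART_TX_ABORTED is delivered. */
            uart_tx(uart, msg, sizeof(msg) - 1, 100 * USEC_PER_MSEC);
    }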
In uart_npcx_async_tx_abort():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_npcx_data *data = dev->data;
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    k_work_cancel_delayable(&data->async.tx_dma_params.timeout_work);
    mdma_reg_base->MDMA_CTL1 &= ~BIT(NPCX_MDMA_CTL_MDMAEN);
    bytes_transmitted = data->async.tx_dma_params.buf_len - dma_pending_len;
    .data.tx.buf = data->async.tx_dma_params.buf,
    .data.tx.len = bytes_transmitted,
In uart_npcx_async_tx_timeout():
    const struct device *dev = async_data->uart_dev;
    LOG_ERR("Async Tx Timeout");
In uart_npcx_async_rx_enable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    struct uart_npcx_data *data = dev->data;
    struct uart_npcx_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
    rx_dma_params->timeout_us = timeout_us;
    rx_dma_params->buf = buf;
    rx_dma_params->buf_len = len;
    rx_dma_params->offset = 0;
    rx_dma_params->counter = 0;
    SET_FIELD(inst->UFRCTL, NPCX_UFRCTL_RFULL_LVL_SEL, 1);
    mdma_reg_base->MDMA_DSTB0 = (uint32_t)buf;
    mdma_reg_base->MDMA_TCNT0 = len;
    mdma_reg_base->MDMA_CTL0 |= BIT(NPCX_MDMA_CTL_MDMAEN) | BIT(NPCX_MDMA_CTL_SIEN);
    inst->UMDSL |= BIT(NPCX_UMDSL_ERD);
    rx_dma_params->enabled = true;
    inst->UFRCTL |= BIT(NPCX_UFRCTL_RNEMPTY_EN);
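uart_npcx_async_rx_enable() points MDMA channel 0 at the caller's buffer, enables the UART's ERD path, and re-enables the Rx-not-empty interrupt so the inactivity timeout can be restarted from the ISR. The application-side counterpart is uart_rx_enable() plus buffer rotation via UART_RX_BUF_REQUEST and uart_rx_buf_rsp(); a hedged sketch, where the two 64-byte buffers, the 10 ms timeout, and the uart1 label are illustrative:

    #include <zephyr/device.h>
    #include <zephyr/drivers/uart.h>
    #include <zephyr/kernel.h>

    static uint8_t rx_bufs[2][64];
    static uint8_t next_idx;

    static void rx_cb(const struct device *dev, struct uart_event *evt, void *user_data)
    {
            ARG_UNUSED(user_data);

            switch (evt->type) {
            case UART_RX_RDY:
                    /* evt->data.rx.buf + evt->data.rx.offset holds evt->data.rx.len new bytes. */
                    break;
            case UART_RX_BUF_REQUEST:
                    /* Hand the driver the next buffer before the current one fills up. */
                    next_idx ^= 1;
                    uart_rx_buf_rsp(dev, rx_bufs[next_idx], sizeof(rx_bufs[0]));
                    break;
            default:
                    break;
            }
    }

    void async_rx_demo(void)
    {
            const struct device *uart = DEVICE_DT_GET(DT_NODELABEL(uart1));

            uart_callback_set(uart, rx_cb, NULL);
            uart_rx_enable(uart, rx_bufs[0], sizeof(rx_bufs[0]), 10 * USEC_PER_MSEC);
    }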
In async_evt_rx_buf_release():
    struct uart_npcx_data *data = dev->data;
    .data.rx_buf.buf = data->async.rx_dma_params.buf,
    data->async.rx_dma_params.buf = NULL;
    data->async.rx_dma_params.buf_len = 0U;
    data->async.rx_dma_params.offset = 0U;
    data->async.rx_dma_params.counter = 0U;
In uart_npcx_async_rx_disable():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    struct uart_npcx_data *data = dev->data;
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    struct uart_npcx_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
    inst->UFRCTL &= ~(BIT(NPCX_UFRCTL_RNEMPTY_EN));
    k_work_cancel_delayable(&rx_dma_params->timeout_work);
    if (rx_dma_params->buf == NULL) {
    rx_dma_params->enabled = false;
    if (data->async.next_rx_buffer != NULL) {
    rx_dma_params->buf = data->async.next_rx_buffer;
    rx_dma_params->buf_len = data->async.next_rx_buffer_len;
    data->async.next_rx_buffer = NULL;
    data->async.next_rx_buffer_len = 0;
    mdma_reg_base->MDMA_CTL0 &= ~BIT(NPCX_MDMA_CTL_MDMAEN);
In uart_npcx_async_rx_buf_rsp():
    struct uart_npcx_data *data = dev->data;
    if (data->async.next_rx_buffer != NULL) {
    return -EBUSY;
    } else if (data->async.rx_dma_params.enabled == false) {
    return -EACCES;
    data->async.next_rx_buffer = buf;
    data->async.next_rx_buffer_len = len;
In uart_npcx_async_rx_timeout():
    const struct device *dev = async_data->uart_dev;
In uart_npcx_async_dma_load_new_rx_buf():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    struct uart_npcx_data *data = dev->data;
    struct uart_npcx_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
    rx_dma_params->offset = 0;
    rx_dma_params->counter = 0;
    rx_dma_params->buf = data->async.next_rx_buffer;
    rx_dma_params->buf_len = data->async.next_rx_buffer_len;
    data->async.next_rx_buffer = NULL;
    data->async.next_rx_buffer_len = 0;
    mdma_reg_base->MDMA_DSTB0 = (uint32_t)rx_dma_params->buf;
    mdma_reg_base->MDMA_TCNT0 = rx_dma_params->buf_len;
    mdma_reg_base->MDMA_CTL0 |= BIT(NPCX_MDMA_CTL_MDMAEN) | BIT(NPCX_MDMA_CTL_SIEN);
    inst->UMDSL |= BIT(NPCX_UMDSL_ERD);
In uart_npcx_async_dma_rx_complete():
    struct uart_npcx_data *data = dev->data;
    struct uart_npcx_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
    rx_dma_params->counter = rx_dma_params->buf_len;
    if (data->async.next_rx_buffer != NULL) {
    async_timer_start(&rx_dma_params->timeout_work, rx_dma_params->timeout_us);
In uart_npcx_isr():
    struct uart_npcx_data *data = dev->data;
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    k_work_reschedule(&data->rx_refresh_timeout_work, delay);
    if (data->user_cb) {
    data->user_cb(dev, data->user_data);
    if (data->async.user_callback) {
    struct mdma_reg *const mdma_reg_base = config->mdma_reg_base;
    /* Always handle Rx here, because RFIFO_NEMPTY_STS is not valid while MDMA mode is in use. */
    if (data->async.rx_dma_params.timeout_us == 0) {
    } else if (IS_BIT_SET(inst->UFRCTL, NPCX_UFRCTL_RNEMPTY_EN)) {
    async_timer_start(&data->async.rx_dma_params.timeout_work, data->async.rx_dma_params.timeout_us);
    if (IS_BIT_SET(mdma_reg_base->MDMA_CTL0, NPCX_MDMA_CTL_TC) &&
        IS_BIT_SET(mdma_reg_base->MDMA_CTL0, NPCX_MDMA_CTL_SIEN)) {
    mdma_reg_base->MDMA_CTL0 &= ~BIT(NPCX_MDMA_CTL_SIEN);
    /* TC is a write-0-to-clear bit */
    mdma_reg_base->MDMA_CTL0 &= ~BIT(NPCX_MDMA_CTL_TC);
    inst->UMDSL &= ~BIT(NPCX_UMDSL_ERD);
    /* MDMA Tx-done interrupt */
    if (IS_BIT_SET(mdma_reg_base->MDMA_CTL1, NPCX_MDMA_CTL_TC) &&
        IS_BIT_SET(mdma_reg_base->MDMA_CTL1, NPCX_MDMA_CTL_SIEN)) {
    mdma_reg_base->MDMA_CTL1 &= ~BIT(NPCX_MDMA_CTL_SIEN);
    /* TC is a write-0-to-clear bit */
    mdma_reg_base->MDMA_CTL1 &= ~BIT(NPCX_MDMA_CTL_TC);
    /* The MDMA Tx transfer is done (all data in memory have been moved to the UART Tx FIFO),
     * but the data in the Tx FIFO have not yet been completely sent on the bus.
     */
    if (!IS_BIT_SET(inst->UFTSTS, NPCX_UFTSTS_NXMIP)) {
    k_spinlock_key_t key = k_spin_lock(&data->lock);
    inst->UFTCTL |= BIT(NPCX_UFTCTL_NXMIP_EN);
    k_spin_unlock(&data->lock, key);
    data->async.tx_in_progress = false;
    if (IS_BIT_SET(inst->UFTCTL, NPCX_UFTCTL_NXMIP_EN) &&
        IS_BIT_SET(inst->UFTSTS, NPCX_UFTSTS_NXMIP)) {
    k_spinlock_key_t key = k_spin_lock(&data->lock);
    inst->UFTCTL &= ~BIT(NPCX_UFTCTL_NXMIP_EN);
    k_spin_unlock(&data->lock, key);
    if (data->async.tx_in_progress) {
    data->async.tx_in_progress = false;
    LOG_DBG("Tx wait-empty done");
In uart_npcx_err_check():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_reg *const inst = config->inst;
    uint8_t stat = inst->USTAT;
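uart_npcx_err_check() samples USTAT once and translates it into Zephyr's error bitmask. On the caller side, uart_err_check() returns 0 when the line is clean, a bitwise OR of the UART_ERROR_* flags otherwise, or a negative errno if unsupported; a short sketch:

    #include <zephyr/drivers/uart.h>
    #include <zephyr/kernel.h>

    static void report_line_errors(const struct device *uart)
    {
            int err = uart_err_check(uart);

            if (err > 0) {
                    if (err & UART_ERROR_OVERRUN) {
                            printk("rx overrun\n");
                    }
                    if (err & UART_ERROR_PARITY) {
                            printk("parity error\n");
                    }
                    if (err & UART_ERROR_FRAMING) {
                            printk("framing error\n");
                    }
            }
    }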
In uart_npcx_rx_wk_isr():
    LOG_DBG("-->%s", dev->name);
    struct uart_npcx_data *data = dev->data;
    k_work_reschedule(&data->rx_refresh_timeout_work, delay);
In the driver's struct uart_driver_api table:
    .tx = uart_npcx_async_tx,
In uart_npcx_init():
    const struct uart_npcx_config *const config = dev->config;
    struct uart_npcx_data *const data = dev->data;
    struct uart_reg *const inst = config->inst;
    return -ENODEV;
    ret = clock_control_on(clk_dev, (clock_control_subsys_t)&config->clk_cfg);
    ret = clock_control_on(clk_dev, (clock_control_subsys_t)&config->mdma_clk_cfg);
    ret = clock_control_get_rate(clk_dev, (clock_control_subsys_t)&config->clk_cfg, &uart_rate);
    ret = uart_set_npcx_baud_rate(inst, data->baud_rate, uart_rate);
    LOG_ERR("Set baud rate %d with unsupported apb clock %d failed", data->baud_rate,
    /* 8-N-1, FIFO enabled. Must be done after setting the baud rate. */
    inst->UFRS = 0x00;
    /* Initialize the UART FIFO if the mode is interrupt-driven */
    /* Enable the UART FIFO mode */
    inst->UMDSL |= BIT(NPCX_UMDSL_FIFO_MD);
    /* Disable all UART Tx FIFO interrupts */
    config->irq_config_func(dev);
    data->async.next_rx_buffer = NULL;
    data->async.next_rx_buffer_len = 0;
    data->async.uart_dev = dev;
    k_work_init_delayable(&data->async.rx_dma_params.timeout_work, uart_npcx_async_rx_timeout);
    k_work_init_delayable(&data->async.tx_dma_params.timeout_work, uart_npcx_async_tx_timeout);
    npcx_miwu_init_dev_callback(&data->uart_rx_cb, &config->uart_rx_wui,
    npcx_miwu_manage_callback(&data->uart_rx_cb, true);
    /* Configure the UART wake-up event, triggered from a falling edge on the UART Rx pin */
    npcx_miwu_interrupt_configure(&config->uart_rx_wui, NPCX_MIWU_MODE_EDGE,
    k_work_init_delayable(&data->rx_refresh_timeout_work, uart_npcx_rx_refresh_timeout);
    /* Configure pin-mux for the UART device */
    ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
Instantiation macros (interrupt-driven / async configuration enabled):
    #define NPCX_UART_IRQ_CONFIG_FUNC_DECL(inst) \
        static void uart_npcx_irq_config_##inst(const struct device *dev)
    #define NPCX_UART_IRQ_CONFIG_FUNC_INIT(inst) .irq_config_func = uart_npcx_irq_config_##inst,
    #define NPCX_UART_IRQ_CONFIG_FUNC(inst) \
        static void uart_npcx_irq_config_##inst(const struct device *dev) \
        IRQ_CONNECT(DT_INST_IRQN(inst), DT_INST_IRQ(inst, priority), uart_npcx_isr, \
                    DEVICE_DT_INST_GET(inst), 0); \
        irq_enable(DT_INST_IRQN(inst)); \

Empty fallbacks (interrupt-driven / async configuration disabled):
    #define NPCX_UART_IRQ_CONFIG_FUNC_DECL(inst)
    #define NPCX_UART_IRQ_CONFIG_FUNC_INIT(inst)
    #define NPCX_UART_IRQ_CONFIG_FUNC(inst)
In the per-instance config initializer:
    .inst = (struct uart_reg *)DT_INST_REG_ADDR(i), \
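The per-instance config is populated from devicetree (DT_INST_REG_ADDR() above), so applications also obtain an instance through devicetree and check that uart_npcx_init() succeeded before using it. A minimal sketch, assuming a node labeled uart1 with status "okay" (the label is illustrative):

    #include <errno.h>
    #include <zephyr/device.h>
    #include <zephyr/devicetree.h>

    static const struct device *const npcx_uart = DEVICE_DT_GET(DT_NODELABEL(uart1));

    int npcx_uart_check_ready(void)
    {
            /* device_is_ready() is false if the init function returned an error
             * (e.g. clock enable, baud-rate, or pinctrl failure).
             */
            if (!device_is_ready(npcx_uart)) {
                    return -ENODEV;
            }
            return 0;
    }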