Lines Matching +full:pclk +full:- +full:pin

1 /* ns16550.c - NS16550D serial driver */
6 * Copyright (c) 2010, 2012-2015 Wind River Systems, Inc.
7 * Copyright (c) 2020-2023 Intel Corp.
9 * SPDX-License-Identifier: Apache-2.0
72 /* If any node has property io-mapped set, we need to support IO port
76 * as the io-mapped property is considered to always exist and be present,
78 * we resort to the following helper to see if any okay nodes have io-mapped
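/*
 * Hedged sketch of such a helper (macro names here are illustrative, not the
 * driver's exact ones). It assumes DT_DRV_COMPAT is set to ns16550 and
 * <zephyr/devicetree.h> is included, and folds a logical OR over every
 * enabled instance's io-mapped value so IO-port access is only compiled in
 * when at least one okay node actually sets it.
 */
#define DT_DRV_COMPAT ns16550
#include <zephyr/devicetree.h>

#define INST_IS_IOPORT_OR(inst) DT_INST_PROP(inst, io_mapped) ||

#if (DT_INST_FOREACH_STATUS_OKAY(INST_IS_IOPORT_OR) 0)
#define UART_NS16550_IOPORT_ENABLED 1
#else
#define UART_NS16550_IOPORT_ENABLED 0
#endif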
164 * RXRDY pin (29) will be low active. Once it is activated the
165 * RXRDY pin will go inactive when there are no more charac-
170 * reached, the RXRDY pin will go low active. Once it is acti-
175 * FIFO Mode (FCR0 = 1, FCR3 = 0) and there are no charac-
177 * pin (24) will be low active. Once it is activated the TXRDY
178 * pin will go inactive after the first character is loaded into the
183 * TXRDY pin will go low active. This pin will become inactive
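/*
 * Hedged sketch of the FCR values the modes described above correspond to,
 * using the standard 16550 FCR bit positions; these defines are illustrative
 * and not the driver's own register macros.
 */
#define FCR_FIFO_EN    0x01	/* FCR0: enable FIFOs */
#define FCR_DMA_MODE1  0x08	/* FCR3: DMA mode 1 for the RXRDY/TXRDY pins */

#define FCR_INTERRUPT_MODE  0x00				/* FCR0 = 0 */
#define FCR_FIFO_MODE_DMA0  FCR_FIFO_EN				/* FCR0 = 1, FCR3 = 0 */
#define FCR_FIFO_MODE_DMA1  (FCR_FIFO_EN | FCR_DMA_MODE1)	/* FCR0 = 1, FCR3 = 1 */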
271 #define IIRC(dev) (((struct uart_ns16550_dev_data *)(dev)->data)->iir_cache)
390 if (cfg->io_map) {
413 if (cfg->io_map) {
438 if (cfg->io_map) {
454 if (cfg->io_map) {
464 const struct uart_ns16550_dev_config *config = dev->config;
466 return config->reg_interval;
473 const struct uart_ns16550_dev_config *config = dev->config;
475 if (config->io_map) {
476 port = config->port;
489 uint32_t pclk)
494 * (uint32_t)(pclk / (16.0 * baud_rate) + 0.5)
496 return ((pclk + (baud_rate << 3)) / baud_rate) >> 4;
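/*
 * Hedged worked example of the rounded-divisor arithmetic above: with
 * pclk = 48000000 Hz and baud_rate = 115200,
 * (48000000 + 115200 * 8) / 115200 = 424, and 424 >> 4 = 26, which matches
 * round(48000000 / (16 * 115200)) = round(26.04) = 26.
 */
#include <stdint.h>

static uint32_t divisor_round_nearest(uint32_t pclk, uint32_t baud_rate)
{
	/* Integer equivalent of (uint32_t)(pclk / (16.0 * baud_rate) + 0.5):
	 * adding baud_rate * 8 before the divide adds the 0.5 rounding term.
	 */
	return ((pclk + (baud_rate << 3)) / baud_rate) >> 4;
}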
502 uint32_t pclk)
504 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
522 divisor = get_uart_baudrate_divisor(dev, baud_rate, pclk);
533 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
540 return -1;
543 static void set_baud_rate(const struct device *dev, uint32_t baud_rate, uint32_t pclk)
545 struct uart_ns16550_dev_data * const dev_data = dev->data;
546 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
550 if ((baud_rate != 0U) && (pclk != 0U)) {
552 divisor = get_ite_uart_baudrate_divisor(dev, baud_rate, pclk);
554 divisor = get_uart_baudrate_divisor(dev, baud_rate, pclk);
565 dev_data->uart_config.baudrate = baud_rate;
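/*
 * Hedged sketch of how the computed divisor is typically programmed on a
 * 16550: set LCR.DLAB, write the divisor latch low/high bytes, then clear
 * DLAB again. The MMIO base and byte-wide register layout here are
 * illustrative assumptions, not this driver's accessors.
 */
#include <stdint.h>

#define UART_BASE  0x10000000UL	/* hypothetical MMIO base */
#define REG_DLL    0x00		/* divisor latch low  (visible when DLAB = 1) */
#define REG_DLM    0x01		/* divisor latch high (visible when DLAB = 1) */
#define REG_LCR    0x03		/* line control register */
#define LCR_DLAB   0x80

static inline void reg_write(uint32_t off, uint8_t val)
{
	*(volatile uint8_t *)(UART_BASE + off) = val;
}

static inline uint8_t reg_read(uint32_t off)
{
	return *(volatile uint8_t *)(UART_BASE + off);
}

static void program_divisor(uint16_t divisor)
{
	uint8_t lcr = reg_read(REG_LCR);

	reg_write(REG_LCR, lcr | LCR_DLAB);		/* expose divisor latches */
	reg_write(REG_DLL, divisor & 0xFF);
	reg_write(REG_DLM, divisor >> 8);
	reg_write(REG_LCR, (uint8_t)(lcr & ~LCR_DLAB));	/* restore normal registers */
}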
572 struct uart_ns16550_dev_data * const dev_data = dev->data;
573 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
575 uint32_t pclk = 0U;
580 k_spinlock_key_t key = k_spin_lock(&dev_data->lock);
583 if (dev_cfg->pincfg != NULL) {
584 pinctrl_apply_state(dev_cfg->pincfg, PINCTRL_STATE_DEFAULT);
589 dev_data->iir_cache = 0U;
593 ns16550_outbyte(dev_cfg, DLF(dev), dev_data->dlf);
597 uint32_t pcp = dev_cfg->pcp;
617 if (dev_cfg->sys_clk_freq != 0U) {
618 pclk = dev_cfg->sys_clk_freq;
620 if (!device_is_ready(dev_cfg->clock_dev)) {
621 ret = -EINVAL;
625 if (clock_control_get_rate(dev_cfg->clock_dev,
626 dev_cfg->clock_subsys,
627 &pclk) != 0) {
628 ret = -EINVAL;
633 set_baud_rate(dev, cfg->baudrate, pclk);
638 switch (cfg->data_bits) {
652 ret = -ENOTSUP;
656 switch (cfg->stop_bits) {
664 ret = -ENOTSUP;
668 switch (cfg->parity) {
676 ret = -ENOTSUP;
680 dev_data->uart_config = *cfg;
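/*
 * Hedged sketch of the LCR encoding the data-bit, stop-bit and parity
 * switches above select from, using the standard 16550 line-control bit
 * positions (the names are illustrative).
 */
#include <stdint.h>

#define LCR_WLS_5BIT  0x00	/* bits 1:0 - word length select */
#define LCR_WLS_6BIT  0x01
#define LCR_WLS_7BIT  0x02
#define LCR_WLS_8BIT  0x03
#define LCR_STB       0x04	/* bit 2 - 2 stop bits (1.5 with 5-bit words) */
#define LCR_PEN       0x08	/* bit 3 - parity enable */
#define LCR_EPS       0x10	/* bit 4 - even parity select */

/* Example: 8 data bits, 1 stop bit, even parity -> LCR value 0x1B */
static const uint8_t lcr_8e1 = LCR_WLS_8BIT | LCR_PEN | LCR_EPS;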
689 if (cfg->flow_ctrl == UART_CFG_FLOW_CTRL_RTS_CTS) {
710 dev_data->fifo_size = 64;
712 dev_data->fifo_size = 128;
714 dev_data->fifo_size = 16;
717 dev_data->fifo_size = 1;
727 k_spin_unlock(&dev_data->lock, key);
735 struct uart_ns16550_dev_data *data = dev->data;
737 cfg->baudrate = data->uart_config.baudrate;
738 cfg->parity = data->uart_config.parity;
739 cfg->stop_bits = data->uart_config.stop_bits;
740 cfg->data_bits = data->uart_config.data_bits;
741 cfg->flow_ctrl = data->uart_config.flow_ctrl;
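/*
 * Hedged usage sketch of the runtime configuration path above through the
 * generic Zephyr UART API: read back the current settings, adjust them, and
 * hand them to uart_configure(), which lands in uart_ns16550_configure().
 * The chosen values are arbitrary.
 */
#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>

static int reconfigure_uart(const struct device *uart)
{
	struct uart_config cfg;
	int ret = uart_config_get(uart, &cfg);

	if (ret != 0) {
		return ret;
	}

	cfg.baudrate = 115200;
	cfg.parity = UART_CFG_PARITY_NONE;
	cfg.stop_bits = UART_CFG_STOP_BITS_1;
	cfg.data_bits = UART_CFG_DATA_BITS_8;
	cfg.flow_ctrl = UART_CFG_FLOW_CTRL_NONE;

	return uart_configure(uart, &cfg);
}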
761 if (!device_is_ready(reset_spec->dev)) {
763 return -ENODEV;
766 ret = reset_line_toggle(reset_spec->dev, reset_spec->id);
797 struct uart_ns16550_dev_data *data = dev->data;
798 const struct uart_ns16550_dev_config *dev_cfg = dev->config;
805 if (dev_cfg->reset_spec.dev != NULL) {
806 ret = uart_reset_config(&(dev_cfg->reset_spec));
814 if (dev_cfg->pcie) {
817 if (dev_cfg->pcie->bdf == PCIE_BDF_NONE) {
818 return -EINVAL;
821 pcie_probe_mbar(dev_cfg->pcie->bdf, 0, &mbar);
822 pcie_set_cmd(dev_cfg->pcie->bdf, PCIE_CONF_CMDSTAT_MEM, true);
827 if (data->async.tx_dma_params.dma_dev != NULL) {
828 pcie_set_cmd(dev_cfg->pcie->bdf, PCIE_CONF_CMDSTAT_MASTER, true);
829 data->phys_addr = mbar.phys_addr;
837 if (!dev_cfg->io_map) {
845 if (data->async.tx_dma_params.dma_dev != NULL) {
846 data->async.next_rx_buffer = NULL;
847 data->async.next_rx_buffer_len = 0;
848 data->async.uart_dev = dev;
849 k_work_init_delayable(&data->async.rx_dma_params.timeout_work,
851 k_work_init_delayable(&data->async.tx_dma_params.timeout_work,
853 data->async.rx_dma_params.dma_cfg.head_block =
854 &data->async.rx_dma_params.active_dma_block;
855 data->async.tx_dma_params.dma_cfg.head_block =
856 &data->async.tx_dma_params.active_dma_block;
859 if (!dev_cfg->io_map)
865 dma_intel_lpss_set_base(data->async.tx_dma_params.dma_dev, base);
866 dma_intel_lpss_setup(data->async.tx_dma_params.dma_dev);
867 sys_write32((uint32_t)data->phys_addr,
869 sys_write32((uint32_t)(data->phys_addr >> DMA_INTEL_LPSS_ADDR_RIGHT_SHIFT),
875 ret = uart_ns16550_configure(dev, &data->uart_config);
881 dev_cfg->irq_config_func(dev);
893 * @return 0 if a character arrived, -1 if the input buffer is empty.
897 struct uart_ns16550_dev_data *data = dev->data;
898 int ret = -1;
899 k_spinlock_key_t key = k_spin_lock(&data->lock);
903 k_spin_unlock(&data->lock, key);
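/*
 * Hedged usage sketch of the polled path above: spin on uart_poll_in(),
 * which returns -1 while the input buffer is empty and 0 once a character
 * has been stored.
 */
#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/kernel.h>

static unsigned char wait_for_char(const struct device *uart)
{
	unsigned char c;

	while (uart_poll_in(uart, &c) < 0) {
		k_yield();	/* nothing buffered yet; let other threads run */
	}
	return c;
}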
923 struct uart_ns16550_dev_data *data = dev->data;
924 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
925 k_spinlock_key_t key = k_spin_lock(&data->lock);
932 k_spin_unlock(&data->lock, key);
945 struct uart_ns16550_dev_data *data = dev->data;
946 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
947 k_spinlock_key_t key = k_spin_lock(&data->lock);
950 k_spin_unlock(&data->lock, key);
970 struct uart_ns16550_dev_data *data = dev->data;
971 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
973 k_spinlock_key_t key = k_spin_lock(&data->lock);
975 for (i = 0; (i < size) && (i < data->fifo_size); i++) {
979 k_spin_unlock(&data->lock, key);
996 struct uart_ns16550_dev_data *data = dev->data;
998 k_spinlock_key_t key = k_spin_lock(&data->lock);
1000 for (i = 0; (i < size) && (ns16550_read_char(dev, &rx_data[i]) != -1); i++) {
1003 k_spin_unlock(&data->lock, key);
1015 struct uart_ns16550_dev_data *data = dev->data;
1016 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1017 k_spinlock_key_t key = k_spin_lock(&data->lock);
1020 struct uart_ns16550_dev_data *const dev_data = dev->data;
1022 if (!dev_data->tx_stream_on) {
1023 dev_data->tx_stream_on = true;
1041 k_spin_unlock(&data->lock, key);
1051 struct uart_ns16550_dev_data *data = dev->data;
1052 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1053 k_spinlock_key_t key = k_spin_lock(&data->lock);
1059 struct uart_ns16550_dev_data *const dev_data = dev->data;
1061 if (dev_data->tx_stream_on) {
1062 dev_data->tx_stream_on = false;
1078 k_spin_unlock(&data->lock, key);
1090 struct uart_ns16550_dev_data *data = dev->data;
1091 k_spinlock_key_t key = k_spin_lock(&data->lock);
1095 k_spin_unlock(&data->lock, key);
1109 struct uart_ns16550_dev_data *data = dev->data;
1110 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1111 k_spinlock_key_t key = k_spin_lock(&data->lock);
1116 k_spin_unlock(&data->lock, key);
1128 struct uart_ns16550_dev_data *data = dev->data;
1129 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1130 k_spinlock_key_t key = k_spin_lock(&data->lock);
1134 k_spin_unlock(&data->lock, key);
1144 struct uart_ns16550_dev_data *data = dev->data;
1145 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1146 k_spinlock_key_t key = k_spin_lock(&data->lock);
1151 k_spin_unlock(&data->lock, key);
1163 struct uart_ns16550_dev_data *data = dev->data;
1164 k_spinlock_key_t key = k_spin_lock(&data->lock);
1168 k_spin_unlock(&data->lock, key);
1180 struct uart_ns16550_dev_data *data = dev->data;
1181 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1182 k_spinlock_key_t key = k_spin_lock(&data->lock);
1187 k_spin_unlock(&data->lock, key);
1199 struct uart_ns16550_dev_data *data = dev->data;
1200 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1201 k_spinlock_key_t key = k_spin_lock(&data->lock);
1206 k_spin_unlock(&data->lock, key);
1218 struct uart_ns16550_dev_data *data = dev->data;
1219 k_spinlock_key_t key = k_spin_lock(&data->lock);
1223 k_spin_unlock(&data->lock, key);
1237 struct uart_ns16550_dev_data *data = dev->data;
1238 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1239 k_spinlock_key_t key = k_spin_lock(&data->lock);
1243 k_spin_unlock(&data->lock, key);
1258 struct uart_ns16550_dev_data * const dev_data = dev->data;
1259 k_spinlock_key_t key = k_spin_lock(&dev_data->lock);
1261 dev_data->cb = cb;
1262 dev_data->cb_data = cb_data;
1264 k_spin_unlock(&dev_data->lock, key);
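/*
 * Hedged usage sketch of the interrupt-driven path: register a callback with
 * the driver, enable RX interrupts, and drain the FIFO from the callback via
 * uart_fifo_read(). What happens to the received bytes is left out.
 */
#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/kernel.h>

static void uart_isr_cb(const struct device *uart, void *user_data)
{
	uint8_t buf[16];

	ARG_UNUSED(user_data);

	while (uart_irq_update(uart) && uart_irq_is_pending(uart)) {
		if (uart_irq_rx_ready(uart)) {
			int n = uart_fifo_read(uart, buf, sizeof(buf));

			/* hand the n received bytes to the application here */
			ARG_UNUSED(n);
		}
	}
}

static void start_rx(const struct device *uart)
{
	uart_irq_callback_user_data_set(uart, uart_isr_cb, NULL);
	uart_irq_rx_enable(uart);
}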
1276 struct uart_ns16550_dev_data * const dev_data = dev->data;
1277 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1279 if (dev_data->cb) {
1280 dev_data->cb(dev, dev_data->cb_data);
1293 if (dev_data->async.tx_dma_params.dma_dev != NULL) {
1294 const struct uart_ns16550_dev_config * const config = dev->config;
1299 if (dma_status & BIT(dev_data->async.rx_dma_params.dma_channel)) {
1300 async_timer_start(&dev_data->async.rx_dma_params.timeout_work,
1301 dev_data->async.rx_dma_params.timeout_us);
1303 BIT(dev_data->async.rx_dma_params.dma_channel));
1306 dma_intel_lpss_isr(dev_data->async.rx_dma_params.dma_dev);
1309 async_timer_start(&dev_data->async.rx_dma_params.timeout_work,
1310 dev_data->async.rx_dma_params.timeout_us);
1340 struct uart_ns16550_dev_data *data = dev->data;
1341 const struct uart_ns16550_dev_config *const dev_cfg = dev->config;
1342 uint32_t mdc, chg, pclk = 0U;
1345 if (dev_cfg->sys_clk_freq != 0U) {
1346 pclk = dev_cfg->sys_clk_freq;
1348 if (device_is_ready(dev_cfg->clock_dev)) {
1349 clock_control_get_rate(dev_cfg->clock_dev, dev_cfg->clock_subsys, &pclk);
1355 set_baud_rate(dev, val, pclk);
1360 key = k_spin_lock(&data->lock);
1375 k_spin_unlock(&data->lock, key);
1379 return -ENOTSUP;
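/*
 * Hedged usage sketch of the runtime baud-rate change above, through the
 * generic line-control call the driver implements for
 * UART_LINE_CTRL_BAUD_RATE; the target rate is arbitrary.
 */
#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>

static int switch_baud_rate(const struct device *uart)
{
	/* Recomputes the divisor from pclk and reprograms the divisor latches. */
	return uart_line_ctrl_set(uart, UART_LINE_CTRL_BAUD_RATE, 921600);
}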
1400 struct uart_ns16550_dev_data * const dev_data = dev->data;
1401 const struct uart_ns16550_dev_config * const dev_cfg = dev->config;
1402 k_spinlock_key_t key = k_spin_lock(&dev_data->lock);
1404 dev_data->dlf = p;
1405 ns16550_outbyte(dev_cfg, DLF(dev), dev_data->dlf);
1406 k_spin_unlock(&dev_data->lock, key);
1411 return -ENOTSUP;
1419 const struct uart_ns16550_dev_data *data = dev->data;
1421 if (data->async.user_callback) {
1422 data->async.user_callback(dev, evt, data->async.user_data);
1429 struct uart_ns16550_dev_data *data = dev->data;
1430 struct uart_ns16550_tx_dma_params *tx_params = &data->async.tx_dma_params;
1432 (void)k_work_cancel_delayable(&data->async.tx_dma_params.timeout_work);
1436 .data.tx.buf = tx_params->buf,
1437 .data.tx.len = tx_params->buf_len
1440 tx_params->buf = NULL;
1441 tx_params->buf_len = 0U;
1449 struct uart_ns16550_dev_data *data = dev->data;
1450 struct uart_ns16550_rx_dma_params *dma_params = &data->async.rx_dma_params;
1454 .data.rx.buf = dma_params->buf,
1455 .data.rx.len = dma_params->counter - dma_params->offset,
1456 .data.rx.offset = dma_params->offset
1459 dma_params->offset = dma_params->counter;
1468 struct uart_ns16550_dev_data *data = (struct uart_ns16550_dev_data *)dev->data;
1471 .data.rx_buf.buf = data->async.rx_dma_params.buf
1475 data->async.rx_dma_params.buf = NULL;
1476 data->async.rx_dma_params.buf_len = 0U;
1477 data->async.rx_dma_params.offset = 0U;
1478 data->async.rx_dma_params.counter = 0U;
1491 struct uart_ns16550_dev_data *data = dev->data;
1492 struct uart_ns16550_rx_dma_params *dma_params = &data->async.rx_dma_params;
1495 dma_get_status(dma_params->dma_dev,
1496 dma_params->dma_channel,
1499 const int rx_count = dma_params->buf_len - status.pending_length;
1501 if (rx_count > dma_params->counter) {
1502 dma_params->counter = rx_count;
1509 struct uart_ns16550_dev_data *data = (struct uart_ns16550_dev_data *)dev->data;
1510 struct uart_ns16550_rx_dma_params *dma_params = &data->async.rx_dma_params;
1511 k_spinlock_key_t key = k_spin_lock(&data->lock);
1514 if (!device_is_ready(dma_params->dma_dev)) {
1515 ret = -ENODEV;
1519 (void)k_work_cancel_delayable(&data->async.rx_dma_params.timeout_work);
1521 if (dma_params->buf && (dma_params->buf_len > 0)) {
1524 if (data->async.next_rx_buffer != NULL) {
1525 dma_params->buf = data->async.next_rx_buffer;
1526 dma_params->buf_len = data->async.next_rx_buffer_len;
1527 data->async.next_rx_buffer = NULL;
1528 data->async.next_rx_buffer_len = 0;
1532 ret = dma_stop(dma_params->dma_dev,
1533 dma_params->dma_channel);
1542 k_spin_unlock(&data->lock, key);
1548 struct uart_ns16550_dev_data *data = (struct uart_ns16550_dev_data *)dev->data;
1549 struct uart_ns16550_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
1551 assert(rx_dma_params->buf != NULL);
1552 assert(rx_dma_params->buf_len > 0);
1554 struct dma_block_config *head_block_config = &rx_dma_params->active_dma_block;
1556 head_block_config->dest_address = (uintptr_t)rx_dma_params->buf;
1557 head_block_config->source_address = data->phys_addr;
1558 head_block_config->block_size = rx_dma_params->buf_len;
1566 struct uart_ns16550_dev_data *data = (struct uart_ns16550_dev_data *)uart_dev->data;
1567 struct uart_ns16550_rx_dma_params *rx_params = &data->async.rx_dma_params;
1568 struct uart_ns16550_tx_dma_params *tx_params = &data->async.tx_dma_params;
1570 if (channel == tx_params->dma_channel) {
1572 } else if (channel == rx_params->dma_channel) {
1574 rx_params->counter = rx_params->buf_len;
1579 rx_params->buf = data->async.next_rx_buffer;
1580 rx_params->buf_len = data->async.next_rx_buffer_len;
1581 data->async.next_rx_buffer = NULL;
1582 data->async.next_rx_buffer_len = 0U;
1584 if (rx_params->buf != NULL &&
1585 rx_params->buf_len > 0) {
1586 dma_reload(dev, rx_params->dma_channel, data->phys_addr,
1587 (uintptr_t)rx_params->buf, rx_params->buf_len);
1588 dma_start(dev, rx_params->dma_channel);
1600 struct uart_ns16550_dev_data *data = dev->data;
1602 data->async.user_callback = callback;
1603 data->async.user_data = user_data;
1611 struct uart_ns16550_dev_data *data = dev->data;
1612 struct uart_ns16550_tx_dma_params *tx_params = &data->async.tx_dma_params;
1613 k_spinlock_key_t key = k_spin_lock(&data->lock);
1616 if (!device_is_ready(tx_params->dma_dev)) {
1617 ret = -ENODEV;
1621 tx_params->buf = buf;
1622 tx_params->buf_len = len;
1623 tx_params->active_dma_block.source_address = (uintptr_t)buf;
1624 tx_params->active_dma_block.dest_address = data->phys_addr;
1625 tx_params->active_dma_block.block_size = len;
1626 tx_params->active_dma_block.next_block = NULL;
1628 ret = dma_config(tx_params->dma_dev,
1629 tx_params->dma_channel,
1630 (struct dma_config *)&tx_params->dma_cfg);
1633 ret = dma_start(tx_params->dma_dev,
1634 tx_params->dma_channel);
1636 ret = -EIO;
1639 async_timer_start(&data->async.tx_dma_params.timeout_work, timeout_us);
1643 k_spin_unlock(&data->lock, key);
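/*
 * Hedged usage sketch of the DMA-backed transmit path above through the
 * asynchronous UART API; the message buffer and the 10 ms timeout are
 * arbitrary. Completion is reported back through UART_TX_DONE (or
 * UART_TX_ABORTED on timeout).
 */
#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/kernel.h>

static void tx_cb(const struct device *uart, struct uart_event *evt, void *user_data)
{
	ARG_UNUSED(uart);
	ARG_UNUSED(user_data);

	if (evt->type == UART_TX_DONE) {
		/* evt->data.tx.buf / evt->data.tx.len describe what was sent */
	}
}

static int send_async(const struct device *uart)
{
	static const uint8_t msg[] = "hello";
	int ret = uart_callback_set(uart, tx_cb, NULL);

	if (ret != 0) {
		return ret;
	}
	return uart_tx(uart, msg, sizeof(msg) - 1, 10 * USEC_PER_MSEC);
}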
1649 struct uart_ns16550_dev_data *data = dev->data;
1650 struct uart_ns16550_tx_dma_params *tx_params = &data->async.tx_dma_params;
1655 k_spinlock_key_t key = k_spin_lock(&data->lock);
1657 if (!device_is_ready(tx_params->dma_dev)) {
1658 ret = -ENODEV;
1662 (void)k_work_cancel_delayable(&data->async.tx_dma_params.timeout_work);
1664 ret = dma_stop(tx_params->dma_dev, tx_params->dma_channel);
1665 dma_get_status(tx_params->dma_dev,
1666 tx_params->dma_channel,
1668 bytes_tx = tx_params->buf_len - status.pending_length;
1673 .data.tx.buf = tx_params->buf,
1679 k_spin_unlock(&data->lock, key);
1686 struct uart_ns16550_dev_data *data = dev->data;
1687 const struct uart_ns16550_dev_config *config = dev->config;
1688 struct uart_ns16550_rx_dma_params *rx_dma_params = &data->async.rx_dma_params;
1690 k_spinlock_key_t key = k_spin_lock(&data->lock);
1692 if (!device_is_ready(rx_dma_params->dma_dev)) {
1693 ret = -ENODEV;
1697 rx_dma_params->timeout_us = timeout_us;
1698 rx_dma_params->buf = buf;
1699 rx_dma_params->buf_len = len;
1702 ns16550_outword(config, MST(dev), UNMASK_LPSS_INT(rx_dma_params->dma_channel));
1709 dma_config(rx_dma_params->dma_dev,
1710 rx_dma_params->dma_channel,
1711 (struct dma_config *)&rx_dma_params->dma_cfg);
1712 dma_start(rx_dma_params->dma_dev, rx_dma_params->dma_channel);
1715 k_spin_unlock(&data->lock, key);
1721 struct uart_ns16550_dev_data *data = dev->data;
1723 assert(data->async.next_rx_buffer == NULL);
1724 assert(data->async.next_rx_buffer_len == 0);
1725 data->async.next_rx_buffer = buf;
1726 data->async.next_rx_buffer_len = len;
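/*
 * Hedged usage sketch of the DMA receive path above: start reception with one
 * buffer and feed the spare one from the UART_RX_BUF_REQUEST event, so the
 * driver can reload DMA (next_rx_buffer above) without dropping data. Buffer
 * sizes and the 1 ms (1000 us) timeout are arbitrary.
 */
#include <zephyr/device.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/kernel.h>

static uint8_t rx_bufs[2][256];

static void rx_cb(const struct device *uart, struct uart_event *evt, void *user_data)
{
	static int next;

	ARG_UNUSED(user_data);

	switch (evt->type) {
	case UART_RX_BUF_REQUEST:
		next ^= 1;
		/* Hand over the spare buffer; it becomes next_rx_buffer above. */
		uart_rx_buf_rsp(uart, rx_bufs[next], sizeof(rx_bufs[next]));
		break;
	case UART_RX_RDY:
		/* evt->data.rx.buf + evt->data.rx.offset holds evt->data.rx.len bytes */
		break;
	default:
		break;
	}
}

static int start_async_rx(const struct device *uart)
{
	int ret = uart_callback_set(uart, rx_cb, NULL);

	if (ret != 0) {
		return ret;
	}
	return uart_rx_enable(uart, rx_bufs[0], sizeof(rx_bufs[0]), 1000);
}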
1740 const struct device *dev = async_data->uart_dev;
1755 const struct device *dev = async_data->uart_dev;
1812 /* IO-port or MMIO based UART */
1831 const struct uart_ns16550_dev_config *dev_cfg = dev->config; \
1832 unsigned int irq = pcie_alloc_irq(dev_cfg->pcie->bdf); \
1836 pcie_connect_dynamic_irq(dev_cfg->pcie->bdf, irq, \
1841 pcie_irq_enable(dev_cfg->pcie->bdf, irq); \