Lines Matching +full:spi +full:- +full:dev

/* SPDX-License-Identifier: Apache-2.0 */

#include <zephyr/drivers/spi.h>
#include <zephyr/drivers/spi/rtio.h>
static uint32_t spi_pw_reg_read(const struct device *dev, uint32_t offset)
{
        return sys_read32(DEVICE_MMIO_GET(dev) + offset);
}
static void spi_pw_reg_write(const struct device *dev,
                             uint32_t offset, uint32_t val)
{
        sys_write32(val, DEVICE_MMIO_GET(dev) + offset);
}
static void spi_pw_ssp_reset(const struct device *dev)
{
        spi_pw_reg_write(dev, PW_SPI_REG_RESETS, 0x00);
        spi_pw_reg_write(dev, PW_SPI_REG_RESETS, PW_SPI_INST_RESET);
}
static bool is_spi_transfer_ongoing(struct spi_pw_data *spi)
{
        return spi_context_tx_on(&spi->ctx) || spi_context_rx_on(&spi->ctx);
}
static void spi_pw_enable_cs_hw_ctrl(const struct device *dev)
{
        uint32_t cs_ctrl;

        cs_ctrl = spi_pw_reg_read(dev, PW_SPI_REG_CS_CTRL);
        /* ... enable hardware chip-select control ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CS_CTRL, cs_ctrl);
}
static void spi_pw_cs_sw_ctrl(const struct device *dev, bool enable)
{
        uint32_t cs_ctrl;

        cs_ctrl = spi_pw_reg_read(dev, PW_SPI_REG_CS_CTRL);
        /* ... assert or de-assert the chip-select bit based on enable ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CS_CTRL, cs_ctrl);
}
static void spi_pw_intr_enable(const struct device *dev, bool rx_mask)
{
        uint32_t ctrlr1;

        ctrlr1 = spi_pw_reg_read(dev, PW_SPI_REG_CTRLR1);
        /* ... set the interrupt-enable bits (rx_mask selects whether RX is included) ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CTRLR1, ctrlr1);
}
static void spi_pw_intr_disable(const struct device *dev)
{
        uint32_t ctrlr1;

        ctrlr1 = spi_pw_reg_read(dev, PW_SPI_REG_CTRLR1);
        /* ... clear the interrupt-enable bits ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CTRLR1, ctrlr1);
}
static void spi_pw_ssp_enable(const struct device *dev)
{
        uint32_t ctrlr0;

        ctrlr0 = spi_pw_reg_read(dev, PW_SPI_REG_CTRLR0);
        /* ... set the SSP enable bit ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CTRLR0, ctrlr0);
}
static void spi_pw_ssp_disable(const struct device *dev)
{
        uint32_t ctrlr0;

        ctrlr0 = spi_pw_reg_read(dev, PW_SPI_REG_CTRLR0);
        /* ... clear the SSP enable bit ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CTRLR0, ctrlr0);
}
static bool is_pw_ssp_busy(const struct device *dev)
{
        uint32_t status;

        status = spi_pw_reg_read(dev, PW_SPI_REG_SSSR);
        /* ... returns whether the busy flag is set in the status register ... */
}
/* From spi_pw_get_frame_size(): derive the data frame size from the
 * configured word size, warning and falling back to one byte when the
 * size is not supported.
 */
        uint8_t dfs = SPI_WORD_SIZE_GET(config->operation);
        /* ... */
        LOG_WRN("Unsupported dfs, 1-byte size will be used");
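A minimal sketch of the mapping such a frame-size helper implies, assuming the usual 8/16/32-bit word sizes (hypothetical helper name, not the driver's verbatim code):

static uint8_t frame_size_bytes(const struct spi_config *config)
{
        switch (SPI_WORD_SIZE_GET(config->operation)) {
        case 8:
                return 1U;
        case 16:
                return 2U;
        case 32:
                return 4U;
        default:
                LOG_WRN("Unsupported dfs, 1-byte size will be used");
                return 1U;
        }
}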
void spi_pw_cs_ctrl_enable(const struct device *dev, bool enable)
{
        struct spi_pw_data *spi = dev->data;

        if (enable) {
                if (spi->cs_mode == CS_SW_MODE) {
                        spi_pw_cs_sw_ctrl(dev, true);
                } else if (spi->cs_mode == CS_GPIO_MODE) {
                        spi_context_cs_control(&spi->ctx, true);
                }
        } else {
                if (spi->cs_mode == CS_SW_MODE) {
                        spi_pw_cs_sw_ctrl(dev, false);
                } else if (spi->cs_mode == CS_GPIO_MODE) {
                        spi_context_cs_control(&spi->ctx, false);
                }
        }
}
static void spi_pw_cs_ctrl_init(const struct device *dev)
{
        uint32_t cs_ctrl;
        struct spi_pw_data *spi = dev->data;

        cs_ctrl = spi_pw_reg_read(dev, PW_SPI_REG_CS_CTRL);
        if (spi->cs_output == PW_SPI_CS1_OUTPUT_SELECT) {
                /* ... route chip-select to output 1 ... */
        }
        /* ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CS_CTRL, cs_ctrl);

        if (spi->cs_mode == CS_HW_MODE) {
                spi_pw_enable_cs_hw_ctrl(dev);
        } else if (spi->cs_mode == CS_SW_MODE) {
                spi_pw_cs_sw_ctrl(dev, false);
        } else if (spi->cs_mode == CS_GPIO_MODE) {
                spi_pw_cs_sw_ctrl(dev, false);
        }
}
static void spi_pw_tx_thld_set(const struct device *dev)
{
        uint32_t reg_data;

        reg_data = spi_pw_reg_read(dev, PW_SPI_REG_SITF);
        /* ... program the TX FIFO threshold fields ... */
        spi_pw_reg_write(dev, PW_SPI_REG_SITF, reg_data);
}
static void spi_pw_rx_thld_set(const struct device *dev,
                               struct spi_pw_data *spi)
{
        uint32_t reg_data;

        reg_data = spi_pw_reg_read(dev, PW_SPI_REG_SIRF);
        /* ... default RX threshold ... */
        if (spi->ctx.rx_len && spi->ctx.rx_len < spi->fifo_depth) {
                reg_data = spi->ctx.rx_len - 1;
        }
        spi_pw_reg_write(dev, PW_SPI_REG_SIRF, reg_data);
}
static int spi_pw_set_data_size(const struct device *dev,
                                const struct spi_config *config)
{
        uint32_t ctrlr0;

        ctrlr0 = spi_pw_reg_read(dev, PW_SPI_REG_CTRLR0);
        /* ... clear the current data-size fields, then set them from the word size ... */
        if (SPI_WORD_SIZE_GET(config->operation) == 4) {
        } else if (SPI_WORD_SIZE_GET(config->operation) == 8) {
        } else if (SPI_WORD_SIZE_GET(config->operation) == 16) {
        } else if (SPI_WORD_SIZE_GET(config->operation) == 32) {
        } else {
                return -ENOTSUP;
        }
        spi_pw_reg_write(dev, PW_SPI_REG_CTRLR0, ctrlr0);
        return 0;
}
static void spi_pw_config_phase_polarity(const struct device *dev,
                                         const struct spi_config *config)
{
        uint32_t ctrlr1, mode;

        ctrlr1 = spi_pw_reg_read(dev, PW_SPI_REG_CTRLR1);
        mode = (SPI_MODE_GET(config->operation) & SPI_MODE_CPOL) |
               (SPI_MODE_GET(config->operation) & SPI_MODE_CPHA);
        /* ... program the clock phase/polarity bits for this mode ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CTRLR1, ctrlr1);
}
static void spi_pw_enable_clk(const struct device *dev)
{
        uint32_t clks;

        clks = spi_pw_reg_read(dev, PW_SPI_REG_CLKS);
        /* ... enable/update the clock bits ... */
        spi_pw_reg_write(dev, PW_SPI_REG_CLKS, clks);
}
static void spi_pw_config_clk(const struct device *dev, const struct spi_pw_config *info,
                              const struct spi_config *config)
{
        uint32_t ctrlr0, scr;

        if (!config->frequency) {
                /* ... default divider ... */
        } else if (config->frequency > PW_SPI_BR_MAX_FRQ) {
                scr = (info->clock_freq / PW_SPI_BR_MAX_FRQ) - 1;
        } else {
                scr = (info->clock_freq / config->frequency) - 1;
        }
        ctrlr0 = spi_pw_reg_read(dev, PW_SPI_REG_CTRLR0);
        spi_pw_reg_write(dev, PW_SPI_REG_CTRLR0, ctrlr0);
}
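For a concrete sense of the divider above, assume (purely for illustration) a 100 MHz reference clock in info->clock_freq and a requested 10 MHz bus clock:

        /* Illustrative numbers only; the real values come from devicetree/config */
        scr = (100000000U / 10000000U) - 1;     /* = 9, i.e. the reference clock divided by 10 */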
static void spi_pw_completed(const struct device *dev, int err)
{
        struct spi_pw_data *spi = dev->data;

        if (!err && (spi_context_tx_on(&spi->ctx) ||
                     spi_context_rx_on(&spi->ctx))) {
                return;
        }

        /* Wait for the controller to go idle before shutting it down */
        while (is_pw_ssp_busy(dev)) {
                /* ... */
        }

        spi_pw_intr_disable(dev);
        spi_pw_ssp_disable(dev);
        spi_pw_cs_ctrl_enable(dev, false);

        LOG_DBG("SPI transaction completed %s error\n",
                err ? "with" : "without");

        spi_context_complete(&spi->ctx, dev, err);
}
static void spi_pw_clear_intr(const struct device *dev)
{
        uint32_t sssr;

        sssr = spi_pw_reg_read(dev, PW_SPI_REG_SSSR);
        /* ... acknowledge the pending status bits ... */
        spi_pw_reg_write(dev, PW_SPI_REG_SSSR, sssr);
}
static int spi_pw_get_tx_fifo_level(const struct device *dev)
{
        uint32_t tx_fifo_level = spi_pw_reg_read(dev, PW_SPI_REG_SITF);
        /* ... extract and return the TX FIFO entry count ... */
}

static int spi_pw_get_rx_fifo_level(const struct device *dev)
{
        uint32_t rx_fifo_level = spi_pw_reg_read(dev, PW_SPI_REG_SIRF);
        /* ... extract and return the RX FIFO entry count ... */
}
static void spi_pw_reset_tx_fifo_level(const struct device *dev)
{
        uint32_t tx_fifo_level = spi_pw_reg_read(dev, PW_SPI_REG_SITF);
        /* ... clear the TX FIFO level field ... */
        spi_pw_reg_write(dev, PW_SPI_REG_SITF, tx_fifo_level);
}

static void spi_pw_update_rx_fifo_level(uint32_t len,
                                        const struct device *dev)
{
        uint32_t rx_fifo_level = spi_pw_reg_read(dev, PW_SPI_REG_SIRF);
        /* ... program the RX threshold from len ... */
        spi_pw_reg_write(dev, PW_SPI_REG_SIRF, rx_fifo_level);
}
static void spi_pw_tx_data(const struct device *dev)
{
        struct spi_pw_data *spi = dev->data;
        uint32_t data = 0U;
        int32_t fifo_len;

        /* Bound the burst by the space left in the FIFOs */
        if (spi_context_rx_on(&spi->ctx)) {
                fifo_len = spi->fifo_depth -
                           spi_pw_get_tx_fifo_level(dev) -
                           spi_pw_get_rx_fifo_level(dev);
        } else {
                fifo_len = spi->fifo_depth - spi_pw_get_tx_fifo_level(dev);
        }

        while (fifo_len > 0) {
                if (spi_context_tx_buf_on(&spi->ctx)) {
                        switch (spi->dfs) {
                        /* ... load 1, 2 or 4 bytes of data from
                         * spi->ctx.tx_buf, depending on the frame size ...
                         */
                        }
                } else if (spi_context_rx_on(&spi->ctx)) {
                        if ((int)(spi->ctx.rx_len - spi->fifo_diff) <= 0) {
                                break;
                        }
                } else if (spi_context_tx_on(&spi->ctx)) {
                        /* ... dummy data for TX-only frames ... */
                } else {
                        break;
                }

                spi_pw_reg_write(dev, PW_SPI_REG_SSDR, data);
                spi_context_update_tx(&spi->ctx, spi->dfs, 1);
                spi->fifo_diff++;
                fifo_len--;
        }

        if (!spi_context_tx_on(&spi->ctx)) {
                spi_pw_reset_tx_fifo_level(dev);
        }
}
static void spi_pw_rx_data(const struct device *dev)
{
        struct spi_pw_data *spi = dev->data;

        while (spi_pw_get_rx_fifo_level(dev)) {
                uint32_t data = spi_pw_reg_read(dev, PW_SPI_REG_SSDR);

                if (spi_context_rx_buf_on(&spi->ctx)) {
                        switch (spi->dfs) {
                        /* ... store 1, 2 or 4 bytes of data to
                         * spi->ctx.rx_buf ((uint8_t *), (uint16_t *) or
                         * (uint32_t *)), depending on the frame size ...
                         */
                        }
                }
                spi_context_update_rx(&spi->ctx, spi->dfs, 1);
                spi->fifo_diff--;
        }

        if (!spi->ctx.rx_len && spi->ctx.tx_len < spi->fifo_depth) {
                spi_pw_update_rx_fifo_level(spi->ctx.tx_len - 1, dev);
        } else if (spi_pw_get_rx_fifo_level(dev) >= spi->ctx.rx_len) {
                spi_pw_update_rx_fifo_level(spi->ctx.rx_len - 1, dev);
        }
}
static int spi_pw_transfer(const struct device *dev)
{
        uint32_t intr_status;
        int err = 0;

        intr_status = spi_pw_reg_read(dev, PW_SPI_REG_SSSR);
        /* ... the elided status-error checks each set err = -EIO ... */

        /* Move data between the FIFOs and the context buffers */
        spi_pw_rx_data(dev);
        spi_pw_tx_data(dev);

        spi_pw_clear_intr(dev);

        return err;
}
static int spi_pw_configure(const struct device *dev,
                            const struct spi_pw_config *info,
                            struct spi_pw_data *spi,
                            const struct spi_config *config)
{
        int err;

        spi->ctx.config = config;

        if (!spi_cs_is_gpio(spi->ctx.config)) {
                if (spi->cs_mode == CS_GPIO_MODE) {
                        spi->cs_mode = CS_HW_MODE;
                        spi_pw_enable_cs_hw_ctrl(dev);
                }
        }

        if (config->operation & SPI_HALF_DUPLEX) {
                LOG_ERR("Half-duplex not supported");
                return -ENOTSUP;
        }
        if (config->operation & SPI_OP_MODE_SLAVE) {
                return -ENOTSUP;
        }
        if ((config->operation & SPI_TRANSFER_LSB) ||
            (config->operation & (SPI_LINES_DUAL |
                                  SPI_LINES_QUAD | SPI_LINES_OCTAL))) {
                return -EINVAL;
        }
        if (config->operation & SPI_FRAME_FORMAT_TI) {
                return -ENOTSUP;
        }
        if (config->operation & SPI_HOLD_ON_CS) {
                return -ENOTSUP;
        }

        err = spi_pw_set_data_size(dev, config);
        if (err) {
                return -ENOTSUP;
        }

        spi_pw_config_phase_polarity(dev, config);
        spi_pw_enable_clk(dev);
        spi_pw_config_clk(dev, info, config);

        return 0;
}
static int transceive(const struct device *dev, const struct spi_config *config,
                      const struct spi_buf_set *tx_bufs, const struct spi_buf_set *rx_bufs,
                      bool asynchronous, spi_callback_t cb, void *userdata)
{
        const struct spi_pw_config *info = dev->config;
        struct spi_pw_data *spi = dev->data;
        int err;

        /* ... unsupported requests are rejected with -ENOTSUP here ... */

        spi_context_lock(&spi->ctx, asynchronous, cb, userdata, config);

        err = spi_pw_configure(dev, info, spi, config);
        if (err) {
                LOG_ERR("spi pw config fail");
                goto out;
        }

        spi->dfs = spi_pw_get_frame_size(config);
        spi_context_buffers_setup(&spi->ctx, tx_bufs, rx_bufs,
                                  spi->dfs);
        spi->fifo_diff = 0U;

        spi_pw_tx_thld_set(dev);
        spi_pw_rx_thld_set(dev, spi);

        spi_pw_cs_ctrl_enable(dev, true);
        spi_pw_ssp_enable(dev);

        /* Interrupt-driven path (alternative): */
        spi_pw_intr_enable(dev, true);          /* or (dev, false) when no RX is expected */
        err = spi_context_wait_for_completion(&spi->ctx);

        /* Polling path (alternative): */
        do {
                err = spi_pw_transfer(dev);
        } while ((!err) && is_spi_transfer_ongoing(spi));
        spi_pw_completed(dev, err);

out:
        spi_context_release(&spi->ctx, err);
        return err;
}
static int spi_pw_transceive(const struct device *dev, const struct spi_config *config,
                             const struct spi_buf_set *tx_bufs, const struct spi_buf_set *rx_bufs)
{
        LOG_DBG("%p, %p, %p\n", dev, tx_bufs, rx_bufs);
        return transceive(dev, config, tx_bufs, rx_bufs, false, NULL, NULL);
}

static int spi_pw_transceive_async(const struct device *dev, const struct spi_config *config,
                                   const struct spi_buf_set *tx_bufs, const struct spi_buf_set *rx_bufs,
                                   spi_callback_t cb, void *userdata)
{
        LOG_DBG("%p, %p, %p, %p, %p\n", dev, tx_bufs, rx_bufs, cb, userdata);
        return transceive(dev, config, tx_bufs, rx_bufs, true, cb, userdata);
}
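Once hooked into the SPI device API, the driver is exercised through the generic Zephyr SPI calls. A minimal application-side sketch (device binding, frequency and buffers are illustrative assumptions, not taken from this driver):

#include <zephyr/drivers/spi.h>

static int sample_transfer(const struct device *spi_dev)
{
        static const struct spi_config cfg = {
                .frequency = 1000000U,
                .operation = SPI_OP_MODE_MASTER | SPI_WORD_SET(8),
        };
        uint8_t tx_data[2] = { 0xAA, 0x55 };
        uint8_t rx_data[2] = { 0 };
        const struct spi_buf tx_buf = { .buf = tx_data, .len = sizeof(tx_data) };
        const struct spi_buf rx_buf = { .buf = rx_data, .len = sizeof(rx_data) };
        const struct spi_buf_set tx = { .buffers = &tx_buf, .count = 1 };
        const struct spi_buf_set rx = { .buffers = &rx_buf, .count = 1 };

        return spi_transceive(spi_dev, &cfg, &tx, &rx);
}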
static int spi_pw_release(const struct device *dev, const struct spi_config *config)
{
        struct spi_pw_data *spi = dev->data;

        if (!spi_context_configured(&spi->ctx, config)) {
                return -EINVAL;
        }
        spi_context_unlock_unconditionally(&spi->ctx);
        return 0;
}
static void spi_pw_isr(const void *arg)
{
        const struct device *dev = (const struct device *)arg;
        int err = spi_pw_transfer(dev);

        spi_pw_completed(dev, err);
}
static DEVICE_API(spi, pw_spi_api) = {
        /* ... .transceive = spi_pw_transceive, .release = spi_pw_release, etc. ... */
};
static int spi_pw_init(const struct device *dev)
{
        const struct spi_pw_config *info = dev->config;
        struct spi_pw_data *spi = dev->data;
        int err;

        if (info->pcie) {
                struct pcie_bar mbar;

                if (info->pcie->bdf == PCIE_BDF_NONE) {
                        return -ENODEV;
                }
                if (!pcie_probe_mbar(info->pcie->bdf, 0, &mbar)) {
                        return -EINVAL;
                }
                pcie_set_cmd(info->pcie->bdf, PCIE_CONF_CMDSTAT_MEM,
                             true);
                device_map(DEVICE_MMIO_RAM_PTR(dev), mbar.phys_addr,
                           mbar.size, K_MEM_CACHE_NONE);
                pcie_set_cmd(info->pcie->bdf,
                             /* ... */ true);
        } else {
                DEVICE_MMIO_MAP(dev, K_MEM_CACHE_NONE);
        }
        /* (non-PCIe builds call DEVICE_MMIO_MAP(dev, K_MEM_CACHE_NONE) directly) */

        spi_pw_ssp_reset(dev);
        spi_pw_ssp_disable(dev);
        spi_pw_cs_ctrl_init(dev);
        spi_pw_intr_disable(dev);
        info->irq_config(dev);

        if (spi->cs_mode == CS_GPIO_MODE) {
                err = spi_context_cs_configure_all(&spi->ctx);
                if (err < 0) {
                        return err;
                }
        }

        spi_context_unlock_unconditionally(&spi->ctx);
        LOG_DBG("SPI pw init success");
        return 0;
}
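Each instance is ultimately registered so that spi_pw_init() runs at boot and pw_spi_api backs the generic SPI calls. A generic sketch of such a registration (the per-instance object names are placeholders; the driver's real instantiation macro, which also wires up PCIe and the IRQ init below, is not shown here):

/* Illustrative only; spi_pw_data_0 and spi_pw_config_0 are assumed names. */
DEVICE_DT_INST_DEFINE(0, spi_pw_init, NULL,
                      &spi_pw_data_0, &spi_pw_config_0,
                      POST_KERNEL, CONFIG_SPI_INIT_PRIORITY,
                      &pw_spi_api);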
/* Per-instance PCIe interrupt setup (body of the driver's instantiation macro): */
        BUILD_ASSERT(/* ... */,                                           \
                     "SPI PCIe requires dynamic interrupts");             \
        static void spi_##n##_irq_init(const struct device *dev)          \
        {                                                                  \
                const struct spi_pw_config *info = dev->config;           \
                unsigned int irq;                                          \
                                                                           \
                irq = pcie_alloc_irq(info->pcie->bdf);                    \
                /* ... */                                                  \
                pcie_conf_write(info->pcie->bdf, /* ... */);               \
                pcie_connect_dynamic_irq(info->pcie->bdf, irq,             \
                                         /* ... priority, spi_pw_isr, ... */); \
                pcie_irq_enable(info->pcie->bdf, irq);                     \
                LOG_DBG("lpass spi Configure irq %d", irq);                \
        }