/*
 * Copyright (c) 2024 Vogl Electronic GmbH
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#define DT_DRV_COMPAT litex_spi_litespi

#define LOG_LEVEL CONFIG_SPI_LOG_LEVEL
#include <zephyr/logging/log.h>
LOG_MODULE_REGISTER(spi_litex_litespi);

#include <zephyr/sys/byteorder.h>
#include "spi_litex_common.h"

#define SPIFLASH_CORE_MASTER_PHYCONFIG_LEN_OFFSET 0x0
#define SPIFLASH_CORE_MASTER_PHYCONFIG_WIDTH_OFFSET 0x1
#define SPIFLASH_CORE_MASTER_PHYCONFIG_MASK_OFFSET 0x2

#define SPIFLASH_CORE_MASTER_STATUS_TX_READY_OFFSET 0x0
#define SPIFLASH_CORE_MASTER_STATUS_RX_READY_OFFSET 0x1

#define SPI_MAX_WORD_SIZE 32
#define SPI_MAX_CS_SIZE 4

struct spi_litex_dev_config {
	uint32_t core_master_cs_addr;
	uint32_t core_master_phyconfig_addr;
	uint32_t core_master_rxtx_addr;
	uint32_t core_master_rxtx_size;
	uint32_t core_master_status_addr;
	uint32_t phy_clk_divisor_addr;
	bool phy_clk_divisor_exists;
};

struct spi_litex_data {
	struct spi_context ctx;
	uint8_t dfs; /* dfs in bytes: 1, 2 or 4 */
};

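/*
 * Derive the PHY clock divisor from the system clock so that the resulting
 * SCK, sys_clk / (2 * (divisor + 1)), does not exceed the requested
 * config->frequency.
 */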
static int spi_litex_set_frequency(const struct device *dev, const struct spi_config *config)
{
	const struct spi_litex_dev_config *dev_config = dev->config;

	if (!dev_config->phy_clk_divisor_exists) {
		/* The LiteX simulator has no phy_clk_divisor register, hence this check. */
		LOG_WRN("No phy_clk_divisor found, can't change frequency");
		return 0;
	}

	uint32_t divisor = DIV_ROUND_UP(sys_clock_hw_cycles_per_sec(), (2 * config->frequency)) - 1;

	litex_write32(divisor, dev_config->phy_clk_divisor_addr);
	return 0;
}

/* Helper Functions */
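/*
 * Check the requested spi_config against what this controller supports
 * (master only, single data line, MSB first, CPOL = CPHA = 0, active-low CS),
 * cache the data frame size and program the clock divisor.
 */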
static int spi_config(const struct device *dev, const struct spi_config *config)
{
	struct spi_litex_data *dev_data = dev->data;

	if (config->slave != 0) {
		if (config->slave >= SPI_MAX_CS_SIZE) {
			LOG_ERR("More slaves than supported");
			return -ENOTSUP;
		}
	}

	if (config->operation & SPI_HALF_DUPLEX) {
		LOG_ERR("Half-duplex not supported");
		return -ENOTSUP;
	}

	if (SPI_WORD_SIZE_GET(config->operation) > SPI_MAX_WORD_SIZE) {
		LOG_ERR("Word size must be <= %d, is %d", SPI_MAX_WORD_SIZE,
			SPI_WORD_SIZE_GET(config->operation));
		return -ENOTSUP;
	}

	if (config->operation & SPI_CS_ACTIVE_HIGH) {
		LOG_ERR("CS active high not supported");
		return -ENOTSUP;
	}

	if (config->operation & SPI_LOCK_ON) {
		LOG_ERR("Lock On not supported");
		return -ENOTSUP;
	}

	if (IS_ENABLED(CONFIG_SPI_EXTENDED_MODES) &&
	    (config->operation & SPI_LINES_MASK) != SPI_LINES_SINGLE) {
		LOG_ERR("Only supports single mode");
		return -ENOTSUP;
	}

	if (config->operation & SPI_TRANSFER_LSB) {
		LOG_ERR("LSB first not supported");
		return -ENOTSUP;
	}

	if (config->operation & (SPI_MODE_CPOL | SPI_MODE_CPHA)) {
		LOG_ERR("Only supports CPOL=CPHA=0");
		return -ENOTSUP;
	}

	if (config->operation & SPI_OP_MODE_SLAVE) {
		LOG_ERR("Slave mode not supported");
		return -ENOTSUP;
	}

	if (config->operation & SPI_MODE_LOOP) {
		LOG_ERR("Loopback mode not supported");
		return -ENOTSUP;
	}

	dev_data->dfs = get_dfs_value(config);

	spi_litex_set_frequency(dev, config);

	return 0;
}

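/*
 * Pack transfer length, bus width and data-line mask into the 32-bit PHY
 * config word: length in byte 0, width in byte 1 and mask in byte 2, per the
 * SPIFLASH_CORE_MASTER_PHYCONFIG_*_OFFSET byte offsets above.
 */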
static void spiflash_len_mask_width_write(uint32_t len, uint32_t width, uint32_t mask,
					  uint32_t addr)
{
	uint32_t tmp = len & BIT_MASK(8);
	uint32_t word = tmp << (SPIFLASH_CORE_MASTER_PHYCONFIG_LEN_OFFSET * 8);

	tmp = width & BIT_MASK(8);
	word |= tmp << (SPIFLASH_CORE_MASTER_PHYCONFIG_WIDTH_OFFSET * 8);
	tmp = mask & BIT_MASK(8);
	word |= tmp << (SPIFLASH_CORE_MASTER_PHYCONFIG_MASK_OFFSET * 8);
	litex_write32(word, addr);
}

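/*
 * Polled transfer: program the PHY config, assert the chip select, drain any
 * stale words from the RX path, then exchange the buffers chunk by chunk while
 * waiting on the TX/RX ready bits of the status register.
 */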
static int spi_litex_xfer(const struct device *dev, const struct spi_config *config)
{
	const struct spi_litex_dev_config *dev_config = dev->config;
	struct spi_litex_data *data = dev->data;
	struct spi_context *ctx = &data->ctx;
	uint32_t txd, rxd;
	int ret = 0;

	uint8_t len = data->dfs; /* SPI Xfer length */
	uint8_t old_len = len;   /* old SPI Xfer length */
	uint8_t width = BIT(0);  /* SPI Xfer width */
	uint8_t mask = BIT(0);   /* SPI Xfer mask */

	spiflash_len_mask_width_write(len * 8, width, mask, dev_config->core_master_phyconfig_addr);

	litex_write32(BIT(config->slave), dev_config->core_master_cs_addr);

	/* Flush RX buffer */
	while ((litex_read8(dev_config->core_master_status_addr) &
		BIT(SPIFLASH_CORE_MASTER_STATUS_RX_READY_OFFSET))) {
		rxd = litex_read32(dev_config->core_master_rxtx_addr);
		LOG_DBG("flushed rxd: 0x%x", rxd);
	}

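	/*
	 * Exchange the data one chunk at a time; the chunk size is bounded by
	 * the width of the rxtx register, and the PHY transfer length is
	 * re-programmed whenever it changes.
	 */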
	do {
		len = MIN(spi_context_max_continuous_chunk(ctx), dev_config->core_master_rxtx_size);
		if (len != old_len) {
			spiflash_len_mask_width_write(len * 8, width, mask,
						      dev_config->core_master_phyconfig_addr);
			old_len = len;
		}

		if (spi_context_tx_buf_on(ctx)) {
			litex_spi_tx_put(len, &txd, ctx->tx_buf);
		} else {
			txd = 0U;
		}

		while (!(litex_read8(dev_config->core_master_status_addr) &
			 BIT(SPIFLASH_CORE_MASTER_STATUS_TX_READY_OFFSET))) {
			;
		}

		LOG_DBG("txd: 0x%x", txd);
		litex_write32(txd, dev_config->core_master_rxtx_addr);

		spi_context_update_tx(ctx, data->dfs, len / data->dfs);

		while (!(litex_read8(dev_config->core_master_status_addr) &
			 BIT(SPIFLASH_CORE_MASTER_STATUS_RX_READY_OFFSET))) {
			;
		}

		rxd = litex_read32(dev_config->core_master_rxtx_addr);
		LOG_DBG("rxd: 0x%x", rxd);

		if (spi_context_rx_buf_on(ctx)) {
			litex_spi_rx_put(len, &rxd, ctx->rx_buf);
		}

		spi_context_update_rx(ctx, data->dfs, len / data->dfs);

	} while (spi_context_tx_on(ctx) || spi_context_rx_on(ctx));

	litex_write32(0, dev_config->core_master_cs_addr);

	spi_context_complete(ctx, dev, 0);

	return ret;
}

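/*
 * Blocking transceive entry point: validate and apply the configuration, set
 * up the TX/RX buffers in the spi_context and run the polled transfer.
 */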
static int spi_litex_transceive(const struct device *dev, const struct spi_config *config,
				const struct spi_buf_set *tx_bufs,
				const struct spi_buf_set *rx_bufs)
{
	struct spi_litex_data *data = dev->data;

	int ret = spi_config(dev, config);

	if (ret) {
		return ret;
	}

	if (!tx_bufs && !rx_bufs) {
		return 0;
	}

	spi_context_buffers_setup(&data->ctx, tx_bufs, rx_bufs, data->dfs);

	ret = spi_litex_xfer(dev, config);

	return ret;
}

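/* Asynchronous transfers are not implemented by this driver. */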
#ifdef CONFIG_SPI_ASYNC
static int spi_litex_transceive_async(const struct device *dev, const struct spi_config *config,
				      const struct spi_buf_set *tx_bufs,
				      const struct spi_buf_set *rx_bufs,
				      struct k_poll_signal *async)
{
	return -ENOTSUP;
}
#endif /* CONFIG_SPI_ASYNC */

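/*
 * Nothing to release: SPI_LOCK_ON is rejected in spi_config(), so the bus is
 * never held between transfers.
 */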
static int spi_litex_release(const struct device *dev, const struct spi_config *config)
{
	return 0;
}

/* Device Instantiation */
static DEVICE_API(spi, spi_litex_api) = {
	.transceive = spi_litex_transceive,
#ifdef CONFIG_SPI_ASYNC
	.transceive_async = spi_litex_transceive_async,
#endif /* CONFIG_SPI_ASYNC */
#ifdef CONFIG_SPI_RTIO
	.iodev_submit = spi_rtio_iodev_default_submit,
#endif
	.release = spi_litex_release,
};

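/*
 * Define one driver instance per enabled devicetree node, resolving the CSR
 * block addresses from the named reg entries. The phy_clk_divisor register is
 * optional (it is absent in the LiteX simulator), hence the _OR fallback and
 * the phy_clk_divisor_exists flag.
 */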
#define SPI_INIT(n)                                                                                \
	static struct spi_litex_data spi_litex_data_##n = {                                        \
		SPI_CONTEXT_INIT_LOCK(spi_litex_data_##n, ctx),                                    \
		SPI_CONTEXT_INIT_SYNC(spi_litex_data_##n, ctx),                                    \
	};                                                                                         \
	static struct spi_litex_dev_config spi_litex_cfg_##n = {                                   \
		.core_master_cs_addr = DT_INST_REG_ADDR_BY_NAME(n, core_master_cs),               \
		.core_master_phyconfig_addr = DT_INST_REG_ADDR_BY_NAME(n, core_master_phyconfig), \
		.core_master_rxtx_addr = DT_INST_REG_ADDR_BY_NAME(n, core_master_rxtx),           \
		.core_master_rxtx_size = DT_INST_REG_SIZE_BY_NAME(n, core_master_rxtx),           \
		.core_master_status_addr = DT_INST_REG_ADDR_BY_NAME(n, core_master_status),       \
		.phy_clk_divisor_exists = DT_INST_REG_HAS_NAME(n, phy_clk_divisor),               \
		.phy_clk_divisor_addr = DT_INST_REG_ADDR_BY_NAME_OR(n, phy_clk_divisor, 0),       \
	};                                                                                         \
	SPI_DEVICE_DT_INST_DEFINE(n, NULL, NULL, &spi_litex_data_##n, &spi_litex_cfg_##n,          \
				  POST_KERNEL, CONFIG_SPI_INIT_PRIORITY, &spi_litex_api);

DT_INST_FOREACH_STATUS_OKAY(SPI_INIT)