/*
 * Copyright 2022 TOKITA Hiroshi <tokita.hiroshi@fujitsu.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#define DT_DRV_COMPAT arm_pl022

#include <errno.h>
#include <zephyr/kernel.h>
#include <zephyr/drivers/clock_control.h>
#include <zephyr/drivers/reset.h>
#include <zephyr/drivers/spi.h>
#include <zephyr/drivers/spi/rtio.h>
#include <zephyr/sys/util.h>
#include <zephyr/spinlock.h>
#include <soc.h>
#if defined(CONFIG_PINCTRL)
#include <zephyr/drivers/pinctrl.h>
#endif
#if defined(CONFIG_SPI_PL022_DMA)
#include <zephyr/drivers/dma.h>
#endif

#define LOG_LEVEL CONFIG_SPI_LOG_LEVEL
#include <zephyr/logging/log.h>
#include <zephyr/irq.h>
LOG_MODULE_REGISTER(spi_pl022);

#include "spi_context.h"

#define SSP_MASK(regname, name) GENMASK(SSP_##regname##_##name##_MSB, SSP_##regname##_##name##_LSB)
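
/*
 * For example, SSP_MASK(CR0, DSS) expands to
 * GENMASK(SSP_CR0_DSS_MSB, SSP_CR0_DSS_LSB) = GENMASK(3, 0) = 0x0F,
 * i.e. the four data-size-select bits of CR0.
 */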

/* PL022 Register definitions */

/*
 * Macros to access SSP Registers with their offsets
 */
#define SSP_CR0(r)      (r + 0x000)
#define SSP_CR1(r)      (r + 0x004)
#define SSP_DR(r)       (r + 0x008)
#define SSP_SR(r)       (r + 0x00C)
#define SSP_CPSR(r)     (r + 0x010)
#define SSP_IMSC(r)     (r + 0x014)
#define SSP_RIS(r)      (r + 0x018)
#define SSP_MIS(r)      (r + 0x01C)
#define SSP_ICR(r)      (r + 0x020)
#define SSP_DMACR(r)    (r + 0x024)

/*
 * Control Register 0
 */
#define SSP_CR0_SCR_MSB 15
#define SSP_CR0_SCR_LSB 8
#define SSP_CR0_SPH_MSB 7
#define SSP_CR0_SPH_LSB 7
#define SSP_CR0_SPO_MSB 6
#define SSP_CR0_SPO_LSB 6
#define SSP_CR0_FRF_MSB 5
#define SSP_CR0_FRF_LSB 4
#define SSP_CR0_DSS_MSB 3
#define SSP_CR0_DSS_LSB 0

/* Data size select */
#define SSP_CR0_MASK_DSS SSP_MASK(CR0, DSS)
/* Frame format */
#define SSP_CR0_MASK_FRF SSP_MASK(CR0, FRF)
/* Polarity */
#define SSP_CR0_MASK_SPO SSP_MASK(CR0, SPO)
/* Phase */
#define SSP_CR0_MASK_SPH SSP_MASK(CR0, SPH)
/* Serial Clock Rate */
#define SSP_CR0_MASK_SCR SSP_MASK(CR0, SCR)

/*
 * Control Register 1
 */
#define SSP_CR1_SOD_MSB 3
#define SSP_CR1_SOD_LSB 3
#define SSP_CR1_MS_MSB 2
#define SSP_CR1_MS_LSB 2
#define SSP_CR1_SSE_MSB 1
#define SSP_CR1_SSE_LSB 1
#define SSP_CR1_LBM_MSB 0
#define SSP_CR1_LBM_LSB 0

/* Loopback Mode */
#define SSP_CR1_MASK_LBM SSP_MASK(CR1, LBM)
/* Port Enable */
#define SSP_CR1_MASK_SSE SSP_MASK(CR1, SSE)
/* Controller/Peripheral (Master/Slave) select */
#define SSP_CR1_MASK_MS SSP_MASK(CR1, MS)
/* Peripheral (Slave) mode output disabled */
#define SSP_CR1_MASK_SOD SSP_MASK(CR1, SOD)

/*
 * Status Register
 */
#define SSP_SR_BSY_MSB 4
#define SSP_SR_BSY_LSB 4
#define SSP_SR_RFF_MSB 3
#define SSP_SR_RFF_LSB 3
#define SSP_SR_RNE_MSB 2
#define SSP_SR_RNE_LSB 2
#define SSP_SR_TNF_MSB 1
#define SSP_SR_TNF_LSB 1
#define SSP_SR_TFE_MSB 0
#define SSP_SR_TFE_LSB 0

/* TX FIFO empty */
#define SSP_SR_MASK_TFE SSP_MASK(SR, TFE)
/* TX FIFO not full */
#define SSP_SR_MASK_TNF SSP_MASK(SR, TNF)
/* RX FIFO not empty */
#define SSP_SR_MASK_RNE SSP_MASK(SR, RNE)
/* RX FIFO full */
#define SSP_SR_MASK_RFF SSP_MASK(SR, RFF)
/* Busy Flag */
#define SSP_SR_MASK_BSY SSP_MASK(SR, BSY)

/*
 * Clock Prescale Register
 */
#define SSP_CPSR_CPSDVSR_MSB 7
#define SSP_CPSR_CPSDVSR_LSB 0
/* Clock prescale divider */
#define SSP_CPSR_MASK_CPSDVSR SSP_MASK(CPSR, CPSDVSR)

/*
 * Interrupt Mask Set/Clear Register
 */
#define SSP_IMSC_TXIM_MSB 3
#define SSP_IMSC_TXIM_LSB 3
#define SSP_IMSC_RXIM_MSB 2
#define SSP_IMSC_RXIM_LSB 2
#define SSP_IMSC_RTIM_MSB 1
#define SSP_IMSC_RTIM_LSB 1
#define SSP_IMSC_RORIM_MSB 0
#define SSP_IMSC_RORIM_LSB 0

/* Receive Overrun Interrupt mask */
#define SSP_IMSC_MASK_RORIM SSP_MASK(IMSC, RORIM)
/* Receive timeout Interrupt mask */
#define SSP_IMSC_MASK_RTIM SSP_MASK(IMSC, RTIM)
/* Receive FIFO Interrupt mask */
#define SSP_IMSC_MASK_RXIM SSP_MASK(IMSC, RXIM)
/* Transmit FIFO Interrupt mask */
#define SSP_IMSC_MASK_TXIM SSP_MASK(IMSC, TXIM)

/*
 * Raw Interrupt Status Register
 */
#define SSP_RIS_TXRIS_MSB 3
#define SSP_RIS_TXRIS_LSB 3
#define SSP_RIS_RXRIS_MSB 2
#define SSP_RIS_RXRIS_LSB 2
#define SSP_RIS_RTRIS_MSB 1
#define SSP_RIS_RTRIS_LSB 1
#define SSP_RIS_RORRIS_MSB 0
#define SSP_RIS_RORRIS_LSB 0

/* Receive Overrun Raw Interrupt status */
#define SSP_RIS_MASK_RORRIS SSP_MASK(RIS, RORRIS)
/* Receive Timeout Raw Interrupt status */
#define SSP_RIS_MASK_RTRIS SSP_MASK(RIS, RTRIS)
/* Receive FIFO Raw Interrupt status */
#define SSP_RIS_MASK_RXRIS SSP_MASK(RIS, RXRIS)
/* Transmit FIFO Raw Interrupt status */
#define SSP_RIS_MASK_TXRIS SSP_MASK(RIS, TXRIS)

/*
 * Masked Interrupt Status Register
 */
#define SSP_MIS_TXMIS_MSB 3
#define SSP_MIS_TXMIS_LSB 3
#define SSP_MIS_RXMIS_MSB 2
#define SSP_MIS_RXMIS_LSB 2
#define SSP_MIS_RTMIS_MSB 1
#define SSP_MIS_RTMIS_LSB 1
#define SSP_MIS_RORMIS_MSB 0
#define SSP_MIS_RORMIS_LSB 0

/* Receive Overrun Masked Interrupt status */
#define SSP_MIS_MASK_RORMIS SSP_MASK(MIS, RORMIS)
/* Receive Timeout Masked Interrupt status */
#define SSP_MIS_MASK_RTMIS SSP_MASK(MIS, RTMIS)
/* Receive FIFO Masked Interrupt status */
#define SSP_MIS_MASK_RXMIS SSP_MASK(MIS, RXMIS)
/* Transmit FIFO Masked Interrupt status */
#define SSP_MIS_MASK_TXMIS SSP_MASK(MIS, TXMIS)

/*
 * Interrupt Clear Register
 */
#define SSP_ICR_RTIC_MSB 1
#define SSP_ICR_RTIC_LSB 1
#define SSP_ICR_RORIC_MSB 0
#define SSP_ICR_RORIC_LSB 0

/* Receive Overrun Clear Interrupt bit */
#define SSP_ICR_MASK_RORIC SSP_MASK(ICR, RORIC)
/* Receive Timeout Clear Interrupt bit */
#define SSP_ICR_MASK_RTIC SSP_MASK(ICR, RTIC)

/*
 * DMA Control Register
 */
#define SSP_DMACR_TXDMAE_MSB 1
#define SSP_DMACR_TXDMAE_LSB 1
#define SSP_DMACR_RXDMAE_MSB 0
#define SSP_DMACR_RXDMAE_LSB 0

/* Receive DMA Enable bit */
#define SSP_DMACR_MASK_RXDMAE SSP_MASK(DMACR, RXDMAE)
/* Transmit DMA Enable bit */
#define SSP_DMACR_MASK_TXDMAE SSP_MASK(DMACR, TXDMAE)

/* End register definitions */

/*
 * Clock Parameter ranges
 */
#define CPSDVR_MIN 0x02
#define CPSDVR_MAX 0xFE

#define SCR_MIN 0x00
#define SCR_MAX 0xFF

/* FIFO depth (the PL022 TX and RX FIFOs each hold 8 frames) */
#define SSP_FIFO_DEPTH 8

/*
 * Register READ/WRITE macros
 */
#define SSP_READ_REG(reg) (*((volatile uint32_t *)(reg)))
#define SSP_WRITE_REG(reg, val) (*((volatile uint32_t *)(reg)) = (val))
#define SSP_CLEAR_REG(reg, val) (*((volatile uint32_t *)(reg)) &= ~(val))

/*
 * Status check macros
 */
#define SSP_BUSY(reg) (SSP_READ_REG(SSP_SR(reg)) & SSP_SR_MASK_BSY)
#define SSP_RX_FIFO_NOT_EMPTY(reg) (SSP_READ_REG(SSP_SR(reg)) & SSP_SR_MASK_RNE)
#define SSP_TX_FIFO_EMPTY(reg) (SSP_READ_REG(SSP_SR(reg)) & SSP_SR_MASK_TFE)
#define SSP_TX_FIFO_NOT_FULL(reg) (SSP_READ_REG(SSP_SR(reg)) & SSP_SR_MASK_TNF)

#if defined(CONFIG_SPI_PL022_DMA)
enum spi_pl022_dma_direction {
	TX = 0,
	RX,
	NUM_OF_DIRECTION
};

struct spi_pl022_dma_config {
	const struct device *dev;
	uint32_t channel;
	uint32_t channel_config;
	uint32_t slot;
};

struct spi_pl022_dma_data {
	struct dma_config config;
	struct dma_block_config block;
	uint32_t count;
	bool callbacked;
};
#endif

/*
 * Max frequency
 */
#define MAX_FREQ_CONTROLLER_MODE(pclk) ((pclk) / 2)
#define MAX_FREQ_PERIPHERAL_MODE(pclk) ((pclk) / 12)

struct spi_pl022_cfg {
	const uint32_t reg;
	const uint32_t pclk;
	const bool dma_enabled;
#if defined(CONFIG_CLOCK_CONTROL)
	const struct device *clk_dev;
	const clock_control_subsys_t clk_id;
#endif
#if defined(CONFIG_RESET)
	const struct reset_dt_spec reset;
#endif
#if defined(CONFIG_PINCTRL)
	const struct pinctrl_dev_config *pincfg;
#endif
#if defined(CONFIG_SPI_PL022_INTERRUPT)
	void (*irq_config)(const struct device *port);
#endif
#if defined(CONFIG_SPI_PL022_DMA)
	const struct spi_pl022_dma_config dma[NUM_OF_DIRECTION];
#endif
};

struct spi_pl022_data {
	struct spi_context ctx;
	uint32_t tx_count;
	uint32_t rx_count;
	struct k_spinlock lock;
#if defined(CONFIG_SPI_PL022_DMA)
	struct spi_pl022_dma_data dma[NUM_OF_DIRECTION];
#endif
};

#if defined(CONFIG_SPI_PL022_DMA)
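/*
 * Scratch words used when a transfer direction has no buffer: the DMA is
 * pointed at these with address adjustment disabled, so TX sends a constant
 * word and unwanted RX data lands in dummy_rx (see spi_pl022_dma_setup()
 * below).
 */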
static uint32_t dummy_tx;
static uint32_t dummy_rx;
#endif

/* Helper Functions */

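/*
 * The PL022 bit rate is SSPCLK / (CPSDVSR * (1 + SCR)), where CPSDVSR is an
 * even prescale divisor in [2, 254] and SCR an 8-bit post-divider. The two
 * helpers below pick a divisor pair giving the fastest bit rate that does
 * not exceed the requested one. For example, pclk = 125 MHz and
 * baud = 1 MHz yield prescale = 2 and postdiv (SCR) = 62, i.e.
 * 125 MHz / (2 * 63) ~= 992 kHz.
 */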
static inline uint32_t spi_pl022_calc_prescale(const uint32_t pclk, const uint32_t baud)
{
	uint32_t prescale;

	/* The prescale divisor can only take even values */
	for (prescale = CPSDVR_MIN; prescale < CPSDVR_MAX; prescale += 2) {
		if (pclk < (prescale + 2) * CPSDVR_MAX * baud) {
			break;
		}
	}

	return prescale;
}

static inline uint32_t spi_pl022_calc_postdiv(const uint32_t pclk,
					      const uint32_t baud, const uint32_t prescale)
{
	uint32_t postdiv;

	for (postdiv = SCR_MAX + 1; postdiv > SCR_MIN + 1; --postdiv) {
		if (pclk / (prescale * (postdiv - 1)) > baud) {
			break;
		}
	}
	return postdiv - 1;
}

static int spi_pl022_configure(const struct device *dev,
			       const struct spi_config *spicfg)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	const uint16_t op = spicfg->operation;
	uint32_t prescale;
	uint32_t postdiv;
	uint32_t pclk = 0;
	uint32_t cr0;
	uint32_t cr1;
	int ret;

	if (spi_context_configured(&data->ctx, spicfg)) {
		return 0;
	}

#if defined(CONFIG_CLOCK_CONTROL)
	ret = clock_control_get_rate(cfg->clk_dev, cfg->clk_id, &pclk);
	if (ret < 0 || pclk == 0) {
		return -EINVAL;
	}
#endif

	if (spicfg->frequency > MAX_FREQ_CONTROLLER_MODE(pclk)) {
		LOG_ERR("Frequency exceeds the maximum of %u in controller mode",
			MAX_FREQ_CONTROLLER_MODE(pclk));
		return -ENOTSUP;
	}

	if (op & SPI_TRANSFER_LSB) {
		LOG_ERR("LSB-first not supported");
		return -ENOTSUP;
	}

	/* Half-duplex mode has not been implemented */
	if (op & SPI_HALF_DUPLEX) {
		LOG_ERR("Half-duplex not supported");
		return -ENOTSUP;
	}

	/* Peripheral mode has not been implemented */
	if (SPI_OP_MODE_GET(op) != SPI_OP_MODE_MASTER) {
		LOG_ERR("Peripheral mode is not supported");
		return -ENOTSUP;
	}

	/* Word sizes other than 8 bits have not been implemented */
	if (SPI_WORD_SIZE_GET(op) != 8) {
		LOG_ERR("Word sizes other than 8 bits are not supported");
		return -ENOTSUP;
	}

	/* configure registers */

	prescale = spi_pl022_calc_prescale(pclk, spicfg->frequency);
	postdiv = spi_pl022_calc_postdiv(pclk, spicfg->frequency, prescale);

	cr0 = 0;
	cr0 |= (postdiv << SSP_CR0_SCR_LSB);
	cr0 |= (SPI_WORD_SIZE_GET(op) - 1);
	cr0 |= (op & SPI_MODE_CPOL) ? SSP_CR0_MASK_SPO : 0;
	cr0 |= (op & SPI_MODE_CPHA) ? SSP_CR0_MASK_SPH : 0;

	cr1 = 0;
	cr1 |= SSP_CR1_MASK_SSE; /* Always enable SPI */
	cr1 |= (op & SPI_MODE_LOOP) ? SSP_CR1_MASK_LBM : 0;

	SSP_WRITE_REG(SSP_CPSR(cfg->reg), prescale);
	SSP_WRITE_REG(SSP_CR0(cfg->reg), cr0);
	SSP_WRITE_REG(SSP_CR1(cfg->reg), cr1);

#if defined(CONFIG_SPI_PL022_INTERRUPT)
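	/* The TX FIFO is refilled from the RX path in spi_pl022_async_xfer(),
	 * so only the receive, receive-timeout and receive-overrun interrupts
	 * need to be unmasked; the TX interrupt stays masked.
	 */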
	if (!cfg->dma_enabled) {
		SSP_WRITE_REG(SSP_IMSC(cfg->reg),
			      SSP_IMSC_MASK_RORIM | SSP_IMSC_MASK_RTIM | SSP_IMSC_MASK_RXIM);
	}
#endif

	data->ctx.config = spicfg;

	return 0;
}

static inline bool spi_pl022_transfer_ongoing(struct spi_pl022_data *data)
{
	return spi_context_tx_on(&data->ctx) || spi_context_rx_on(&data->ctx);
}

#if defined(CONFIG_SPI_PL022_DMA)
static void spi_pl022_dma_callback(const struct device *dma_dev, void *arg, uint32_t channel,
				   int status);

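/*
 * DMA operation requires both a TX and an RX channel (see DMAS_ENABLED()
 * at the bottom of this file), so either two channels are in use or none.
 */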
static size_t spi_pl022_dma_enabled_num(const struct device *dev)
{
	const struct spi_pl022_cfg *cfg = dev->config;

	return cfg->dma_enabled ? 2 : 0;
}

static int spi_pl022_dma_setup(const struct device *dev, const uint32_t dir)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	struct dma_config *dma_cfg = &data->dma[dir].config;
	struct dma_block_config *block_cfg = &data->dma[dir].block;
	const struct spi_pl022_dma_config *dma = &cfg->dma[dir];
	int ret;

	memset(dma_cfg, 0, sizeof(struct dma_config));
	memset(block_cfg, 0, sizeof(struct dma_block_config));

	dma_cfg->source_burst_length = 1;
	dma_cfg->dest_burst_length = 1;
	dma_cfg->user_data = (void *)dev;
	dma_cfg->block_count = 1U;
	dma_cfg->head_block = block_cfg;
	dma_cfg->dma_slot = cfg->dma[dir].slot;
	dma_cfg->channel_direction = dir == TX ? MEMORY_TO_PERIPHERAL : PERIPHERAL_TO_MEMORY;

	if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
		dma_cfg->source_data_size = 1;
		dma_cfg->dest_data_size = 1;
	} else {
		dma_cfg->source_data_size = 2;
		dma_cfg->dest_data_size = 2;
	}

	block_cfg->block_size = spi_context_max_continuous_chunk(&data->ctx);

	if (dir == TX) {
		dma_cfg->dma_callback = spi_pl022_dma_callback;
		block_cfg->dest_address = SSP_DR(cfg->reg);
		block_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		if (spi_context_tx_buf_on(&data->ctx)) {
			block_cfg->source_address = (uint32_t)data->ctx.tx_buf;
			block_cfg->source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		} else {
			block_cfg->source_address = (uint32_t)&dummy_tx;
			block_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		}
	}

	if (dir == RX) {
		dma_cfg->dma_callback = spi_pl022_dma_callback;
		block_cfg->source_address = SSP_DR(cfg->reg);
		block_cfg->source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;

		if (spi_context_rx_buf_on(&data->ctx)) {
			block_cfg->dest_address = (uint32_t)data->ctx.rx_buf;
			block_cfg->dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		} else {
			block_cfg->dest_address = (uint32_t)&dummy_rx;
			block_cfg->dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		}
	}

	ret = dma_config(dma->dev, dma->channel, dma_cfg);
	if (ret < 0) {
		LOG_ERR("dma_config %p failed %d", dma->dev, ret);
		return ret;
	}

	data->dma[dir].callbacked = false;

	ret = dma_start(dma->dev, dma->channel);
	if (ret < 0) {
		LOG_ERR("dma_start %p failed %d", dma->dev, ret);
		return ret;
	}

	return 0;
}

static int spi_pl022_start_dma_transceive(const struct device *dev)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	int ret = 0;

	SSP_CLEAR_REG(SSP_DMACR(cfg->reg), SSP_DMACR_MASK_RXDMAE | SSP_DMACR_MASK_TXDMAE);

	for (size_t i = 0; i < spi_pl022_dma_enabled_num(dev); i++) {
		ret = spi_pl022_dma_setup(dev, i);
		if (ret < 0) {
			goto on_error;
		}
	}

	SSP_WRITE_REG(SSP_DMACR(cfg->reg), SSP_DMACR_MASK_RXDMAE | SSP_DMACR_MASK_TXDMAE);

on_error:
	if (ret < 0) {
		for (size_t i = 0; i < spi_pl022_dma_enabled_num(dev); i++) {
			dma_stop(cfg->dma[i].dev, cfg->dma[i].channel);
		}
	}
	return ret;
}

static bool spi_pl022_chunk_transfer_finished(const struct device *dev)
{
	struct spi_pl022_data *data = dev->data;
	struct spi_pl022_dma_data *dma = data->dma;
	const size_t chunk_len = spi_context_max_continuous_chunk(&data->ctx);

	return (MIN(dma[TX].count, dma[RX].count) >= chunk_len);
}

static void spi_pl022_complete(const struct device *dev, int status)
{
	struct spi_pl022_data *data = dev->data;
	const struct spi_pl022_cfg *cfg = dev->config;

	for (size_t i = 0; i < spi_pl022_dma_enabled_num(dev); i++) {
		dma_stop(cfg->dma[i].dev, cfg->dma[i].channel);
	}

	spi_context_complete(&data->ctx, dev, status);
}

static void spi_pl022_dma_callback(const struct device *dma_dev, void *arg, uint32_t channel,
				   int status)
{
	const struct device *dev = (const struct device *)arg;
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	bool complete = false;
	k_spinlock_key_t key;
	size_t chunk_len;
	int err = 0;

	if (status < 0) {
		key = k_spin_lock(&data->lock);

		LOG_ERR("dma:%p ch:%d callback error %d", dma_dev, channel, status);
		spi_pl022_complete(dev, status);

		k_spin_unlock(&data->lock, key);
		return;
	}

	key = k_spin_lock(&data->lock);

	chunk_len = spi_context_max_continuous_chunk(&data->ctx);
	for (size_t i = 0; i < ARRAY_SIZE(cfg->dma); i++) {
		if (dma_dev == cfg->dma[i].dev && channel == cfg->dma[i].channel) {
			data->dma[i].count += chunk_len;
			data->dma[i].callbacked = true;
		}
	}
	/* Check whether the transfer is finished.
	 * The transmission of this chunk is complete once both dma[TX].count
	 * and dma[RX].count have reached the chunk_len.
	 * A chunk_len of zero here means the transfer is already complete.
	 */
	if (spi_pl022_chunk_transfer_finished(dev)) {
		if (SPI_WORD_SIZE_GET(data->ctx.config->operation) == 8) {
			spi_context_update_tx(&data->ctx, 1, chunk_len);
			spi_context_update_rx(&data->ctx, 1, chunk_len);
		} else {
			spi_context_update_tx(&data->ctx, 2, chunk_len);
			spi_context_update_rx(&data->ctx, 2, chunk_len);
		}

		if (spi_pl022_transfer_ongoing(data)) {
			/* Next chunk is available, reset the count and
			 * continue processing
			 */
			data->dma[TX].count = 0;
			data->dma[RX].count = 0;
		} else {
			/* All data has been processed; complete the transfer */
			complete = true;
		}
	}

	if (!complete && data->dma[TX].callbacked && data->dma[RX].callbacked) {
		err = spi_pl022_start_dma_transceive(dev);
		if (err) {
			complete = true;
		}
	}

	if (complete) {
		spi_pl022_complete(dev, err);
	}

	k_spin_unlock(&data->lock, key);
}

#endif /* DMA */

#if defined(CONFIG_SPI_PL022_INTERRUPT)

static void spi_pl022_async_xfer(const struct device *dev)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	struct spi_context *ctx = &data->ctx;
	/* Process one chunk at a time */
	size_t chunk_len = spi_context_max_continuous_chunk(ctx);
	uint32_t txrx;

	/* Read RX FIFO */
	while (SSP_RX_FIFO_NOT_EMPTY(cfg->reg) && (data->rx_count < chunk_len)) {
		txrx = SSP_READ_REG(SSP_DR(cfg->reg));

		/* Discard received data if rx buffer not assigned */
		if (ctx->rx_buf) {
			*(((uint8_t *)ctx->rx_buf) + data->rx_count) = (uint8_t)txrx;
		}
		data->rx_count++;
	}

	/* Check whether the transfer is finished.
	 * The transmission of this chunk is complete once both the tx_count
	 * and the rx_count have reached the chunk_len.
	 * A chunk_len of zero here means the transfer is already complete.
	 */
	if (MIN(data->tx_count, data->rx_count) >= chunk_len && chunk_len > 0) {
		spi_context_update_tx(ctx, 1, chunk_len);
		spi_context_update_rx(ctx, 1, chunk_len);
		if (spi_pl022_transfer_ongoing(data)) {
			/* Next chunk is available, reset the count and continue processing */
			data->tx_count = 0;
			data->rx_count = 0;
			chunk_len = spi_context_max_continuous_chunk(ctx);
		} else {
			/* All data has been processed; complete the transfer */
			spi_context_complete(ctx, dev, 0);
			return;
		}
	}

	/* Fill up TX FIFO */
	for (uint32_t i = 0; i < SSP_FIFO_DEPTH; i++) {
		if ((data->tx_count < chunk_len) && SSP_TX_FIFO_NOT_FULL(cfg->reg)) {
			/* Send 0 in the case of read only operation */
			txrx = 0;

			if (ctx->tx_buf) {
				txrx = *(((uint8_t *)ctx->tx_buf) + data->tx_count);
			}
			SSP_WRITE_REG(SSP_DR(cfg->reg), txrx);
			data->tx_count++;
		} else {
			break;
		}
	}
}

static void spi_pl022_start_async_xfer(const struct device *dev)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;

	/* Ensure writable */
	while (!SSP_TX_FIFO_EMPTY(cfg->reg)) {
		;
	}
	/* Drain RX FIFO */
	while (SSP_RX_FIFO_NOT_EMPTY(cfg->reg)) {
		SSP_READ_REG(SSP_DR(cfg->reg));
	}

	data->tx_count = 0;
	data->rx_count = 0;

	SSP_WRITE_REG(SSP_ICR(cfg->reg), SSP_ICR_MASK_RORIC | SSP_ICR_MASK_RTIC);

	spi_pl022_async_xfer(dev);
}

static void spi_pl022_isr(const struct device *dev)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	struct spi_context *ctx = &data->ctx;
	uint32_t mis = SSP_READ_REG(SSP_MIS(cfg->reg));

	if (mis & SSP_MIS_MASK_RORMIS) {
		SSP_WRITE_REG(SSP_IMSC(cfg->reg), 0);
		spi_context_complete(ctx, dev, -EIO);
	} else {
		spi_pl022_async_xfer(dev);
	}

	SSP_WRITE_REG(SSP_ICR(cfg->reg), SSP_ICR_MASK_RORIC | SSP_ICR_MASK_RTIC);
}

#else

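/*
 * Polled transfer of one contiguous chunk. fifo_cnt counts frames written
 * but not yet read back and is capped at SSP_FIFO_DEPTH, so the RX FIFO
 * cannot overrun while the inner loop drains it.
 */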
static void spi_pl022_xfer(const struct device *dev)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	const size_t chunk_len = spi_context_max_continuous_chunk(&data->ctx);
	const void *txbuf = data->ctx.tx_buf;
	void *rxbuf = data->ctx.rx_buf;
	uint32_t txrx;
	size_t fifo_cnt = 0;

	data->tx_count = 0;
	data->rx_count = 0;

	/* Ensure writable */
	while (!SSP_TX_FIFO_EMPTY(cfg->reg)) {
		;
	}
	/* Drain RX FIFO */
	while (SSP_RX_FIFO_NOT_EMPTY(cfg->reg)) {
		SSP_READ_REG(SSP_DR(cfg->reg));
	}

	while (data->rx_count < chunk_len || data->tx_count < chunk_len) {
		/* Fill up fifo with available TX data */
		while (SSP_TX_FIFO_NOT_FULL(cfg->reg) && data->tx_count < chunk_len &&
		       fifo_cnt < SSP_FIFO_DEPTH) {
			/* Send 0 in the case of read only operation */
			txrx = 0;

			if (txbuf) {
				txrx = ((uint8_t *)txbuf)[data->tx_count];
			}
			SSP_WRITE_REG(SSP_DR(cfg->reg), txrx);
			data->tx_count++;
			fifo_cnt++;
		}
		while (data->rx_count < chunk_len && fifo_cnt > 0) {
			if (!SSP_RX_FIFO_NOT_EMPTY(cfg->reg)) {
				continue;
			}

			txrx = SSP_READ_REG(SSP_DR(cfg->reg));

			/* Discard received data if rx buffer not assigned */
			if (rxbuf) {
				((uint8_t *)rxbuf)[data->rx_count] = (uint8_t)txrx;
			}
			data->rx_count++;
			fifo_cnt--;
		}
	}
}

#endif

static int spi_pl022_transceive_impl(const struct device *dev,
				     const struct spi_config *config,
				     const struct spi_buf_set *tx_bufs,
				     const struct spi_buf_set *rx_bufs,
				     spi_callback_t cb,
				     void *userdata)
{
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	struct spi_context *ctx = &data->ctx;
	int ret;

	spi_context_lock(&data->ctx, (cb ? true : false), cb, userdata, config);

	ret = spi_pl022_configure(dev, config);
	if (ret < 0) {
		goto error;
	}

	spi_context_buffers_setup(ctx, tx_bufs, rx_bufs, 1);

	spi_context_cs_control(ctx, true);

	if (cfg->dma_enabled) {
#if defined(CONFIG_SPI_PL022_DMA)
		for (size_t i = 0; i < ARRAY_SIZE(data->dma); i++) {
			struct dma_status stat = {.busy = true};

			dma_stop(cfg->dma[i].dev, cfg->dma[i].channel);

			while (stat.busy) {
				dma_get_status(cfg->dma[i].dev,
					       cfg->dma[i].channel, &stat);
			}

			data->dma[i].count = 0;
		}

		ret = spi_pl022_start_dma_transceive(dev);
		if (ret < 0) {
			spi_context_cs_control(ctx, false);
			goto error;
		}
		ret = spi_context_wait_for_completion(ctx);
#endif
	} else
#if defined(CONFIG_SPI_PL022_INTERRUPT)
	{
		spi_pl022_start_async_xfer(dev);
		ret = spi_context_wait_for_completion(ctx);
	}
#else
	{
		do {
			spi_pl022_xfer(dev);
			spi_context_update_tx(ctx, 1, data->tx_count);
			spi_context_update_rx(ctx, 1, data->rx_count);
		} while (spi_pl022_transfer_ongoing(data));

#if defined(CONFIG_SPI_ASYNC)
		spi_context_complete(&data->ctx, dev, ret);
#endif
	}
#endif

	spi_context_cs_control(ctx, false);

error:
	spi_context_release(&data->ctx, ret);

	return ret;
}

/* API Functions */

static int spi_pl022_transceive(const struct device *dev,
				const struct spi_config *config,
				const struct spi_buf_set *tx_bufs,
				const struct spi_buf_set *rx_bufs)
{
	return spi_pl022_transceive_impl(dev, config, tx_bufs, rx_bufs, NULL, NULL);
}

#if defined(CONFIG_SPI_ASYNC)

static int spi_pl022_transceive_async(const struct device *dev,
				      const struct spi_config *config,
				      const struct spi_buf_set *tx_bufs,
				      const struct spi_buf_set *rx_bufs,
				      spi_callback_t cb,
				      void *userdata)
{
	return spi_pl022_transceive_impl(dev, config, tx_bufs, rx_bufs, cb, userdata);
}

#endif

static int spi_pl022_release(const struct device *dev,
			     const struct spi_config *config)
{
	struct spi_pl022_data *data = dev->data;

	spi_context_unlock_unconditionally(&data->ctx);

	return 0;
}

static DEVICE_API(spi, spi_pl022_api) = {
	.transceive = spi_pl022_transceive,
#if defined(CONFIG_SPI_ASYNC)
	.transceive_async = spi_pl022_transceive_async,
#endif
#ifdef CONFIG_SPI_RTIO
	.iodev_submit = spi_rtio_iodev_default_submit,
#endif
	.release = spi_pl022_release
};

static int spi_pl022_init(const struct device *dev)
{
	/* Initialize with lowest frequency */
	const struct spi_config spicfg = {
		.frequency = 0,
		.operation = SPI_WORD_SET(8),
		.slave = 0,
	};
	const struct spi_pl022_cfg *cfg = dev->config;
	struct spi_pl022_data *data = dev->data;
	int ret;

#if defined(CONFIG_CLOCK_CONTROL)
	if (cfg->clk_dev) {
		ret = clock_control_on(cfg->clk_dev, cfg->clk_id);
		if (ret < 0) {
			LOG_ERR("Failed to enable the clock");
			return ret;
		}
	}
#endif

#if defined(CONFIG_RESET)
	if (cfg->reset.dev) {
		ret = reset_line_toggle_dt(&cfg->reset);
		if (ret < 0) {
			return ret;
		}
	}
#endif

#if defined(CONFIG_PINCTRL)
	ret = pinctrl_apply_state(cfg->pincfg, PINCTRL_STATE_DEFAULT);
	if (ret < 0) {
		LOG_ERR("Failed to apply pinctrl state");
		return ret;
	}
#endif

	if (cfg->dma_enabled) {
#if defined(CONFIG_SPI_PL022_DMA)
		for (size_t i = 0; i < spi_pl022_dma_enabled_num(dev); i++) {
			uint32_t ch_filter = BIT(cfg->dma[i].channel);

			if (!device_is_ready(cfg->dma[i].dev)) {
				LOG_ERR("DMA %s not ready", cfg->dma[i].dev->name);
				return -ENODEV;
			}

			ret = dma_request_channel(cfg->dma[i].dev, &ch_filter);
			if (ret < 0) {
				LOG_ERR("dma_request_channel failed %d", ret);
				return ret;
			}
		}
#endif
	} else {
#if defined(CONFIG_SPI_PL022_INTERRUPT)
		cfg->irq_config(dev);
#endif
	}

	ret = spi_pl022_configure(dev, &spicfg);
	if (ret < 0) {
		LOG_ERR("Failed to configure spi");
		return ret;
	}

	ret = spi_context_cs_configure_all(&data->ctx);
	if (ret < 0) {
		LOG_ERR("Failed to configure CS GPIOs");
		return ret;
	}

	/* Make sure the context is unlocked */
	spi_context_unlock_unconditionally(&data->ctx);

	return 0;
}

#define DMA_INITIALIZER(idx, dir)                                                                  \
	{                                                                                          \
		.dev = DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(idx, dir)),                         \
		.channel = DT_INST_DMAS_CELL_BY_NAME(idx, dir, channel),                           \
		.slot = DT_INST_DMAS_CELL_BY_NAME(idx, dir, slot),                                 \
		.channel_config = DT_INST_DMAS_CELL_BY_NAME(idx, dir, channel_config),             \
	}

#define DMAS_DECL(idx)                                                                             \
	{                                                                                          \
		COND_CODE_1(DT_INST_DMAS_HAS_NAME(idx, tx), (DMA_INITIALIZER(idx, tx)), ({0})),    \
		COND_CODE_1(DT_INST_DMAS_HAS_NAME(idx, rx), (DMA_INITIALIZER(idx, rx)), ({0})),    \
	}

#define DMAS_ENABLED(idx) (DT_INST_DMAS_HAS_NAME(idx, tx) && DT_INST_DMAS_HAS_NAME(idx, rx))

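/*
 * Illustrative sketch of a devicetree node the macros below expand against.
 * The addresses, numbers and DMA cells are hypothetical, and the dmas cell
 * layout depends on the DMA controller binding:
 *
 *	spi0: spi@40008000 {
 *		compatible = "arm,pl022";
 *		reg = <0x40008000 0x1000>;
 *		interrupts = <10 3>;
 *		dmas = <&dma0 0 5 0>, <&dma0 1 6 0>;
 *		dma-names = "tx", "rx";
 *	};
 */
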
#define CLOCK_ID_DECL(idx)                                                                         \
	IF_ENABLED(DT_INST_NODE_HAS_PROP(idx, clocks),                                             \
	(static const clock_control_subsys_t pl022_clk_id##idx =                                   \
		(clock_control_subsys_t)DT_INST_PHA_BY_IDX(idx, clocks, 0, clk_id);))              \

#define SPI_PL022_INIT(idx)                                                                        \
	IF_ENABLED(CONFIG_PINCTRL, (PINCTRL_DT_INST_DEFINE(idx);))                                 \
	IF_ENABLED(CONFIG_SPI_PL022_INTERRUPT,                                                     \
		   (static void spi_pl022_irq_config_##idx(const struct device *dev)               \
		    {                                                                              \
			   IRQ_CONNECT(DT_INST_IRQN(idx), DT_INST_IRQ(idx, priority),              \
				       spi_pl022_isr, DEVICE_DT_INST_GET(idx), 0);                 \
			   irq_enable(DT_INST_IRQN(idx));                                          \
		    }))                                                                            \
	IF_ENABLED(CONFIG_CLOCK_CONTROL, (CLOCK_ID_DECL(idx)))                                     \
	static struct spi_pl022_data spi_pl022_data_##idx = {                                      \
		SPI_CONTEXT_INIT_LOCK(spi_pl022_data_##idx, ctx),                                  \
		SPI_CONTEXT_INIT_SYNC(spi_pl022_data_##idx, ctx),                                  \
		SPI_CONTEXT_CS_GPIOS_INITIALIZE(DT_DRV_INST(idx), ctx)};                           \
	static struct spi_pl022_cfg spi_pl022_cfg_##idx = {                                        \
		.reg = DT_INST_REG_ADDR(idx),                                                      \
		IF_ENABLED(CONFIG_CLOCK_CONTROL, (IF_ENABLED(DT_INST_NODE_HAS_PROP(idx, clocks),   \
			(.clk_dev = DEVICE_DT_GET(DT_INST_CLOCKS_CTLR(idx)),                       \
			 .clk_id = pl022_clk_id##idx,))))                                          \
		IF_ENABLED(CONFIG_RESET, (IF_ENABLED(DT_INST_NODE_HAS_PROP(idx, resets),           \
			   (.reset = RESET_DT_SPEC_INST_GET(idx),))))                              \
		IF_ENABLED(CONFIG_PINCTRL, (.pincfg = PINCTRL_DT_INST_DEV_CONFIG_GET(idx),))       \
		IF_ENABLED(CONFIG_SPI_PL022_DMA, (.dma = DMAS_DECL(idx),)) COND_CODE_1(            \
				CONFIG_SPI_PL022_DMA, (.dma_enabled = DMAS_ENABLED(idx),),         \
				(.dma_enabled = false,))                                           \
		IF_ENABLED(CONFIG_SPI_PL022_INTERRUPT,                                             \
					   (.irq_config = spi_pl022_irq_config_##idx,))};          \
	SPI_DEVICE_DT_INST_DEFINE(idx, spi_pl022_init, NULL, &spi_pl022_data_##idx,                \
			      &spi_pl022_cfg_##idx, POST_KERNEL, CONFIG_SPI_INIT_PRIORITY,         \
			      &spi_pl022_api);

DT_INST_FOREACH_STATUS_OKAY(SPI_PL022_INIT)