/*
 * Copyright (c) 2022 Schlumberger
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#define DT_DRV_COMPAT infineon_xmc4xxx_spi

#define LOG_LEVEL CONFIG_SPI_LOG_LEVEL
#include <zephyr/logging/log.h>
LOG_MODULE_REGISTER(spi_xmc4xxx);

#include "spi_context.h"

#include <zephyr/drivers/dma.h>
#include <zephyr/drivers/pinctrl.h>
#include <zephyr/drivers/spi.h>
#include <zephyr/drivers/spi/rtio.h>

#include <xmc_spi.h>
#include <xmc_usic.h>
#define USIC_IRQ_MIN  84
#define USIC_IRQ_MAX  101
#define IRQS_PER_USIC 6

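/* Flags used by the DMA callback (ISR context) to report completion or errors
 * to the thread waiting on status_sem.
 */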
#define SPI_XMC4XXX_DMA_ERROR_FLAG   BIT(0)
#define SPI_XMC4XXX_DMA_RX_DONE_FLAG BIT(1)
#define SPI_XMC4XXX_DMA_TX_DONE_FLAG BIT(2)

#ifdef CONFIG_SPI_XMC4XXX_DMA
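/* Zero-initialized byte clocked out when a transfer has no TX buffer; the TX
 * DMA source address stays fixed on this location (no address increment).
 */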
static const uint8_t __aligned(4) tx_dummy_data;
#endif

struct spi_xmc4xxx_config {
	XMC_USIC_CH_t *spi;
	const struct pinctrl_dev_config *pcfg;
	uint8_t miso_src;
#if defined(CONFIG_SPI_XMC4XXX_INTERRUPT)
	void (*irq_config_func)(const struct device *dev);
#endif
#if defined(CONFIG_SPI_XMC4XXX_DMA)
	uint8_t irq_num_tx;
	uint8_t irq_num_rx;
#endif
};

#ifdef CONFIG_SPI_XMC4XXX_DMA
struct spi_xmc4xxx_dma_stream {
	const struct device *dev_dma;
	uint32_t dma_channel;
	struct dma_config dma_cfg;
	struct dma_block_config blk_cfg;
};
#endif

struct spi_xmc4xxx_data {
	struct spi_context ctx;
#if defined(CONFIG_SPI_XMC4XXX_DMA)
	struct spi_xmc4xxx_dma_stream dma_rx;
	struct spi_xmc4xxx_dma_stream dma_tx;
	struct k_sem status_sem;
	uint8_t dma_status_flags;
	uint8_t dma_completion_flags;
	uint8_t service_request_tx;
	uint8_t service_request_rx;
#endif
};

#if defined(CONFIG_SPI_XMC4XXX_DMA)
static void spi_xmc4xxx_dma_callback(const struct device *dev_dma, void *arg, uint32_t dma_channel,
				     int status)
{
	struct spi_xmc4xxx_data *data = arg;

	if (status != 0) {
		LOG_ERR("DMA callback error on channel %d.", dma_channel);
		data->dma_status_flags |= SPI_XMC4XXX_DMA_ERROR_FLAG;
	} else {
		if (dev_dma == data->dma_tx.dev_dma && dma_channel == data->dma_tx.dma_channel) {
			data->dma_status_flags |= SPI_XMC4XXX_DMA_TX_DONE_FLAG;
		} else if (dev_dma == data->dma_rx.dev_dma &&
			   dma_channel == data->dma_rx.dma_channel) {
			data->dma_status_flags |= SPI_XMC4XXX_DMA_RX_DONE_FLAG;
		} else {
			LOG_ERR("DMA callback channel %d is not valid.", dma_channel);
			data->dma_status_flags |= SPI_XMC4XXX_DMA_ERROR_FLAG;
		}
	}
	k_sem_give(&data->status_sem);
}

#endif

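/* Drain any stale data left in the two receive buffer stages (RBUF0/RBUF1). */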
static void spi_xmc4xxx_flush_rx(XMC_USIC_CH_t *spi)
{
	uint32_t recv_status;

	recv_status = XMC_USIC_CH_GetReceiveBufferStatus(spi);
	if (recv_status & USIC_CH_RBUFSR_RDV0_Msk) {
		XMC_SPI_CH_GetReceivedData(spi);
	}
	if (recv_status & USIC_CH_RBUFSR_RDV1_Msk) {
		XMC_SPI_CH_GetReceivedData(spi);
	}
}

static void spi_xmc4xxx_shift_frames(const struct device *dev)
{
	struct spi_xmc4xxx_data *data = dev->data;
	const struct spi_xmc4xxx_config *config = dev->config;
	struct spi_context *ctx = &data->ctx;
	uint8_t tx_data = 0;
	uint8_t rx_data;
	uint32_t status;

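	/* When the TX buffer is exhausted, clock out zeros to keep receiving. */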
	if (spi_context_tx_buf_on(ctx)) {
		tx_data = ctx->tx_buf[0];
	}

	XMC_SPI_CH_ClearStatusFlag(config->spi,
				   XMC_SPI_CH_STATUS_FLAG_TRANSMIT_SHIFT_INDICATION |
				   XMC_SPI_CH_STATUS_FLAG_RECEIVE_INDICATION |
				   XMC_SPI_CH_STATUS_FLAG_ALTERNATIVE_RECEIVE_INDICATION);

	spi_context_update_tx(ctx, 1, 1);

	XMC_SPI_CH_Transmit(config->spi, tx_data, XMC_SPI_CH_MODE_STANDARD);

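	/* In interrupt mode the receive ISR picks up from here and completes
	 * the frame, so do not poll below.
	 */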
#if defined(CONFIG_SPI_XMC4XXX_INTERRUPT)
	return;
#endif

	/* Wait to finish transmitting */
	while (1) {
		status = XMC_SPI_CH_GetStatusFlag(config->spi);
		if (status & XMC_SPI_CH_STATUS_FLAG_TRANSMIT_SHIFT_INDICATION) {
			break;
		}
	}

	/* Wait to finish receiving */
	while (1) {
		status = XMC_SPI_CH_GetStatusFlag(config->spi);
		if (status & (XMC_SPI_CH_STATUS_FLAG_RECEIVE_INDICATION |
			      XMC_SPI_CH_STATUS_FLAG_ALTERNATIVE_RECEIVE_INDICATION)) {
			break;
		}
	}

	rx_data = XMC_SPI_CH_GetReceivedData(config->spi);

	if (spi_context_rx_buf_on(ctx)) {
		*ctx->rx_buf = rx_data;
	}
	spi_context_update_rx(ctx, 1, 1);
}

#if defined(CONFIG_SPI_XMC4XXX_INTERRUPT)
static void spi_xmc4xxx_isr(const struct device *dev)
{
	struct spi_xmc4xxx_data *data = dev->data;
	const struct spi_xmc4xxx_config *config = dev->config;
	struct spi_context *ctx = &data->ctx;
	uint8_t rx_data;

	rx_data = XMC_SPI_CH_GetReceivedData(config->spi);

	if (spi_context_rx_buf_on(ctx)) {
		*ctx->rx_buf = rx_data;
	}
	spi_context_update_rx(ctx, 1, 1);

	if (spi_context_tx_on(ctx) || spi_context_rx_on(ctx)) {
		spi_xmc4xxx_shift_frames(dev);
		return;
	}

	if (!(ctx->config->operation & SPI_HOLD_ON_CS)) {
		spi_context_cs_control(ctx, false);
	}

	spi_context_complete(ctx, dev, 0);
}
#endif

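/* DX0 input multiplexer selection used for internal loopback (SPI_MODE_LOOP). */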
#define LOOPBACK_SRC 6
static int spi_xmc4xxx_configure(const struct device *dev, const struct spi_config *spi_cfg)
{
	int ret;
	struct spi_xmc4xxx_data *data = dev->data;
	const struct spi_xmc4xxx_config *config = dev->config;
	struct spi_context *ctx = &data->ctx;
	uint16_t settings = spi_cfg->operation;
	bool CPOL = SPI_MODE_GET(settings) & SPI_MODE_CPOL;
	bool CPHA = SPI_MODE_GET(settings) & SPI_MODE_CPHA;
	XMC_SPI_CH_CONFIG_t usic_cfg = {.baudrate = spi_cfg->frequency};
	XMC_SPI_CH_BRG_SHIFT_CLOCK_PASSIVE_LEVEL_t clock_settings =
		XMC_SPI_CH_BRG_SHIFT_CLOCK_PASSIVE_LEVEL_0_DELAY_ENABLED;

	if (spi_context_configured(ctx, spi_cfg)) {
		return 0;
	}

	ctx->config = spi_cfg;

	if (spi_cfg->operation & SPI_HALF_DUPLEX) {
		LOG_ERR("Half-duplex not supported");
		return -ENOTSUP;
	}

	if (spi_cfg->operation & SPI_OP_MODE_SLAVE) {
		LOG_ERR("Slave mode not supported");
		return -ENOTSUP;
	}

	if (SPI_WORD_SIZE_GET(spi_cfg->operation) != 8) {
		LOG_ERR("Only 8 bit word size is supported");
		return -ENOTSUP;
	}

	ret = XMC_SPI_CH_Stop(config->spi);
	if (ret != XMC_SPI_CH_STATUS_OK) {
		return -EBUSY;
	}
	XMC_SPI_CH_Init(config->spi, &usic_cfg);
	XMC_SPI_CH_Start(config->spi);

	if (SPI_MODE_GET(settings) & SPI_MODE_LOOP) {
		XMC_SPI_CH_SetInputSource(config->spi, XMC_SPI_CH_INPUT_DIN0, LOOPBACK_SRC);
	} else {
		XMC_SPI_CH_SetInputSource(config->spi, XMC_SPI_CH_INPUT_DIN0, config->miso_src);
	}

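	/* Map CPOL/CPHA onto the USIC shift clock: CPOL selects the passive
	 * (idle) clock level, and CPHA=0 enables the half-cycle delay so data
	 * is sampled on the leading edge.
	 */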
	if (!CPOL && !CPHA) {
		clock_settings = XMC_SPI_CH_BRG_SHIFT_CLOCK_PASSIVE_LEVEL_0_DELAY_ENABLED;
	} else if (!CPOL && CPHA) {
		clock_settings = XMC_SPI_CH_BRG_SHIFT_CLOCK_PASSIVE_LEVEL_0_DELAY_DISABLED;
	} else if (CPOL && !CPHA) {
		clock_settings = XMC_SPI_CH_BRG_SHIFT_CLOCK_PASSIVE_LEVEL_1_DELAY_ENABLED;
	} else if (CPOL && CPHA) {
		clock_settings = XMC_SPI_CH_BRG_SHIFT_CLOCK_PASSIVE_LEVEL_1_DELAY_DISABLED;
	}
	XMC_SPI_CH_ConfigureShiftClockOutput(config->spi, clock_settings,
					     XMC_SPI_CH_BRG_SHIFT_CLOCK_OUTPUT_SCLK);

	if (settings & SPI_TRANSFER_LSB) {
		XMC_SPI_CH_SetBitOrderLsbFirst(config->spi);
	} else {
		XMC_SPI_CH_SetBitOrderMsbFirst(config->spi);
	}

	XMC_SPI_CH_SetWordLength(config->spi, 8);

	return 0;
}

static int spi_xmc4xxx_transceive(const struct device *dev, const struct spi_config *spi_cfg,
				  const struct spi_buf_set *tx_bufs,
				  const struct spi_buf_set *rx_bufs,
				  bool asynchronous, spi_callback_t cb, void *userdata)
{
	struct spi_xmc4xxx_data *data = dev->data;
	const struct spi_xmc4xxx_config *config = dev->config;
	struct spi_context *ctx = &data->ctx;
	int ret;

	if (!tx_bufs && !rx_bufs) {
		return 0;
	}

#ifndef CONFIG_SPI_XMC4XXX_INTERRUPT
	if (asynchronous) {
		return -ENOTSUP;
	}
#endif

	spi_context_lock(ctx, asynchronous, cb, userdata, spi_cfg);

	ret = spi_xmc4xxx_configure(dev, spi_cfg);
	if (ret) {
		LOG_DBG("SPI config on device %s failed", dev->name);
		spi_context_release(ctx, ret);
		return ret;
	}

	spi_xmc4xxx_flush_rx(config->spi);

	spi_context_buffers_setup(ctx, tx_bufs, rx_bufs, 1);

	spi_context_cs_control(ctx, true);

#if defined(CONFIG_SPI_XMC4XXX_INTERRUPT)
	XMC_SPI_CH_EnableEvent(config->spi, XMC_SPI_CH_EVENT_STANDARD_RECEIVE |
					    XMC_SPI_CH_EVENT_ALTERNATIVE_RECEIVE);
	spi_xmc4xxx_shift_frames(dev);
	ret = spi_context_wait_for_completion(ctx);
	/* CS is released in the ISR */
#else
	while (spi_context_tx_on(ctx) || spi_context_rx_on(ctx)) {
		spi_xmc4xxx_shift_frames(dev);
	}

	if (!(spi_cfg->operation & SPI_HOLD_ON_CS)) {
		spi_context_cs_control(ctx, false);
	}
#endif

	spi_context_release(ctx, ret);

	return ret;
}

#if defined(CONFIG_SPI_ASYNC)
static int spi_xmc4xxx_transceive_async(const struct device *dev, const struct spi_config *spi_cfg,
					const struct spi_buf_set *tx_bufs,
					const struct spi_buf_set *rx_bufs,
					spi_callback_t cb,
					void *userdata)
{
	return spi_xmc4xxx_transceive(dev, spi_cfg, tx_bufs, rx_bufs, true, cb, userdata);
}
#endif

#if defined(CONFIG_SPI_XMC4XXX_DMA)
static int spi_xmc4xxx_dma_rx_tx_done(struct spi_xmc4xxx_data *data)
{
	for (;;) {
		int ret;

		ret = k_sem_take(&data->status_sem, K_MSEC(CONFIG_SPI_XMC4XXX_DMA_TIMEOUT_MSEC));
		if (ret != 0) {
			LOG_ERR("Sem take error %d", ret);
			return ret;
		}
		if (data->dma_status_flags & SPI_XMC4XXX_DMA_ERROR_FLAG) {
			return -EIO;
		}
		if (data->dma_status_flags == data->dma_completion_flags) {
			return 0;
		}
	}
}

static int spi_xmc4xxx_transceive_dma(const struct device *dev, const struct spi_config *spi_cfg,
				      const struct spi_buf_set *tx_bufs,
				      const struct spi_buf_set *rx_bufs,
				      bool asynchronous,
				      spi_callback_t cb, void *userdata)
{
	struct spi_xmc4xxx_data *data = dev->data;
	const struct spi_xmc4xxx_config *config = dev->config;
	struct spi_context *ctx = &data->ctx;
	struct spi_xmc4xxx_dma_stream *dma_tx = &data->dma_tx;
	struct spi_xmc4xxx_dma_stream *dma_rx = &data->dma_rx;
	int ret;

	if (!tx_bufs && !rx_bufs) {
		return 0;
	}

	if (asynchronous) {
		return -ENOTSUP;
	}

	spi_context_lock(ctx, asynchronous, cb, userdata, spi_cfg);

	k_sem_reset(&data->status_sem);

	ret = spi_xmc4xxx_configure(dev, spi_cfg);
	if (ret) {
		LOG_ERR("SPI config on device %s failed", dev->name);
		spi_context_release(ctx, ret);
		return ret;
	}

	/* Prevent the asynchronous RX ISR from firing while the DMA owns the channel */
	irq_disable(config->irq_num_rx);
	spi_context_buffers_setup(ctx, tx_bufs, rx_bufs, 1);
	spi_context_cs_control(ctx, true);

	while (spi_context_tx_on(ctx) || spi_context_rx_on(ctx)) {
		int dma_len;
		uint8_t dma_completion_flags = SPI_XMC4XXX_DMA_TX_DONE_FLAG;
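		/* TX DMA completion is always expected; the RX flag is added
		 * below only when this chunk receives into a buffer.
		 */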

		/* Wait until any frame still in the transmit buffer has left */
		while (XMC_USIC_CH_GetTransmitBufferStatus(config->spi) ==
			XMC_USIC_CH_TBUF_STATUS_BUSY) {
		}

		if (data->ctx.rx_len == 0) {
			dma_len = data->ctx.tx_len;
		} else if (data->ctx.tx_len == 0) {
			dma_len = data->ctx.rx_len;
		} else {
			dma_len = MIN(data->ctx.tx_len, data->ctx.rx_len);
		}

		if (ctx->rx_buf) {

			spi_xmc4xxx_flush_rx(config->spi);

			dma_rx->blk_cfg.dest_address = (uint32_t)ctx->rx_buf;
			dma_rx->blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT;
			dma_rx->blk_cfg.block_size = dma_len;
			dma_rx->blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;

			ret = dma_config(dma_rx->dev_dma, dma_rx->dma_channel, &dma_rx->dma_cfg);
			if (ret < 0) {
				break;
			}

			XMC_SPI_CH_EnableEvent(config->spi, XMC_SPI_CH_EVENT_STANDARD_RECEIVE |
							    XMC_SPI_CH_EVENT_ALTERNATIVE_RECEIVE);
			dma_completion_flags |= SPI_XMC4XXX_DMA_RX_DONE_FLAG;

			ret = dma_start(dma_rx->dev_dma, dma_rx->dma_channel);
			if (ret < 0) {
				break;
			}

		} else {
			XMC_SPI_CH_DisableEvent(config->spi,
						XMC_SPI_CH_EVENT_STANDARD_RECEIVE |
						XMC_SPI_CH_EVENT_ALTERNATIVE_RECEIVE);
		}

		if (ctx->tx_buf) {
			dma_tx->blk_cfg.source_address = (uint32_t)ctx->tx_buf;
			dma_tx->blk_cfg.source_addr_adj = DMA_ADDR_ADJ_INCREMENT;
		} else {
			dma_tx->blk_cfg.source_address = (uint32_t)&tx_dummy_data;
			dma_tx->blk_cfg.source_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		}

		dma_tx->blk_cfg.block_size = dma_len;

		ret = dma_config(dma_tx->dev_dma, dma_tx->dma_channel, &dma_tx->dma_cfg);
		if (ret < 0) {
			break;
		}

		data->dma_status_flags = 0;
		data->dma_completion_flags = dma_completion_flags;

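		/* Kick the TX service request so a transfer request is already
		 * pending when the TX DMA channel starts.
		 */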
		XMC_SPI_CH_EnableEvent(config->spi, XMC_SPI_CH_EVENT_RECEIVE_START);
		XMC_USIC_CH_TriggerServiceRequest(config->spi, data->service_request_tx);

		ret = dma_start(dma_tx->dev_dma, dma_tx->dma_channel);
		if (ret < 0) {
			break;
		}

		ret = spi_xmc4xxx_dma_rx_tx_done(data);
		if (ret) {
			break;
		}

		spi_context_update_tx(ctx, 1, dma_len);
		spi_context_update_rx(ctx, 1, dma_len);
	}

	if (ret < 0) {
		dma_stop(dma_tx->dev_dma, dma_tx->dma_channel);
		dma_stop(dma_rx->dev_dma, dma_rx->dma_channel);
	}

	if (!(spi_cfg->operation & SPI_HOLD_ON_CS)) {
		spi_context_cs_control(ctx, false);
	}

#if defined(CONFIG_SPI_XMC4XXX_INTERRUPT)
	irq_enable(config->irq_num_rx);
#endif
	spi_context_release(ctx, ret);

	return ret;
}
#endif

static int spi_xmc4xxx_transceive_sync(const struct device *dev, const struct spi_config *spi_cfg,
				       const struct spi_buf_set *tx_bufs,
				       const struct spi_buf_set *rx_bufs)
{
#if defined(CONFIG_SPI_XMC4XXX_DMA)
	struct spi_xmc4xxx_data *data = dev->data;

	if (data->dma_tx.dev_dma != NULL && data->dma_rx.dev_dma != NULL) {
		return spi_xmc4xxx_transceive_dma(dev, spi_cfg, tx_bufs, rx_bufs, false, NULL,
						  NULL);
	}
#endif
	return spi_xmc4xxx_transceive(dev, spi_cfg, tx_bufs, rx_bufs, false, NULL, NULL);
}

static int spi_xmc4xxx_release(const struct device *dev, const struct spi_config *config)
{
	struct spi_xmc4xxx_data *data = dev->data;

	if (!spi_context_configured(&data->ctx, config)) {
		return -EINVAL;
	}

	spi_context_unlock_unconditionally(&data->ctx);
	return 0;
}

#if defined(CONFIG_SPI_XMC4XXX_DMA)
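/*
 * Route the receive events to a service request line; the line index
 * (0..5 within the owning USIC module) follows from the NVIC IRQ number
 * assigned in the devicetree, and the same line is used as the DMA trigger.
 */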
static void spi_xmc4xxx_configure_rx_service_requests(const struct device *dev)
{
	const struct spi_xmc4xxx_config *config = dev->config;
	struct spi_xmc4xxx_data *data = dev->data;

	__ASSERT(config->irq_num_rx >= USIC_IRQ_MIN && config->irq_num_rx <= USIC_IRQ_MAX,
		 "Invalid irq number\n");

	data->service_request_rx = (config->irq_num_rx - USIC_IRQ_MIN) % IRQS_PER_USIC;

	XMC_SPI_CH_SelectInterruptNodePointer(config->spi,
					      XMC_SPI_CH_INTERRUPT_NODE_POINTER_RECEIVE,
					      data->service_request_rx);
	XMC_SPI_CH_SelectInterruptNodePointer(config->spi,
					      XMC_SPI_CH_INTERRUPT_NODE_POINTER_ALTERNATE_RECEIVE,
					      data->service_request_rx);
}

static void spi_xmc4xxx_configure_tx_service_requests(const struct device *dev)
{
	const struct spi_xmc4xxx_config *config = dev->config;
	struct spi_xmc4xxx_data *data = dev->data;

	__ASSERT(config->irq_num_tx >= USIC_IRQ_MIN && config->irq_num_tx <= USIC_IRQ_MAX,
		 "Invalid irq number\n");

	data->service_request_tx = (config->irq_num_tx - USIC_IRQ_MIN) % IRQS_PER_USIC;

	XMC_USIC_CH_SetInterruptNodePointer(config->spi,
					    XMC_USIC_CH_INTERRUPT_NODE_POINTER_TRANSMIT_BUFFER,
					    data->service_request_tx);
}
#endif

static int spi_xmc4xxx_init(const struct device *dev)
{
	struct spi_xmc4xxx_data *data = dev->data;
	const struct spi_xmc4xxx_config *config = dev->config;
	int ret;

	XMC_USIC_CH_Enable(config->spi);

	spi_context_unlock_unconditionally(&data->ctx);

#if defined(CONFIG_SPI_XMC4XXX_INTERRUPT)
	config->irq_config_func(dev);
#endif

#if defined(CONFIG_SPI_XMC4XXX_DMA)
	spi_xmc4xxx_configure_tx_service_requests(dev);
	spi_xmc4xxx_configure_rx_service_requests(dev);

	if (data->dma_rx.dev_dma != NULL) {
		if (!device_is_ready(data->dma_rx.dev_dma)) {
			return -ENODEV;
		}
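		/* RX DMA reads received bytes directly from the USIC RBUF register. */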
		data->dma_rx.blk_cfg.source_address = (uint32_t)&config->spi->RBUF;
		data->dma_rx.dma_cfg.head_block = &data->dma_rx.blk_cfg;
		data->dma_rx.dma_cfg.user_data = (void *)data;
	}

	if (data->dma_tx.dev_dma != NULL) {
		if (!device_is_ready(data->dma_tx.dev_dma)) {
			return -ENODEV;
		}
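		/* TX DMA writes into the TBUF entry whose index encodes the
		 * standard transfer mode control bits.
		 */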
		data->dma_tx.blk_cfg.dest_address =
			(uint32_t)&config->spi->TBUF[XMC_SPI_CH_MODE_STANDARD];
		data->dma_tx.blk_cfg.dest_addr_adj = DMA_ADDR_ADJ_NO_CHANGE;
		data->dma_tx.dma_cfg.head_block = &data->dma_tx.blk_cfg;
		data->dma_tx.dma_cfg.user_data = (void *)data;
	}
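	/* Count limit of 2: the TX and RX DMA callbacks may each give the
	 * semaphore before the waiting thread takes it.
	 */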
	k_sem_init(&data->status_sem, 0, 2);
#endif

	ret = pinctrl_apply_state(config->pcfg, PINCTRL_STATE_DEFAULT);
	if (ret < 0) {
		return ret;
	}

	XMC_SPI_CH_SetInputSource(config->spi, XMC_SPI_CH_INPUT_DIN0, config->miso_src);
	spi_context_cs_configure_all(&data->ctx);

	return 0;
}

static DEVICE_API(spi, spi_xmc4xxx_driver_api) = {
	.transceive = spi_xmc4xxx_transceive_sync,
#if defined(CONFIG_SPI_ASYNC)
	.transceive_async = spi_xmc4xxx_transceive_async,
#endif
#ifdef CONFIG_SPI_RTIO
	.iodev_submit = spi_rtio_iodev_default_submit,
#endif
	.release = spi_xmc4xxx_release,
};

#if defined(CONFIG_SPI_XMC4XXX_DMA)
#define SPI_DMA_CHANNEL_INIT(index, dir, ch_dir, src_burst, dst_burst)                             \
	.dev_dma = DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(index, dir)),                           \
	.dma_channel = DT_INST_DMAS_CELL_BY_NAME(index, dir, channel),                             \
	.dma_cfg = {                                                                               \
		.dma_slot = DT_INST_DMAS_CELL_BY_NAME(index, dir, config),                         \
		.channel_direction = ch_dir,                                                       \
		.channel_priority = DT_INST_DMAS_CELL_BY_NAME(index, dir, priority),               \
		.source_data_size = 1,                                                             \
		.dest_data_size = 1,                                                               \
		.source_burst_length = src_burst,                                                  \
		.dest_burst_length = dst_burst,                                                    \
		.block_count = 1,                                                                  \
		.dma_callback = spi_xmc4xxx_dma_callback,                                          \
		.complete_callback_en = true,                                                      \
	},

#define SPI_DMA_CHANNEL(index, dir, ch_dir, src_burst, dst_burst)                                  \
	.dma_##dir = {COND_CODE_1(                                                                 \
		DT_INST_DMAS_HAS_NAME(index, dir),                                                 \
		(SPI_DMA_CHANNEL_INIT(index, dir, ch_dir, src_burst, dst_burst)), (NULL))},
#else
#define SPI_DMA_CHANNEL(index, dir, ch_dir, src_burst, dst_burst)
#endif

#if defined(CONFIG_SPI_XMC4XXX_INTERRUPT)

#define XMC4XXX_IRQ_HANDLER_INIT(index)                                                            \
	static void spi_xmc4xxx_irq_setup_##index(const struct device *dev)                        \
	{                                                                                          \
		const struct spi_xmc4xxx_config *config = dev->config;                             \
		uint8_t service_request;                                                           \
		uint8_t irq_num;                                                                   \
												   \
		irq_num = DT_INST_IRQ_BY_NAME(index, rx, irq);                                     \
		service_request = (irq_num - USIC_IRQ_MIN) % IRQS_PER_USIC;                        \
												   \
		XMC_SPI_CH_SelectInterruptNodePointer(                                             \
			config->spi, XMC_SPI_CH_INTERRUPT_NODE_POINTER_RECEIVE, service_request);  \
		XMC_SPI_CH_SelectInterruptNodePointer(                                             \
			config->spi, XMC_SPI_CH_INTERRUPT_NODE_POINTER_ALTERNATE_RECEIVE,          \
			service_request);                                                          \
												   \
		XMC_SPI_CH_EnableEvent(config->spi, XMC_SPI_CH_EVENT_STANDARD_RECEIVE |            \
						    XMC_SPI_CH_EVENT_ALTERNATIVE_RECEIVE);         \
												   \
		IRQ_CONNECT(DT_INST_IRQ_BY_NAME(index, rx, irq),                                   \
			    DT_INST_IRQ_BY_NAME(index, rx, priority), spi_xmc4xxx_isr,             \
			    DEVICE_DT_INST_GET(index), 0);                                         \
												   \
		irq_enable(irq_num);                                                               \
	}

#define XMC4XXX_IRQ_HANDLER_STRUCT_INIT(index) .irq_config_func = spi_xmc4xxx_irq_setup_##index,

#else
#define XMC4XXX_IRQ_HANDLER_INIT(index)
#define XMC4XXX_IRQ_HANDLER_STRUCT_INIT(index)
#endif

#if defined(CONFIG_SPI_XMC4XXX_DMA)
#define XMC4XXX_IRQ_DMA_STRUCT_INIT(index)                                                         \
	.irq_num_rx = DT_INST_IRQ_BY_NAME(index, rx, irq),                                         \
	.irq_num_tx = DT_INST_IRQ_BY_NAME(index, tx, irq),
#else
#define XMC4XXX_IRQ_DMA_STRUCT_INIT(index)
#endif

#define XMC4XXX_INIT(index)                                                                        \
	PINCTRL_DT_INST_DEFINE(index);                                                             \
	XMC4XXX_IRQ_HANDLER_INIT(index)                                                            \
	static struct spi_xmc4xxx_data xmc4xxx_data_##index = {                                    \
		SPI_CONTEXT_CS_GPIOS_INITIALIZE(DT_DRV_INST(index), ctx)                           \
			SPI_CONTEXT_INIT_LOCK(xmc4xxx_data_##index, ctx),                          \
		SPI_CONTEXT_INIT_SYNC(xmc4xxx_data_##index, ctx),                                  \
		SPI_DMA_CHANNEL(index, tx, MEMORY_TO_PERIPHERAL, 8, 1)                             \
			SPI_DMA_CHANNEL(index, rx, PERIPHERAL_TO_MEMORY, 1, 8)};                   \
                                                                                                   \
	static const struct spi_xmc4xxx_config xmc4xxx_config_##index = {                          \
		.spi = (XMC_USIC_CH_t *)DT_INST_REG_ADDR(index),                                   \
		.pcfg = PINCTRL_DT_INST_DEV_CONFIG_GET(index),                                     \
		.miso_src = DT_INST_ENUM_IDX(index, miso_src),                                     \
		XMC4XXX_IRQ_HANDLER_STRUCT_INIT(index) XMC4XXX_IRQ_DMA_STRUCT_INIT(index)};        \
                                                                                                   \
	SPI_DEVICE_DT_INST_DEFINE(index, spi_xmc4xxx_init, NULL, &xmc4xxx_data_##index,            \
				  &xmc4xxx_config_##index, POST_KERNEL, CONFIG_SPI_INIT_PRIORITY,  \
				  &spi_xmc4xxx_driver_api);

DT_INST_FOREACH_STATUS_OKAY(XMC4XXX_INIT)