/*
 * Copyright 2022-2024 NXP
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#define DT_DRV_COMPAT nxp_s32_spi

#include <zephyr/drivers/clock_control.h>
#include <zephyr/drivers/pinctrl.h>
#include "spi_nxp_s32.h"

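/*
 * Helper to check whether the current chunk is the last packet of the
 * on-going transfer, i.e. whether CS can be released once it completes.
 */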
static bool spi_nxp_s32_last_packet(struct spi_nxp_s32_data *data)
{
	struct spi_context *ctx = &data->ctx;

	if (ctx->tx_count <= 1U && ctx->rx_count <= 1U) {
		if (!spi_context_tx_on(ctx) && (data->transfer_len == ctx->rx_len)) {
			return true;
		}

		if (!spi_context_rx_on(ctx) && (data->transfer_len == ctx->tx_len)) {
			return true;
		}

		if ((ctx->rx_len == ctx->tx_len) && (data->transfer_len == ctx->tx_len)) {
			return true;
		}
	}

	return false;
}

static inline bool spi_nxp_s32_transfer_done(struct spi_context *ctx)
{
	return !spi_context_tx_on(ctx) && !spi_context_rx_on(ctx);
}

static int spi_nxp_s32_transfer_next_packet(const struct device *dev)
{
	const struct spi_nxp_s32_config *config = dev->config;
	struct spi_nxp_s32_data *data = dev->data;

	Spi_Ip_StatusType status;
	Spi_Ip_CallbackType data_cb;
	Spi_Ip_TransferAdjustmentType param;

#ifdef CONFIG_NXP_S32_SPI_INTERRUPT
	data_cb = config->cb;
#else
	data_cb = NULL;
#endif /* CONFIG_NXP_S32_SPI_INTERRUPT */

	data->transfer_len = spi_context_max_continuous_chunk(&data->ctx);
	data->transfer_len = MIN(data->transfer_len,
					SPI_NXP_S32_MAX_BYTES_PER_PACKAGE(data->bytes_per_frame));

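	/*
	 * Keep CS asserted between packets of the same transaction; release it
	 * only after the last packet (see spi_nxp_s32_last_packet() above).
	 */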
	param.KeepCs = !spi_nxp_s32_last_packet(data);
	param.DeviceParams = NULL;
	Spi_Ip_UpdateTransferParam(&data->transfer_cfg, &param);

	status = Spi_Ip_AsyncTransmit(&data->transfer_cfg, (uint8_t *)data->ctx.tx_buf,
						data->ctx.rx_buf, data->transfer_len, data_cb);

	if (status) {
		LOG_ERR("Transfer could not start");
		return -EIO;
	}

#ifdef CONFIG_NXP_S32_SPI_INTERRUPT
	return 0;
#else

	while (Spi_Ip_GetStatus(config->spi_hw_cfg->Instance) == SPI_IP_BUSY) {
		Spi_Ip_ManageBuffers(config->spi_hw_cfg->Instance);
	}

	if (Spi_Ip_GetStatus(config->spi_hw_cfg->Instance) == SPI_IP_FAULT) {
		return -EIO;
	}

	return 0;
#endif /* CONFIG_NXP_S32_SPI_INTERRUPT */
}

/*
 * Helper to compute the Scaler and Prescaler register values that configure
 * the baudrate of the transmission. The resulting frequency is computed so
 * that it is always equal to, or the nearest approximation below, the
 * requested one.
 */
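/*
 * Illustrative example (values assumed for clarity, not taken from any
 * particular board): with an 80 MHz module clock and a requested baudrate of
 * 10 MHz, prescaler 2 and scaler 4 give exactly 80 MHz / (2 * 4) = 10 MHz.
 * A request that cannot be matched exactly settles on the closest lower
 * frequency instead.
 */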
static void spi_nxp_s32_getbestfreq(uint32_t clock_frequency,
					uint32_t requested_baud,
					struct spi_nxp_s32_baudrate_param *best_baud)
{
	uint8_t scaler;
	uint8_t prescaler;

	uint32_t low, high;
	uint32_t curr_freq;

	uint32_t best_freq = 0U;

	static const uint8_t prescaler_arr[SPI_NXP_S32_NUM_PRESCALER] = {2U, 3U, 5U, 7U};

	static const uint16_t scaller_arr[SPI_NXP_S32_NUM_SCALER] = {
		2U, 4U, 6U, 8U, 16U, 32U, 64U, 128U, 256U, 512U, 1024U, 2048U,
		4096U, 8192U, 16384U, 32768U
	};

	for (prescaler = 0U; prescaler < SPI_NXP_S32_NUM_PRESCALER; prescaler++) {
		low = 0U;
		high = SPI_NXP_S32_NUM_SCALER - 1U;

		/* Binary search over the scaler table for the best match */
		do {
			scaler = (low + high) / 2U;

			curr_freq = clock_frequency * 1U /
					(prescaler_arr[prescaler] * scaller_arr[scaler]);

			/*
			 * If this scaler makes the current frequency higher than
			 * the requested one, skip the best-match update below
			 */
			if (curr_freq > requested_baud) {
				low = scaler;
				continue;
			} else {
				high = scaler;
			}

			if ((requested_baud - best_freq) > (requested_baud - curr_freq)) {
				best_freq = curr_freq;
				best_baud->prescaler = prescaler;
				best_baud->scaler    = scaler;
			}

			if (best_freq == requested_baud) {
				break;
			}

		} while ((high - low) > 1U);

		if ((high - low) <= 1U) {

			if (high == scaler) {
				/* use low value */
				scaler = low;
			} else {
				scaler = high;
			}

			curr_freq = clock_frequency * 1U /
					(prescaler_arr[prescaler] * scaller_arr[scaler]);

			if (curr_freq <= requested_baud) {

				if ((requested_baud - best_freq) > (requested_baud - curr_freq)) {
					best_freq = curr_freq;
					best_baud->prescaler = prescaler;
					best_baud->scaler    = scaler;
				}
			}
		}

		if (best_freq == requested_baud) {
			break;
		}
	}

	best_baud->frequency = best_freq;
}

/*
 * Helper to compute the Scaler and Prescaler register values that configure
 * the delays of the transmission. The resulting delay is computed so that it
 * is always equal to, or the nearest approximation above, the requested one.
 * In the worst case, the longest possible delay is used.
 */
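/*
 * Illustrative example (values assumed for clarity): with an 80 MHz module
 * clock, a requested delay of 100 ns is met exactly by prescaler value 1 and
 * scaler index 2, since 1000 * 1 * 2^(2 + 1) / 80 = 100 ns.
 */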
static void spi_nxp_s32_getbestdelay(uint32_t clock_frequency, uint32_t requested_delay,
					uint8_t *best_scaler, uint8_t *best_prescaler)
{
	uint32_t current_delay;
	uint8_t scaler, prescaler;
	uint32_t low, high;

	uint32_t best_delay = 0xFFFFFFFFU;

	/* The scaler values are powers of two, so they do not need an array */
	static const uint8_t prescaler_arr[SPI_NXP_S32_NUM_PRESCALER] = {1U, 3U, 5U, 7U};

	clock_frequency = clock_frequency / MHZ(1);

	for (prescaler = 0; prescaler < SPI_NXP_S32_NUM_PRESCALER; prescaler++) {
		low = 0U;
		high = SPI_NXP_S32_NUM_SCALER - 1U;

		do {
			scaler = (low + high) / 2U;

			current_delay = NSEC_PER_USEC * prescaler_arr[prescaler]
						* (1U << (scaler + 1)) / clock_frequency;

			/*
			 * If this scaler makes the current delay smaller than
			 * the requested one, skip the best-match update below
			 */
			if (current_delay < requested_delay) {
				low = scaler;
				continue;
			} else {
				high = scaler;
			}

			if ((best_delay - requested_delay) > (current_delay - requested_delay)) {
				best_delay = current_delay;
				*best_prescaler = prescaler;
				*best_scaler = scaler;
			}

			if (best_delay == requested_delay) {
				break;
			}

		} while ((high - low) > 1U);

		if ((high - low) <= 1U) {

			if (high == scaler) {
				/* use low value */
				scaler = low;
			} else {
				scaler = high;
			}

			current_delay = NSEC_PER_USEC * prescaler_arr[prescaler]
						* (1U << (scaler + 1)) / clock_frequency;

			if (current_delay >= requested_delay) {
				if ((best_delay - requested_delay) >
					(current_delay - requested_delay)) {

					best_delay = current_delay;
					*best_prescaler = prescaler;
					*best_scaler = scaler;
				}
			}
		}

		if (best_delay == requested_delay) {
			break;
		}
	}

	if (best_delay == 0xFFFFFFFFU) {
		/* Use the delay as much as possible */
		*best_prescaler = SPI_NXP_S32_NUM_PRESCALER - 1U;
		*best_scaler = SPI_NXP_S32_NUM_SCALER - 1U;
	}
}

static int spi_nxp_s32_configure(const struct device *dev,
				const struct spi_config *spi_cfg)
{
	const struct spi_nxp_s32_config *config = dev->config;
	struct spi_nxp_s32_data *data = dev->data;

	bool clk_phase, clk_polarity;
	bool lsb, hold_cs;
	bool slave_mode, cs_active_high;

	uint8_t frame_size;

	struct spi_nxp_s32_baudrate_param best_baud = {0};
	uint32_t clock_rate;
	int err;

	if (spi_context_configured(&data->ctx, spi_cfg)) {
		/* This configuration is already in use */
		return 0;
	}

	err = clock_control_get_rate(config->clock_dev, config->clock_subsys, &clock_rate);
	if (err) {
		LOG_ERR("Failed to get clock frequency");
		return err;
	}

	clk_phase	= !!(SPI_MODE_GET(spi_cfg->operation) & SPI_MODE_CPHA);
	clk_polarity	= !!(SPI_MODE_GET(spi_cfg->operation) & SPI_MODE_CPOL);

	hold_cs		= !!(spi_cfg->operation & SPI_HOLD_ON_CS);
	lsb		= !!(spi_cfg->operation & SPI_TRANSFER_LSB);

	slave_mode	= !!(SPI_OP_MODE_GET(spi_cfg->operation));
	frame_size	= SPI_WORD_SIZE_GET(spi_cfg->operation);
	cs_active_high	= !!(spi_cfg->operation & SPI_CS_ACTIVE_HIGH);

	if (slave_mode == (!!(config->spi_hw_cfg->Mcr & SPI_MCR_MSTR_MASK))) {
		LOG_ERR("SPI mode (master/slave) must be the same as configured in DT");
		return -ENOTSUP;
	}

	if (slave_mode && !IS_ENABLED(CONFIG_SPI_SLAVE)) {
		LOG_ERR("Kconfig option to enable SPI slave mode is not enabled");
		return -ENOTSUP;
	}

	if (slave_mode && lsb) {
		LOG_ERR("SPI does not support LSB-first shifting in slave mode");
		return -ENOTSUP;
	}

	if (spi_cfg->slave >= config->num_cs) {
		LOG_ERR("Slave %d exceeds the allowed maximum value (%d)",
			spi_cfg->slave, config->num_cs - 1);
		return -ENOTSUP;
	}

	if (frame_size > 32U) {
		LOG_ERR("Unsupported frame size %d bits", frame_size);
		return -ENOTSUP;
	}

	if ((spi_cfg->operation & SPI_LINES_MASK) != SPI_LINES_SINGLE) {
		LOG_ERR("Only single line mode is supported");
		return -ENOTSUP;
	}

	if (spi_cfg->operation & SPI_MODE_LOOP) {
		LOG_ERR("Loopback mode is not supported");
		return -ENOTSUP;
	}

	if (cs_active_high && !spi_cs_is_gpio(spi_cfg)) {
		LOG_ERR("For an active-high CS, a GPIO pin must be used to"
			" control the CS line instead");
		return -ENOTSUP;
	}

	if (!slave_mode) {

		if ((spi_cfg->frequency < SPI_NXP_S32_MIN_FREQ) ||
			(spi_cfg->frequency > SPI_NXP_S32_MAX_FREQ)) {

			LOG_ERR("The frequency is out of range");
			return -ENOTSUP;
		}

		spi_nxp_s32_getbestfreq(clock_rate, spi_cfg->frequency, &best_baud);

		data->transfer_cfg.Ctar &= ~(SPI_CTAR_BR_MASK | SPI_CTAR_PBR_MASK);
		data->transfer_cfg.Ctar |= SPI_CTAR_BR(best_baud.scaler) |
						SPI_CTAR_PBR(best_baud.prescaler);

		data->transfer_cfg.PushrCmd &= ~((SPI_PUSHR_CONT_MASK | SPI_PUSHR_PCS_MASK) >> 16U);

		if (!spi_cs_is_gpio(spi_cfg)) {
			/* Use inner CS signal from SPI module */
			data->transfer_cfg.PushrCmd |= hold_cs << 15U;
			data->transfer_cfg.PushrCmd |= (1U << spi_cfg->slave);
		}
	}

	data->transfer_cfg.Ctar &= ~(SPI_CTAR_CPHA_MASK | SPI_CTAR_CPOL_MASK);
	data->transfer_cfg.Ctar |= SPI_CTAR_CPHA(clk_phase) | SPI_CTAR_CPOL(clk_polarity);

	Spi_Ip_UpdateFrameSize(&data->transfer_cfg, frame_size);
	Spi_Ip_UpdateLsb(&data->transfer_cfg, lsb);

	data->ctx.config	= spi_cfg;
	data->bytes_per_frame	= SPI_NXP_S32_BYTE_PER_FRAME(frame_size);

	if (slave_mode) {
		LOG_DBG("SPI configuration: cpol = %u, cpha = %u,"
			" lsb = %u, frame_size = %u, mode: slave",
			clk_polarity, clk_phase, lsb, frame_size);
	} else {
		LOG_DBG("SPI configuration: frequency = %uHz, cpol = %u,"
			" cpha = %u, lsb = %u, hold_cs = %u, frame_size = %u,"
			" mode: master, CS = %u\n",
			best_baud.frequency, clk_polarity, clk_phase,
			lsb, hold_cs, frame_size, spi_cfg->slave);
	}

	return 0;
}

static int transceive(const struct device *dev,
			const struct spi_config *spi_cfg,
			const struct spi_buf_set *tx_bufs,
			const struct spi_buf_set *rx_bufs,
			bool asynchronous,
			spi_callback_t cb,
			void *userdata)
{
	struct spi_nxp_s32_data *data = dev->data;
	struct spi_context *context = &data->ctx;
	int ret;

	if (!tx_bufs && !rx_bufs) {
		return 0;
	}

#ifndef CONFIG_NXP_S32_SPI_INTERRUPT
	if (asynchronous) {
		return -ENOTSUP;
	}
#endif /* CONFIG_NXP_S32_SPI_INTERRUPT */

	spi_context_lock(context, asynchronous, cb, userdata, spi_cfg);

	ret = spi_nxp_s32_configure(dev, spi_cfg);
	if (ret) {
		LOG_ERR("An error occurred in the SPI configuration");
		spi_context_release(context, ret);
		return ret;
	}

	spi_context_buffers_setup(context, tx_bufs, rx_bufs, 1U);

	if (spi_nxp_s32_transfer_done(context)) {
		spi_context_release(context, 0);
		return 0;
	}

	spi_context_cs_control(context, true);

#ifdef CONFIG_NXP_S32_SPI_INTERRUPT
	ret = spi_nxp_s32_transfer_next_packet(dev);

	if (!ret) {
		ret = spi_context_wait_for_completion(context);
	} else {
		spi_context_cs_control(context, false);
	}
#else
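	/*
	 * Polling mode: transmit one packet at a time and advance the
	 * TX/RX buffers until the whole transaction has been handled.
	 */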
	do {
		ret = spi_nxp_s32_transfer_next_packet(dev);

		if (!ret) {
			spi_context_update_tx(context, 1U, data->transfer_len);
			spi_context_update_rx(context, 1U, data->transfer_len);
		}
	} while (!ret && !spi_nxp_s32_transfer_done(context));

	spi_context_cs_control(context, false);

#ifdef CONFIG_SPI_SLAVE
	if (spi_context_is_slave(context) && !ret) {
		ret = data->ctx.recv_frames;
	}
#endif /* CONFIG_SPI_SLAVE */
#endif /* CONFIG_NXP_S32_SPI_INTERRUPT */

	spi_context_release(context, ret);

	return ret;
}

static int spi_nxp_s32_transceive(const struct device *dev,
				const struct spi_config *spi_cfg,
				const struct spi_buf_set *tx_bufs,
				const struct spi_buf_set *rx_bufs)
{
	return transceive(dev, spi_cfg, tx_bufs, rx_bufs, false, NULL, NULL);
}
#ifdef CONFIG_SPI_ASYNC
static int spi_nxp_s32_transceive_async(const struct device *dev,
				const struct spi_config *spi_cfg,
				const struct spi_buf_set *tx_bufs,
				const struct spi_buf_set *rx_bufs,
				spi_callback_t callback,
				void *userdata)
{
	return transceive(dev, spi_cfg, tx_bufs, rx_bufs, true, callback, userdata);
}
#endif /* CONFIG_SPI_ASYNC */

static int spi_nxp_s32_release(const struct device *dev,
				const struct spi_config *spi_cfg)
{
	struct spi_nxp_s32_data *data = dev->data;

	(void)spi_cfg;

	spi_context_unlock_unconditionally(&data->ctx);

	return 0;
}

static int spi_nxp_s32_init(const struct device *dev)
{
	const struct spi_nxp_s32_config *config = dev->config;
	struct spi_nxp_s32_data *data = dev->data;
	uint32_t clock_rate;
	uint8_t scaler, prescaler;

	uint32_t ctar = 0;
	int ret = 0;

	if (!device_is_ready(config->clock_dev)) {
		LOG_ERR("Clock control device not ready");
		return -ENODEV;
	}

	ret = clock_control_on(config->clock_dev, config->clock_subsys);
	if (ret) {
		LOG_ERR("Failed to enable clock");
		return ret;
	}

	ret = clock_control_get_rate(config->clock_dev, config->clock_subsys, &clock_rate);
	if (ret) {
		LOG_ERR("Failed to get clock frequency");
		return ret;
	}

	ret = pinctrl_apply_state(config->pincfg, PINCTRL_STATE_DEFAULT);
	if (ret < 0) {
		return ret;
	}

	if (Spi_Ip_Init(config->spi_hw_cfg)) {
		return -EBUSY;
	}

#ifdef CONFIG_NXP_S32_SPI_INTERRUPT
	if (Spi_Ip_UpdateTransferMode(config->spi_hw_cfg->Instance, SPI_IP_INTERRUPT)) {
		return -EBUSY;
	}

	config->irq_config_func(dev);
#endif /* CONFIG_NXP_S32_SPI_INTERRUPT */

	/*
	 * Update the delay timing configuration that is applied
	 * to all inner CS signals of the SPI module.
	 */
	spi_nxp_s32_getbestdelay(clock_rate, config->sck_cs_delay, &scaler, &prescaler);

	ctar |= SPI_CTAR_ASC(scaler) | SPI_CTAR_PASC(prescaler);

	spi_nxp_s32_getbestdelay(clock_rate, config->cs_sck_delay, &scaler, &prescaler);

	ctar |= SPI_CTAR_CSSCK(scaler) | SPI_CTAR_PCSSCK(prescaler);

	spi_nxp_s32_getbestdelay(clock_rate, config->cs_cs_delay, &scaler, &prescaler);

	ctar |= SPI_CTAR_DT(scaler) | SPI_CTAR_PDT(prescaler);

	data->transfer_cfg.Ctar |= ctar;
	data->transfer_cfg.DeviceParams = &data->transfer_params;

	ret = spi_context_cs_configure_all(&data->ctx);
	if (ret < 0) {
		return ret;
	}

	spi_context_unlock_unconditionally(&data->ctx);

	return 0;
}


#ifdef CONFIG_NXP_S32_SPI_INTERRUPT
void spi_nxp_s32_isr(const struct device *dev)
{
	const struct spi_nxp_s32_config *config = dev->config;

	Spi_Ip_IrqHandler(config->spi_hw_cfg->Instance);
}

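/*
 * Called from the HAL on transfer events: on end of a packet, advance the TX
 * and RX buffers and either complete the transaction or start the next packet;
 * any other event is treated as an error.
 */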
static void spi_nxp_s32_transfer_callback(const struct device *dev, Spi_Ip_EventType event)
{
	struct spi_nxp_s32_data *data = dev->data;
	int ret = 0;

	if (event == SPI_IP_EVENT_END_TRANSFER) {
		spi_context_update_tx(&data->ctx, 1U, data->transfer_len);
		spi_context_update_rx(&data->ctx, 1U, data->transfer_len);

		if (spi_nxp_s32_transfer_done(&data->ctx)) {
			spi_context_complete(&data->ctx, dev, 0);
			spi_context_cs_control(&data->ctx, false);
		} else {
			ret = spi_nxp_s32_transfer_next_packet(dev);
		}
	} else {
		LOG_ERR("Failure in transfer callback");
		ret = -EIO;
	}

	if (ret) {
		spi_context_complete(&data->ctx, dev, ret);
		spi_context_cs_control(&data->ctx, false);
	}
}
#endif /* CONFIG_NXP_S32_SPI_INTERRUPT */

static DEVICE_API(spi, spi_nxp_s32_driver_api) = {
	.transceive = spi_nxp_s32_transceive,
#ifdef CONFIG_SPI_ASYNC
	.transceive_async = spi_nxp_s32_transceive_async,
#endif
#ifdef CONFIG_SPI_RTIO
	.iodev_submit = spi_rtio_iodev_default_submit,
#endif
	.release = spi_nxp_s32_release,
};

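/*
 * Map the instance's register base address from devicetree to the HW instance
 * index expected by the HAL: each candidate index contributes its value only
 * when the base addresses match, and the results are OR'ed together.
 */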
#define SPI_NXP_S32_HW_INSTANCE_CHECK(i, n) \
	((DT_INST_REG_ADDR(n) == IP_SPI_##i##_BASE) ? i : 0)

#define SPI_NXP_S32_HW_INSTANCE(n) \
	LISTIFY(__DEBRACKET SPI_INSTANCE_COUNT, SPI_NXP_S32_HW_INSTANCE_CHECK, (|), n)

#define SPI_NXP_S32_NUM_CS(n)		DT_INST_PROP(n, num_cs)
#define SPI_NXP_S32_IS_MASTER(n)	!DT_INST_PROP(n, slave)

#ifdef CONFIG_SPI_SLAVE
#define SPI_NXP_S32_SET_SLAVE(n)	.SlaveMode = DT_INST_PROP(n, slave),
#else
#define SPI_NXP_S32_SET_SLAVE(n)
#endif

#ifdef CONFIG_NXP_S32_SPI_INTERRUPT

#define SPI_NXP_S32_CONFIG_INTERRUPT_FUNC(n)						\
	.irq_config_func = spi_nxp_s32_config_func_##n,

#define SPI_NXP_S32_INTERRUPT_DEFINE(n)							\
	static void spi_nxp_s32_config_func_##n(const struct device *dev)		\
	{										\
		IRQ_CONNECT(DT_INST_IRQN(n), DT_INST_IRQ(n, priority),			\
			spi_nxp_s32_isr, DEVICE_DT_INST_GET(n),		\
			DT_INST_IRQ(n, flags));						\
		irq_enable(DT_INST_IRQN(n));						\
	}

#define SPI_NXP_S32_CONFIG_CALLBACK_FUNC(n)						\
	.cb = spi_nxp_s32_##n##_callback,

#define SPI_NXP_S32_CALLBACK_DEFINE(n)							\
	static void spi_nxp_s32_##n##_callback(uint8 instance, Spi_Ip_EventType event)	\
	{										\
		ARG_UNUSED(instance);							\
		const struct device *dev = DEVICE_DT_INST_GET(n);			\
											\
		spi_nxp_s32_transfer_callback(dev, event);				\
	}
#else
#define SPI_NXP_S32_CONFIG_INTERRUPT_FUNC(n)
#define SPI_NXP_S32_INTERRUPT_DEFINE(n)
#define SPI_NXP_S32_CONFIG_CALLBACK_FUNC(n)
#define SPI_NXP_S32_CALLBACK_DEFINE(n)
#endif /* CONFIG_NXP_S32_SPI_INTERRUPT */

/*
 * Declare the default configuration for the SPI driver: no DMA
 * support, and all inner module Chip Selects active low.
 */
#define SPI_NXP_S32_INSTANCE_CONFIG(n)							\
	static const Spi_Ip_ConfigType spi_nxp_s32_default_config_##n = {		\
		.Instance = SPI_NXP_S32_HW_INSTANCE(n),					\
		.Mcr = (SPI_MCR_MSTR(SPI_NXP_S32_IS_MASTER(n)) |			\
			SPI_MCR_CONT_SCKE(0U) |	SPI_MCR_FRZ(0U) |			\
			SPI_MCR_MTFE(0U) | SPI_MCR_SMPL_PT(0U) |			\
			SPI_MCR_PCSIS(BIT_MASK(SPI_NXP_S32_NUM_CS(n))) |		\
			SPI_MCR_MDIS(0U) | SPI_MCR_XSPI(1U) | SPI_MCR_HALT(1U)),	\
		.TransferMode = SPI_IP_POLLING,						\
		.StateIndex   = n,							\
		SPI_NXP_S32_SET_SLAVE(n)						\
	}

#define SPI_NXP_S32_TRANSFER_CONFIG(n)							\
	.transfer_cfg = {								\
		.Instance = SPI_NXP_S32_HW_INSTANCE(n),					\
		.Ctare = SPI_CTARE_FMSZE(0U) | SPI_CTARE_DTCP(1U),			\
	}

#define SPI_NXP_S32_DEVICE(n)								\
	PINCTRL_DT_INST_DEFINE(n);							\
	SPI_NXP_S32_CALLBACK_DEFINE(n)							\
	SPI_NXP_S32_INTERRUPT_DEFINE(n)							\
	SPI_NXP_S32_INSTANCE_CONFIG(n);							\
	static const struct spi_nxp_s32_config spi_nxp_s32_config_##n = {		\
		.num_cs	      = SPI_NXP_S32_NUM_CS(n),					\
		.clock_dev    = DEVICE_DT_GET(DT_INST_CLOCKS_CTLR(n)),			\
		.clock_subsys = (clock_control_subsys_t)DT_INST_CLOCKS_CELL(n, name),	\
		.sck_cs_delay = DT_INST_PROP_OR(n, spi_sck_cs_delay, 0U),		\
		.cs_sck_delay = DT_INST_PROP_OR(n, spi_cs_sck_delay, 0U),		\
		.cs_cs_delay  = DT_INST_PROP_OR(n, spi_cs_cs_delay, 0U),		\
		.spi_hw_cfg = (Spi_Ip_ConfigType *)&spi_nxp_s32_default_config_##n,	\
		.pincfg = PINCTRL_DT_INST_DEV_CONFIG_GET(n),				\
		SPI_NXP_S32_CONFIG_CALLBACK_FUNC(n)					\
		SPI_NXP_S32_CONFIG_INTERRUPT_FUNC(n)					\
	};										\
	static struct spi_nxp_s32_data spi_nxp_s32_data_##n = {				\
		SPI_NXP_S32_TRANSFER_CONFIG(n),						\
		SPI_CONTEXT_INIT_LOCK(spi_nxp_s32_data_##n, ctx),			\
		SPI_CONTEXT_INIT_SYNC(spi_nxp_s32_data_##n, ctx),			\
		SPI_CONTEXT_CS_GPIOS_INITIALIZE(DT_DRV_INST(n), ctx)			\
	};										\
	SPI_DEVICE_DT_INST_DEFINE(n,							\
			spi_nxp_s32_init, NULL,						\
			&spi_nxp_s32_data_##n, &spi_nxp_s32_config_##n,			\
			POST_KERNEL, CONFIG_SPI_INIT_PRIORITY,				\
			&spi_nxp_s32_driver_api);

DT_INST_FOREACH_STATUS_OKAY(SPI_NXP_S32_DEVICE)