/*
 * Copyright (c) 2020 Antmicro <www.antmicro.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <string.h>
#include <zephyr/drivers/i2s.h>
#include <zephyr/kernel.h>
#include <zephyr/sys/byteorder.h>
#include <soc.h>
#include <zephyr/sys/util.h>
#include <zephyr/sys/__assert.h>
#include "i2s_litex.h"
#include <zephyr/logging/log.h>
#include <zephyr/irq.h>

LOG_MODULE_REGISTER(i2s_litex);

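/* Wrap-around increment used for the ring buffer head/tail indices */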
#define MODULO_INC(val, max)                                                   \
	{                                                                      \
		val = (val == max - 1) ? 0 : val + 1;                          \
	}

/**
 * @brief Enable i2s device
 *
 * @param reg base register of device
 */
static void i2s_enable(uintptr_t reg)
{
	uint8_t reg_data = litex_read8(reg + I2S_CONTROL_OFFSET);

	litex_write8(reg_data | I2S_ENABLE, reg + I2S_CONTROL_OFFSET);
}

/**
 * @brief Disable i2s device
 *
 * @param reg base register of device
 */
static void i2s_disable(uintptr_t reg)
{
	uint8_t reg_data = litex_read8(reg + I2S_CONTROL_OFFSET);

	litex_write8(reg_data & ~(I2S_ENABLE), reg + I2S_CONTROL_OFFSET);
}

/**
 * @brief Reset i2s fifo
 *
 * @param reg base register of device
 */
static void i2s_reset_fifo(uintptr_t reg)
{
	uint8_t reg_data = litex_read8(reg + I2S_CONTROL_OFFSET);

	litex_write8(reg_data | I2S_FIFO_RESET, reg + I2S_CONTROL_OFFSET);
}

/**
 * @brief Get i2s format handled by device
 *
 * @param reg base register of device
 *
 * @return currently supported format or an error
 *         when the format can't be handled
 */
static i2s_fmt_t i2s_get_format(uintptr_t reg)
{
	uint32_t reg_data = litex_read32(reg + I2S_CONFIG_OFFSET);

	reg_data &= I2S_CONF_FORMAT_MASK;
	if (reg_data == LITEX_I2S_STANDARD) {
		return I2S_FMT_DATA_FORMAT_I2S;
	} else if (reg_data == LITEX_I2S_LEFT_JUSTIFIED) {
		return I2S_FMT_DATA_FORMAT_LEFT_JUSTIFIED;
	}
	return -EINVAL;
}

/**
 * @brief Get i2s sample width handled by device
 *
 * @param reg base register of device
 *
 * @return i2s sample width in bits
 */
static uint32_t i2s_get_sample_width(uintptr_t reg)
{
	uint32_t reg_data = litex_read32(reg + I2S_CONFIG_OFFSET);

	reg_data &= I2S_CONF_SAMPLE_WIDTH_MASK;
	return reg_data >> I2S_CONF_SAMPLE_WIDTH_OFFSET;
}

/**
 * @brief Get i2s audio sampling rate handled by device
 *
 * @param reg base register of device
 *
 * @return audio sampling rate in Hz
 */
static uint32_t i2s_get_audio_freq(uintptr_t reg)
{
	uint32_t reg_data = litex_read32(reg + I2S_CONFIG_OFFSET);

	reg_data &= I2S_CONF_LRCK_MASK;
	return reg_data >> I2S_CONF_LRCK_FREQ_OFFSET;
}

/**
 * @brief Enable i2s interrupt in event register
 *
 * @param reg base register of device
 * @param irq_type irq type to be enabled, one of I2S_EV_READY or I2S_EV_ERROR
 */
static void i2s_irq_enable(uintptr_t reg, int irq_type)
{
	__ASSERT_NO_MSG(irq_type == I2S_EV_READY || irq_type == I2S_EV_ERROR);

	uint8_t reg_data = litex_read8(reg + I2S_EV_ENABLE_OFFSET);

	litex_write8(reg_data | irq_type, reg + I2S_EV_ENABLE_OFFSET);
}

/**
 * @brief Disable i2s interrupt in event register
 *
 * @param reg base register of device
 * @param irq_type irq type to be disabled, one of I2S_EV_READY or I2S_EV_ERROR
 */
static void i2s_irq_disable(uintptr_t reg, int irq_type)
{
	__ASSERT_NO_MSG(irq_type == I2S_EV_READY || irq_type == I2S_EV_ERROR);

	uint8_t reg_data = litex_read8(reg + I2S_EV_ENABLE_OFFSET);

	litex_write8(reg_data & ~(irq_type), reg + I2S_EV_ENABLE_OFFSET);
}

/**
 * @brief Clear all pending irqs
 *
 * @param reg base register of device
 */
static void i2s_clear_pending_irq(uintptr_t reg)
{
	uint8_t reg_data = litex_read8(reg + I2S_EV_PENDING_OFFSET);

	litex_write8(reg_data, reg + I2S_EV_PENDING_OFFSET);
}

/**
 * @brief Fast data copy function
 *
 * Each operation copies 32-bit data chunks.
 * This function copies data from the fifo into a user buffer.
 *
 * @param dst memory destination where fifo data will be copied to
 * @param size amount of data to be copied in bytes
 * @param sample_width width of single sample in bits
 * @param channels number of received channels
 */
static void i2s_copy_from_fifo(uint8_t *dst, size_t size, int sample_width,
			       int channels)
{
	uint32_t data;
	int chan_size = sample_width / 8;
#if CONFIG_I2S_LITEX_CHANNELS_CONCATENATED
	if (channels == 2) {
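		/* in concatenated mode each 32-bit fifo word carries both
		 * 16-bit channel samples, so whole words are copied at once
		 */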
		for (size_t i = 0; i < size / chan_size; i += 4) {
			/* using sys_read function, as fifo is not a csr,
			 * but a contiguous memory space
			 */
			*((uint32_t *)(dst + i)) = sys_read32(I2S_RX_FIFO_ADDR);
		}
	} else {
		for (size_t i = 0; i < size / chan_size; i += 2) {
			data = sys_read32(I2S_RX_FIFO_ADDR);
			*((uint16_t *)(dst + i)) = data & 0xffff;
		}
	}
#else
	int max_off = chan_size - 1;

	for (size_t i = 0; i < size / chan_size; ++i) {
		data = sys_read32(I2S_RX_FIFO_ADDR);
		for (int off = max_off; off >= 0; off--) {
#if CONFIG_I2S_LITEX_DATA_BIG_ENDIAN
			*(dst + i * chan_size + (max_off - off)) =
				data >> 8 * off;
#else
			*(dst + i * chan_size + off) = data >> 8 * off;
#endif
		}
		/* if mono, copy only the left channel;
		 * the right channel is discarded
		 */
		if (channels == 1) {
			sys_read32(I2S_RX_FIFO_ADDR);
		}
	}
#endif
}

/**
 * @brief Fast data copy function
 *
 * Each operation copies 32-bit data chunks.
 * This function copies data from a user buffer into the fifo.
 *
 * @param src memory from which data will be copied to fifo
 * @param size amount of data to be copied in bytes
 * @param sample_width width of single sample in bits
 * @param channels number of channels to transmit
 */
static void i2s_copy_to_fifo(uint8_t *src, size_t size, int sample_width,
			     int channels)
{
	int chan_size = sample_width / 8;
#if CONFIG_I2S_LITEX_CHANNELS_CONCATENATED
	if (channels == 2) {
		for (size_t i = 0; i < size / chan_size; i += 4) {
			/* using sys_write function, as fifo is not a csr,
			 * but a contiguous memory space
			 */
			sys_write32(*((uint32_t *)(src + i)), I2S_TX_FIFO_ADDR);
		}
	} else {
		for (size_t i = 0; i < size / chan_size; i += 2) {
			sys_write32(*((uint16_t *)(src + i)), I2S_TX_FIFO_ADDR);
		}
	}
#else
	int max_off = chan_size - 1;
	uint32_t data;
	uint8_t *d_ptr = (uint8_t *)&data;

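	/* assemble each sample byte by byte so any supported sample width
	 * and both byte orders are handled with plain byte copies
	 */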
	for (size_t i = 0; i < size / chan_size; ++i) {
		for (int off = max_off; off >= 0; off--) {
#if CONFIG_I2S_LITEX_DATA_BIG_ENDIAN
			*(d_ptr + off) =
				*(src + i * chan_size + (max_off - off));
#else
			*(d_ptr + off) = *(src + i * chan_size + off);
#endif
		}
		sys_write32(data, I2S_TX_FIFO_ADDR);
		/* if mono, send each left sample twice
		 * so the right channel is the same as the left
		 */
		if (channels == 1) {
			sys_write32(data, I2S_TX_FIFO_ADDR);
		}
	}
#endif
}

/*
 * Get data from the queue
 */
static int queue_get(struct ring_buf *rb, void **mem_block, size_t *size)
{
	unsigned int key;

	key = irq_lock();

	if (rb->tail == rb->head) {
		/* Ring buffer is empty */
		irq_unlock(key);
		return -ENOMEM;
	}
	*mem_block = rb->buf[rb->tail].mem_block;
	*size = rb->buf[rb->tail].size;
	MODULO_INC(rb->tail, rb->len);

	irq_unlock(key);
	return 0;
}

/*
 * Put data in the queue
 */
static int queue_put(struct ring_buf *rb, void *mem_block, size_t size)
{
	uint16_t head_next;
	unsigned int key;

	key = irq_lock();

	head_next = rb->head;
	MODULO_INC(head_next, rb->len);

	if (head_next == rb->tail) {
		/* Ring buffer is full */
		irq_unlock(key);
		return -ENOMEM;
	}

	rb->buf[rb->head].mem_block = mem_block;
	rb->buf[rb->head].size = size;
	rb->head = head_next;

	irq_unlock(key);
	return 0;
}

static int i2s_litex_initialize(const struct device *dev)
{
	const struct i2s_litex_cfg *cfg = dev->config;
	struct i2s_litex_data *const dev_data = dev->data;

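	/* the RX semaphore counts filled buffers (starts empty), the TX
	 * semaphore counts free queue slots (all but one available)
	 */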
	k_sem_init(&dev_data->rx.sem, 0, CONFIG_I2S_LITEX_RX_BLOCK_COUNT);
	k_sem_init(&dev_data->tx.sem, CONFIG_I2S_LITEX_TX_BLOCK_COUNT - 1,
		   CONFIG_I2S_LITEX_TX_BLOCK_COUNT);

	cfg->irq_config(dev);
	return 0;
}

static int i2s_litex_configure(const struct device *dev, enum i2s_dir dir,
			       const struct i2s_config *i2s_cfg)
{
	struct i2s_litex_data *const dev_data = dev->data;
	const struct i2s_litex_cfg *const cfg = dev->config;
	struct stream *stream;
	int channels_concatenated = litex_read8(cfg->base + I2S_STATUS_OFFSET);
	int dev_audio_freq = i2s_get_audio_freq(cfg->base);
	int channel_div;

	if (dir == I2S_DIR_RX) {
		stream = &dev_data->rx;
		channels_concatenated &= I2S_RX_STAT_CHANNEL_CONCATENATED_MASK;
	} else if (dir == I2S_DIR_TX) {
		stream = &dev_data->tx;
		channels_concatenated &= I2S_TX_STAT_CHANNEL_CONCATENATED_MASK;
	} else if (dir == I2S_DIR_BOTH) {
		return -ENOSYS;
	} else {
		LOG_ERR("either RX or TX direction must be selected");
		return -EINVAL;
	}

	if (stream->state != I2S_STATE_NOT_READY &&
	    stream->state != I2S_STATE_READY) {
		LOG_ERR("invalid state");
		return -EINVAL;
	}

	if (i2s_cfg->options & I2S_OPT_BIT_CLK_GATED) {
		LOG_ERR("invalid operating mode");
		return -EINVAL;
	}

	if (i2s_cfg->frame_clk_freq != dev_audio_freq) {
		LOG_WRN("requested sampling rate differs from the rate "
			"configured in the device");
	}

	if (i2s_cfg->channels == 1) {
		channel_div = 2;
	} else if (i2s_cfg->channels == 2) {
		channel_div = 1;
	} else {
		LOG_ERR("invalid number of channels");
		return -EINVAL;
	}
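	/* a single block must hold one full fifo's worth of samples;
	 * mono streams need only half, as every other fifo word is
	 * dropped (RX) or duplicated (TX)
	 */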
	int req_buf_s =
		(cfg->fifo_depth * (i2s_cfg->word_size / 8)) / channel_div;

	if (i2s_cfg->block_size < req_buf_s) {
		LOG_ERR("not enough space to allocate single buffer");
		LOG_ERR("fifo requires at least %i bytes", req_buf_s);
		return -EINVAL;
	} else if (i2s_cfg->block_size != req_buf_s) {
		LOG_WRN("the buffer is greater than required, "
			"only %i bytes of data are valid", req_buf_s);
		/* The block_size field will be corrected to req_buf_s in the
		 * structure copied as stream configuration (see below).
		 */
	}

	int dev_sample_width = i2s_get_sample_width(cfg->base);

	if (i2s_cfg->word_size != 8U && i2s_cfg->word_size != 16U &&
	    i2s_cfg->word_size != 24U && i2s_cfg->word_size != 32U &&
	    i2s_cfg->word_size != dev_sample_width) {
		LOG_ERR("invalid word size");
		return -EINVAL;
	}

	int dev_format = i2s_get_format(cfg->base);

	if (dev_format != i2s_cfg->format) {
		LOG_ERR("unsupported I2S data format");
		return -EINVAL;
	}

#if CONFIG_I2S_LITEX_CHANNELS_CONCATENATED
#if CONFIG_I2S_LITEX_DATA_BIG_ENDIAN
	LOG_ERR("big endian data is not supported "
		"when channels are concatenated");
	return -EINVAL;
#endif
	if (channels_concatenated == 0) {
		LOG_ERR("invalid state: the device is configured to send "
			"channels with padding, please reconfigure the driver");
		return -EINVAL;
	}

	if (i2s_cfg->word_size != 16) {
		LOG_ERR("invalid word size, only 16-bit samples are "
			"supported in concatenated mode");
		return -EINVAL;
	}

#endif

	memcpy(&stream->cfg, i2s_cfg, sizeof(struct i2s_config));
	stream->cfg.block_size = req_buf_s;

	stream->state = I2S_STATE_READY;
	return 0;
}

static int i2s_litex_read(const struct device *dev, void **mem_block,
			  size_t *size)
{
	struct i2s_litex_data *const dev_data = dev->data;
	int ret;

	if (dev_data->rx.state == I2S_STATE_NOT_READY) {
		LOG_DBG("invalid state");
		return -ENOMEM;
	}
	/* wait for a filled buffer, respecting the configured timeout */
	ret = k_sem_take(&dev_data->rx.sem,
			 SYS_TIMEOUT_MS(dev_data->rx.cfg.timeout));
	if (ret < 0) {
		return ret;
	}
	/* Get data from the beginning of RX queue */
	return queue_get(&dev_data->rx.mem_block_queue, mem_block, size);
}

static int i2s_litex_write(const struct device *dev, void *mem_block,
			   size_t size)
{
	struct i2s_litex_data *const dev_data = dev->data;
	const struct i2s_litex_cfg *cfg = dev->config;
	int ret;

	if (dev_data->tx.state != I2S_STATE_RUNNING &&
	    dev_data->tx.state != I2S_STATE_READY) {
		LOG_DBG("invalid state");
		return -EIO;
	}
	/* wait for a free queue slot, respecting the configured timeout */
	ret = k_sem_take(&dev_data->tx.sem,
			 SYS_TIMEOUT_MS(dev_data->tx.cfg.timeout));
	if (ret < 0) {
		return ret;
	}
	/* Add data to the end of the TX queue */
	ret = queue_put(&dev_data->tx.mem_block_queue, mem_block, size);
	if (ret < 0) {
		return ret;
	}

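	/* if the stream is idle, enable the TX ready interrupt so the ISR
	 * starts draining the queue
	 */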
	if (dev_data->tx.state == I2S_STATE_READY) {
		i2s_irq_enable(cfg->base, I2S_EV_READY);
		dev_data->tx.state = I2S_STATE_RUNNING;
	}
	return ret;
}

static int i2s_litex_trigger(const struct device *dev, enum i2s_dir dir,
			     enum i2s_trigger_cmd cmd)
{
	struct i2s_litex_data *const dev_data = dev->data;
	const struct i2s_litex_cfg *const cfg = dev->config;
	struct stream *stream;

	if (dir == I2S_DIR_RX) {
		stream = &dev_data->rx;
	} else if (dir == I2S_DIR_TX) {
		stream = &dev_data->tx;
	} else if (dir == I2S_DIR_BOTH) {
		return -ENOSYS;
	} else {
		LOG_ERR("either RX or TX direction must be selected");
		return -EINVAL;
	}

	switch (cmd) {
	case I2S_TRIGGER_START:
		if (stream->state != I2S_STATE_READY) {
			LOG_ERR("START trigger: invalid state %d",
				stream->state);
			return -EIO;
		}
		__ASSERT_NO_MSG(stream->mem_block == NULL);
		i2s_reset_fifo(cfg->base);
		i2s_enable(cfg->base);
		i2s_irq_enable(cfg->base, I2S_EV_READY);
		stream->state = I2S_STATE_RUNNING;
		break;

	case I2S_TRIGGER_STOP:
		if (stream->state != I2S_STATE_RUNNING &&
		    stream->state != I2S_STATE_READY) {
			LOG_ERR("STOP trigger: invalid state");
			return -EIO;
		}
		i2s_disable(cfg->base);
		i2s_irq_disable(cfg->base, I2S_EV_READY);
		stream->state = I2S_STATE_READY;
		break;

	default:
		LOG_ERR("unsupported trigger command");
		return -EINVAL;
	}
	return 0;
}

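/**
 * @brief Drain the RX fifo
 *
 * Discards all samples currently held in the RX fifo and clears the
 * pending interrupt; used when no memory block can be allocated.
 *
 * @param cfg device configuration
 */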
static inline void clear_rx_fifo(const struct i2s_litex_cfg *cfg)
{
	for (int i = 0; i < I2S_RX_FIFO_DEPTH; i++) {
		sys_read32(I2S_RX_FIFO_ADDR);
	}
	i2s_clear_pending_irq(cfg->base);
}

static void i2s_litex_isr_rx(void *arg)
{
	const struct device *dev = (const struct device *)arg;
	const struct i2s_litex_cfg *cfg = dev->config;
	struct i2s_litex_data *data = dev->data;
	struct stream *stream = &data->rx;
	int ret;

	/* Prepare to receive the next data block */
	ret = k_mem_slab_alloc(stream->cfg.mem_slab, &stream->mem_block,
			       K_NO_WAIT);
	if (ret < 0) {
		clear_rx_fifo(cfg);
		return;
	}
	i2s_copy_from_fifo((uint8_t *)stream->mem_block, stream->cfg.block_size,
			   stream->cfg.word_size, stream->cfg.channels);
	i2s_clear_pending_irq(cfg->base);

	ret = queue_put(&stream->mem_block_queue, stream->mem_block,
			stream->cfg.block_size);
	if (ret < 0) {
		LOG_WRN("couldn't copy data from RX fifo to the ring buffer "
			"(no space left) - dropping a frame");
		/* free the dropped block so it is not leaked */
		k_mem_slab_free(stream->cfg.mem_slab, stream->mem_block);
		return;
	}

	k_sem_give(&stream->sem);
}

static void i2s_litex_isr_tx(void *arg)
{
	const struct device *dev = (const struct device *)arg;
	const struct i2s_litex_cfg *cfg = dev->config;
	struct i2s_litex_data *data = dev->data;
	size_t mem_block_size;
	struct stream *stream = &data->tx;
	int ret;

	ret = queue_get(&stream->mem_block_queue, &stream->mem_block,
			&mem_block_size);
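	/* no block queued - nothing left to send, so stop transmitting */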
	if (ret < 0) {
		i2s_irq_disable(cfg->base, I2S_EV_READY);
		stream->state = I2S_STATE_READY;
		return;
	}
	k_sem_give(&stream->sem);
	i2s_copy_to_fifo((uint8_t *)stream->mem_block, mem_block_size,
			 stream->cfg.word_size, stream->cfg.channels);
	i2s_clear_pending_irq(cfg->base);

	k_mem_slab_free(stream->cfg.mem_slab, stream->mem_block);
}

static DEVICE_API(i2s, i2s_litex_driver_api) = {
	.configure = i2s_litex_configure,
	.read = i2s_litex_read,
	.write = i2s_litex_write,
	.trigger = i2s_litex_trigger,
};

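/* Instantiate one unidirectional I2S device (RX or TX) from the matching
 * devicetree node (i2s_rx / i2s_tx)
 */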
#define I2S_INIT(dir)                                                          \
									       \
	static struct queue_item rx_ring_buf[CONFIG_I2S_LITEX_RX_BLOCK_COUNT]; \
	static struct queue_item tx_ring_buf[CONFIG_I2S_LITEX_TX_BLOCK_COUNT]; \
									       \
	static struct i2s_litex_data i2s_litex_data_##dir = {                  \
		.dir.mem_block_queue.buf = dir##_ring_buf,                     \
		.dir.mem_block_queue.len =                                     \
			sizeof(dir##_ring_buf) / sizeof(struct queue_item),    \
	};                                                                     \
									       \
	static void i2s_litex_irq_config_func_##dir(const struct device *dev); \
									       \
	static struct i2s_litex_cfg i2s_litex_cfg_##dir = {                    \
		.base = DT_REG_ADDR(DT_NODELABEL(i2s_##dir)),                  \
		.fifo_base =                                                   \
			DT_REG_ADDR_BY_NAME(DT_NODELABEL(i2s_##dir), fifo),    \
		.fifo_depth = DT_PROP(DT_NODELABEL(i2s_##dir), fifo_depth),    \
		.irq_config = i2s_litex_irq_config_func_##dir                  \
	};                                                                     \
	DEVICE_DT_DEFINE(DT_NODELABEL(i2s_##dir), i2s_litex_initialize,        \
				NULL, &i2s_litex_data_##dir,                   \
				&i2s_litex_cfg_##dir, POST_KERNEL,             \
				CONFIG_I2S_INIT_PRIORITY,                      \
				&i2s_litex_driver_api);                        \
									       \
	static void i2s_litex_irq_config_func_##dir(const struct device *dev)  \
	{                                                                      \
		IRQ_CONNECT(DT_IRQN(DT_NODELABEL(i2s_##dir)),                  \
					DT_IRQ(DT_NODELABEL(i2s_##dir),        \
						priority),                     \
					i2s_litex_isr_##dir,                   \
					DEVICE_DT_GET(DT_NODELABEL(i2s_##dir)), 0);\
		irq_enable(DT_IRQN(DT_NODELABEL(i2s_##dir)));                  \
	}

#if DT_NODE_HAS_STATUS_OKAY(DT_NODELABEL(i2s_rx))
I2S_INIT(rx);
#endif
#if DT_NODE_HAS_STATUS_OKAY(DT_NODELABEL(i2s_tx))
I2S_INIT(tx);
#endif