/*
 * Copyright 2021,2023-2024 NXP Semiconductor INC.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <zephyr/dt-bindings/clock/imx_ccm.h>
/* This indicates the Tx/Rx stream. */
struct stream tx;	/* member of the driver data */
struct stream rx;	/* member of the driver data */
/* i2s_purge_stream_buffers() */
	while (k_msgq_get(&strm->in_queue, &buffer, K_NO_WAIT) == 0) {

	while (k_msgq_get(&strm->out_queue, &buffer, K_NO_WAIT) == 0) {
/* i2s_tx_stream_disable() */
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->tx;
	const struct device *dev_dma = dev_data->dev_dma;
	const struct i2s_mcux_config *dev_cfg = dev->config;

	LOG_DBG("Stopping DMA channel %u for TX stream", strm->dma_channel);

	SAI_TxEnableDMA(dev_cfg->base, kSAI_FIFORequestDMAEnable, false);

	dma_stop(dev_dma, strm->dma_channel);

	/* wait for TX FIFO to drain before disabling */
	while ((dev_cfg->base->TCSR & I2S_TCSR_FWF_MASK) == 0) {
		;
	}

	dev_cfg->base->TCR3 &= ~I2S_TCR3_TCE_MASK;

	/* Disable Tx */
	SAI_TxEnable(dev_cfg->base, false);

	/* If Tx is disabled, reset the FIFO pointer, clear error flags */
	if ((dev_cfg->base->TCSR & I2S_TCSR_TE_MASK) == 0UL) {
		dev_cfg->base->TCSR |= (I2S_TCSR_FR_MASK | I2S_TCSR_SR_MASK);
		dev_cfg->base->TCSR &= ~I2S_TCSR_SR_MASK;
	}

	i2s_purge_stream_buffers(strm, dev_data->tx.cfg.mem_slab, true, true);
/* i2s_rx_stream_disable() */
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->rx;
	const struct device *dev_dma = dev_data->dev_dma;
	const struct i2s_mcux_config *dev_cfg = dev->config;

	LOG_DBG("Stopping RX stream & DMA channel %u", strm->dma_channel);
	dma_stop(dev_dma, strm->dma_channel);

	dev_cfg->base->RCR3 &= ~I2S_RCR3_RCE_MASK;

	SAI_RxEnableDMA(dev_cfg->base, kSAI_FIFORequestDMAEnable, false);

	/* Disable Rx */
	SAI_RxEnable(dev_cfg->base, false);

	/* wait for the receiver to actually disable before resetting the FIFO */
	while (dev_cfg->base->RCSR & I2S_RCSR_RE_MASK) {
		;
	}

	dev_cfg->base->RCSR |= (I2S_RCSR_FR_MASK | I2S_RCSR_SR_MASK);
	dev_cfg->base->RCSR &= ~I2S_RCSR_SR_MASK;

	i2s_purge_stream_buffers(strm, dev_data->rx.cfg.mem_slab, in_drop, out_drop);
/* i2s_tx_reload_multiple_dma_blocks() */
	struct i2s_dev_data *dev_data = dev->data;
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;
	struct stream *strm = &dev_data->tx;

	while (strm->free_tx_dma_blocks) {
		ret = k_msgq_get(&strm->in_queue, &buffer, K_NO_WAIT);

		ret = dma_reload(dev_data->dev_dma, strm->dma_channel, (uint32_t)buffer,
				 (uint32_t)&base->TDR[strm->start_channel], strm->cfg.block_size);

		(strm->free_tx_dma_blocks)--;

		ret = k_msgq_put(&strm->out_queue, &buffer, K_NO_WAIT);
		LOG_ERR("buffer %p -> out %p err %d", buffer, &strm->out_queue, ret);
/* i2s_dma_tx_callback() */
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->tx;

	LOG_DBG("tx cb");

	ret = k_msgq_get(&strm->out_queue, &buffer, K_NO_WAIT);

	k_mem_slab_free(strm->cfg.mem_slab, buffer);
	(strm->free_tx_dma_blocks)++;

	if (strm->free_tx_dma_blocks > MAX_TX_DMA_BLOCKS) {
		strm->state = I2S_STATE_ERROR;
		LOG_ERR("free_tx_dma_blocks exceeded maximum, now %d", strm->free_tx_dma_blocks);

	/* Received a STOP trigger, terminate TX immediately */
	if (strm->last_block) {
		strm->state = I2S_STATE_READY;
		LOG_DBG("TX STOPPED last_block set");

	strm->state = I2S_STATE_ERROR;

	switch (strm->state) {

	strm->state = I2S_STATE_ERROR;

	if (blocks_queued || (strm->free_tx_dma_blocks < MAX_TX_DMA_BLOCKS)) {

	if (strm->state == I2S_STATE_STOPPING) {
		/* TX queue has drained */
		strm->state = I2S_STATE_READY;
		LOG_DBG("TX stream has stopped");
	} else {
		strm->state = I2S_STATE_ERROR;
		LOG_ERR("TX Failed to reload DMA");
/* i2s_dma_rx_callback() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->rx;

	LOG_DBG("RX cb");

	switch (strm->state) {

	ret = k_msgq_get(&strm->in_queue, &buffer, K_NO_WAIT);

	ret = k_msgq_put(&strm->out_queue, &buffer, K_NO_WAIT);
	if (ret) {
		LOG_ERR("buffer %p -> out_queue %p err %d", buffer, &strm->out_queue, ret);
	}

	strm->state = I2S_STATE_ERROR;

	if (strm->state == I2S_STATE_RUNNING) {
		ret = k_mem_slab_alloc(strm->cfg.mem_slab, &buffer, K_NO_WAIT);
		if (ret) {
			LOG_ERR("buffer alloc from slab %p err %d", strm->cfg.mem_slab, ret);
			strm->state = I2S_STATE_ERROR;
		}

		uint32_t data_path = strm->start_channel;

		ret = dma_reload(dev_data->dev_dma, strm->dma_channel,
				 (uint32_t)&base->RDR[data_path], (uint32_t)buffer,
				 strm->cfg.block_size);

		strm->state = I2S_STATE_ERROR;

		ret = k_msgq_put(&strm->in_queue, &buffer, K_NO_WAIT);
		if (ret) {
			LOG_ERR("%p -> in_queue %p err %d", buffer, &strm->in_queue, ret);
		}

	strm->state = I2S_STATE_READY;
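	/*
	 * RX completion flow above: the filled buffer moves from in_queue to
	 * out_queue (where i2s_read() collects it), a fresh buffer is taken
	 * from the slab, the DMA is reloaded from the SAI RDR FIFO register
	 * into that buffer, and the new buffer is queued back on in_queue.
	 */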
/* enable_mclk_direction() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	uint32_t offset = dev_cfg->mclk_pin_offset;
	uint32_t mask = dev_cfg->mclk_pin_mask;
	uint32_t *base = (uint32_t *)(dev_cfg->mclk_control_base + offset);
/* get_mclk_rate() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	const struct device *ccm_dev = dev_cfg->ccm_dev;
	clock_control_subsys_t clk_sub_sys = dev_cfg->clk_sub_sys;
/* i2s_mcux_config() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;
	struct i2s_dev_data *dev_data = dev->data;

	uint8_t num_words = i2s_cfg->channels;
	uint8_t word_size_bits = i2s_cfg->word_size;

	if ((dev_data->tx.state != I2S_STATE_NOT_READY) &&
	    (dev_data->tx.state != I2S_STATE_READY) &&
	    (dev_data->rx.state != I2S_STATE_NOT_READY) &&
	    (dev_data->rx.state != I2S_STATE_READY)) {
		LOG_ERR("invalid state tx(%u) rx(%u)", dev_data->tx.state, dev_data->rx.state);
		dev_data->tx.state = I2S_STATE_NOT_READY;
		dev_data->rx.state = I2S_STATE_NOT_READY;
		return -EINVAL;
	}

	if (i2s_cfg->frame_clk_freq == 0U) {
		LOG_ERR("Invalid frame_clk_freq %u", i2s_cfg->frame_clk_freq);
		dev_data->tx.state = I2S_STATE_NOT_READY;
		dev_data->rx.state = I2S_STATE_NOT_READY;

	/* further parameter-validation failure paths */
		dev_data->tx.state = I2S_STATE_NOT_READY;
		dev_data->rx.state = I2S_STATE_NOT_READY;
		return -EINVAL;

		dev_data->tx.state = I2S_STATE_NOT_READY;
		dev_data->rx.state = I2S_STATE_NOT_READY;
		return -EINVAL;

	if ((i2s_cfg->options & I2S_OPT_PINGPONG) == I2S_OPT_PINGPONG) {
		LOG_ERR("Ping-pong mode not supported");
		dev_data->tx.state = I2S_STATE_NOT_READY;
		dev_data->rx.state = I2S_STATE_NOT_READY;
		return -ENOTSUP;
	}

	const bool is_mclk_slave = i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE;
	switch (i2s_cfg->format & I2S_FMT_DATA_FORMAT_MASK) {
	case I2S_FMT_DATA_FORMAT_I2S:
		SAI_GetClassicI2SConfig(&config, word_size_bits, kSAI_Stereo, dev_cfg->tx_channel);
		break;
	/*
	 * The remaining supported data formats are configured the same way,
	 * each helper call taking dev_cfg->tx_channel as its last argument.
	 */

	/* unsupported data format */
		dev_data->tx.state = I2S_STATE_NOT_READY;
		dev_data->rx.state = I2S_STATE_NOT_READY;
		return -EINVAL;
	}
	/* TX */
	if (dev_cfg->tx_sync_mode) {

	/* RX */
	if (dev_cfg->rx_sync_mode) {

	if (i2s_cfg->options & I2S_OPT_FRAME_CLK_SLAVE) {
		if (i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE) {

	if (i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE) {

	switch (i2s_cfg->format & I2S_FMT_CLK_FORMAT_MASK) {

	/* Swap bclk polarity */

	/* Swap frame sync polarity */

	/* Swap frame sync and bclk polarity */

	if ((i2s_cfg->format & I2S_FMT_DATA_FORMAT_MASK) != I2S_FMT_DATA_FORMAT_PCM_SHORT) {
	memcpy(&dev_data->tx.cfg, i2s_cfg, sizeof(struct i2s_config));
	LOG_DBG("tx slab free_list = 0x%x", (uint32_t)i2s_cfg->mem_slab->free_list);
	LOG_DBG("tx slab num_blocks = %d", (uint32_t)i2s_cfg->mem_slab->info.num_blocks);
	LOG_DBG("tx slab block_size = %d", (uint32_t)i2s_cfg->mem_slab->info.block_size);
	LOG_DBG("tx slab buffer = 0x%x", (uint32_t)i2s_cfg->mem_slab->buffer);

	dev_data->tx.start_channel = config.startChannel;

	base->TCR3 &= ~I2S_TCR3_TCE_MASK;
	SAI_TxSetBitClockRate(base, mclk, i2s_cfg->frame_clk_freq, word_size_bits,
			      i2s_cfg->channels);
	LOG_DBG("tx start_channel = %d", dev_data->tx.start_channel);

	dev_data->tx.dma_cfg.source_data_size = word_size_bits / 8;
	dev_data->tx.dma_cfg.dest_data_size = word_size_bits / 8;
	dev_data->tx.dma_cfg.source_burst_length = i2s_cfg->word_size / 8;
	dev_data->tx.dma_cfg.dest_burst_length = i2s_cfg->word_size / 8;
	dev_data->tx.dma_cfg.user_data = (void *)dev;
	dev_data->tx.state = I2S_STATE_READY;

	/* For RX, DMA reads from FIFO whenever data present */

	memcpy(&dev_data->rx.cfg, i2s_cfg, sizeof(struct i2s_config));
	LOG_DBG("rx slab free_list = 0x%x", (uint32_t)i2s_cfg->mem_slab->free_list);
	LOG_DBG("rx slab num_blocks = %d", (uint32_t)i2s_cfg->mem_slab->info.num_blocks);
	LOG_DBG("rx slab block_size = %d", (uint32_t)i2s_cfg->mem_slab->info.block_size);
	LOG_DBG("rx slab buffer = 0x%x", (uint32_t)i2s_cfg->mem_slab->buffer);

	dev_data->rx.start_channel = config.startChannel;
	SAI_RxSetBitClockRate(base, mclk, i2s_cfg->frame_clk_freq, word_size_bits,
			      i2s_cfg->channels);
	LOG_DBG("rx start_channel = %d", dev_data->rx.start_channel);

	dev_data->rx.dma_cfg.source_data_size = word_size_bits / 8;
	dev_data->rx.dma_cfg.dest_data_size = word_size_bits / 8;
	dev_data->rx.dma_cfg.source_burst_length = i2s_cfg->word_size / 8;
	dev_data->rx.dma_cfg.dest_burst_length = i2s_cfg->word_size / 8;
	dev_data->rx.dma_cfg.user_data = (void *)dev;
	dev_data->rx.state = I2S_STATE_READY;
/* i2s_mcux_config_get() */
	struct i2s_dev_data *dev_data = dev->data;

	if (dir == I2S_DIR_RX) {
		return &dev_data->rx.cfg;
	}

	return &dev_data->tx.cfg;
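/*
 * Illustrative application-side sketch (not part of this driver): one way a
 * caller might build the struct i2s_config consumed by i2s_mcux_config()
 * above, using the generic Zephyr I2S API. The app_i2s_setup()/app_i2s_slab
 * names, the 48 kHz / 16-bit / stereo parameters, the slab sizing and the
 * TX direction are assumptions chosen only for the example.
 */
#include <zephyr/drivers/i2s.h>

#define APP_SAMPLE_BIT_WIDTH 16
#define APP_NUM_CHANNELS     2
#define APP_BLOCK_SIZE       (96 * APP_NUM_CHANNELS * sizeof(int16_t))
#define APP_NUM_BLOCKS       8

/* backing store handed to the driver through i2s_cfg->mem_slab */
K_MEM_SLAB_DEFINE(app_i2s_slab, APP_BLOCK_SIZE, APP_NUM_BLOCKS, 4);

static int app_i2s_setup(const struct device *i2s_dev)
{
	struct i2s_config cfg = {
		.word_size = APP_SAMPLE_BIT_WIDTH,
		.channels = APP_NUM_CHANNELS,
		.format = I2S_FMT_DATA_FORMAT_I2S,
		.options = I2S_OPT_FRAME_CLK_MASTER | I2S_OPT_BIT_CLK_MASTER,
		.frame_clk_freq = 48000,
		.mem_slab = &app_i2s_slab,
		.block_size = APP_BLOCK_SIZE,
		.timeout = 1000, /* ms */
	};

	return i2s_configure(i2s_dev, I2S_DIR_TX, &cfg);
}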
/* i2s_tx_stream_start() */
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->tx;
	const struct device *dev_dma = dev_data->dev_dma;
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;

	ret = k_msgq_get(&strm->in_queue, &buffer, K_NO_WAIT);
	if (ret != 0) {
		return -EIO;
	}

	LOG_DBG("tx stream start");

	strm->free_tx_dma_blocks = MAX_TX_DMA_BLOCKS;

	/* Configure the DMA with the first TX block */
	struct dma_block_config *blk_cfg = &strm->dma_block;

	uint32_t data_path = strm->start_channel;

	blk_cfg->dest_address = (uint32_t)&base->TDR[data_path];
	blk_cfg->source_address = (uint32_t)buffer;
	blk_cfg->block_size = strm->cfg.block_size;
	blk_cfg->dest_scatter_en = 1;

	strm->dma_cfg.block_count = 1;
	strm->dma_cfg.head_block = &strm->dma_block;
	strm->dma_cfg.user_data = (void *)dev;

	(strm->free_tx_dma_blocks)--;
	dma_config(dev_dma, strm->dma_channel, &strm->dma_cfg);

	ret = k_msgq_put(&strm->out_queue, &buffer, K_NO_WAIT);

	ret = dma_start(dev_dma, strm->dma_channel);

	base->TCR3 |= I2S_TCR3_TCE(1UL << strm->start_channel);

	/* Enable SAI Tx clock */
/* i2s_rx_stream_start() */
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->rx;
	const struct device *dev_dma = dev_data->dev_dma;
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;

	num_of_bufs = k_mem_slab_num_free_get(strm->cfg.mem_slab);

	/* Need at least NUM_DMA_BLOCKS_RX_PREP buffers on the RX memory slab */
	if (num_of_bufs < NUM_DMA_BLOCKS_RX_PREP) {
		return -EINVAL;
	}

	ret = k_mem_slab_alloc(strm->cfg.mem_slab, &buffer, K_NO_WAIT);

	struct dma_block_config *blk_cfg = &strm->dma_block;

	uint32_t data_path = strm->start_channel;

	blk_cfg->dest_address = (uint32_t)buffer;
	blk_cfg->source_address = (uint32_t)&base->RDR[data_path];
	blk_cfg->block_size = strm->cfg.block_size;

	blk_cfg->source_gather_en = 1;

	strm->dma_cfg.block_count = 1;
	strm->dma_cfg.head_block = &strm->dma_block;
	strm->dma_cfg.user_data = (void *)dev;

	dma_config(dev_dma, strm->dma_channel, &strm->dma_cfg);

	ret = k_msgq_put(&strm->in_queue, &buffer, K_NO_WAIT);

	/* prep DMA for each of remaining (NUM_DMA_BLOCKS_RX_PREP-1) buffers */
	for (int i = 0; i < NUM_DMA_BLOCKS_RX_PREP - 1; i++) {

		ret = k_mem_slab_alloc(strm->cfg.mem_slab, &buffer, K_NO_WAIT);

		ret = dma_reload(dev_dma, strm->dma_channel, (uint32_t)&base->RDR[data_path],
				 (uint32_t)buffer, blk_cfg->block_size);

		ret = k_msgq_put(&strm->in_queue, &buffer, K_NO_WAIT);
	}

	LOG_DBG("Starting DMA Ch%u", strm->dma_channel);
	ret = dma_start(dev_dma, strm->dma_channel);
	if (ret < 0) {
		LOG_ERR("Failed to start DMA Ch%d (%d)", strm->dma_channel, ret);
	}

	base->RCR3 |= I2S_RCR3_RCE(1UL << strm->start_channel);

	/* Enable SAI Rx clock */
/* i2s_mcux_trigger() */
	struct i2s_dev_data *dev_data = dev->data;

	return -ENOSYS;

	strm = (dir == I2S_DIR_TX) ? &dev_data->tx : &dev_data->rx;

	/* I2S_TRIGGER_START */
	if (strm->state != I2S_STATE_READY) {
		LOG_ERR("START trigger: invalid state %u", strm->state);
		ret = -EIO;

	ret = -EIO;

	strm->state = I2S_STATE_RUNNING;
	strm->last_block = false;

	/* I2S_TRIGGER_DROP */
	if (strm->state == I2S_STATE_NOT_READY) {
		LOG_ERR("DROP trigger: invalid state %d", strm->state);
		ret = -EIO;

	strm->state = I2S_STATE_READY;

	/* I2S_TRIGGER_STOP */
	if (strm->state != I2S_STATE_RUNNING) {
		LOG_ERR("STOP trigger: invalid state %d", strm->state);
		ret = -EIO;

	strm->state = I2S_STATE_STOPPING;
	strm->last_block = true;

	/* I2S_TRIGGER_DRAIN */
	if (strm->state != I2S_STATE_RUNNING) {
		LOG_ERR("DRAIN/STOP trigger: invalid state %d", strm->state);
		ret = -EIO;

	strm->state = I2S_STATE_STOPPING;

	/* I2S_TRIGGER_PREPARE */
	if (strm->state != I2S_STATE_ERROR) {
		LOG_ERR("PREPARE trigger: invalid state %d", strm->state);
		ret = -EIO;

	strm->state = I2S_STATE_READY;

	ret = -EINVAL;
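/*
 * Illustrative sketch (not part of the driver): a trigger sequence an
 * application might issue against the state machine handled by
 * i2s_mcux_trigger() above. app_i2s_run_tx() is a hypothetical helper; the
 * device is assumed to be configured as in the earlier example.
 */
static int app_i2s_run_tx(const struct device *i2s_dev)
{
	int ret;

	/* READY -> RUNNING; the driver needs at least one block queued via i2s_write() first */
	ret = i2s_trigger(i2s_dev, I2S_DIR_TX, I2S_TRIGGER_START);
	if (ret < 0) {
		return ret;
	}

	/* ... keep feeding blocks with i2s_write() while RUNNING ... */

	/* RUNNING -> STOPPING -> READY once the queued blocks have drained */
	return i2s_trigger(i2s_dev, I2S_DIR_TX, I2S_TRIGGER_DRAIN);
}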
/* i2s_mcux_read() */
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->rx;

	if (strm->state == I2S_STATE_NOT_READY) {
		LOG_ERR("invalid state %d", strm->state);
		return -EIO;
	}

	status = k_msgq_get(&strm->out_queue, &buffer, SYS_TIMEOUT_MS(strm->cfg.timeout));
	if (status != 0) {
		if (strm->state == I2S_STATE_ERROR) {
			ret = -EIO;
		} else {
			ret = -EAGAIN;
		}
		return ret;
	}

	*size = strm->cfg.block_size;
/* i2s_mcux_write() */
	struct i2s_dev_data *dev_data = dev->data;
	struct stream *strm = &dev_data->tx;

	if (strm->state != I2S_STATE_RUNNING && strm->state != I2S_STATE_READY) {
		LOG_ERR("invalid state (%d)", strm->state);
		return -EIO;
	}

	ret = k_msgq_put(&strm->in_queue, &mem_block, SYS_TIMEOUT_MS(strm->cfg.timeout));
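/*
 * Illustrative sketch (not part of the driver): how an application moves
 * data through the queues serviced by i2s_mcux_read()/i2s_mcux_write().
 * Per the Zephyr I2S contract, blocks written are freed by the driver after
 * transmission and blocks read must be released by the application. The
 * echo helper below assumes both directions were configured with the same
 * memory slab, so a received block can be handed straight to TX.
 */
static int app_i2s_echo_once(const struct device *i2s_dev)
{
	void *block;
	size_t size;
	int ret;

	/* take one filled block from the RX out_queue */
	ret = i2s_read(i2s_dev, &block, &size);
	if (ret < 0) {
		return ret;
	}

	/* queue the same block for TX; the driver frees it once sent */
	return i2s_write(i2s_dev, block, size);
}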
/* sai_driver_irq() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;

	if ((base->TCSR & I2S_TCSR_FEF_MASK) == I2S_TCSR_FEF_MASK) {
		LOG_DBG("sai tx error occurred");

	if ((base->RCSR & I2S_RCSR_FEF_MASK) == I2S_RCSR_FEF_MASK) {
		LOG_DBG("sai rx error occurred");
/* i2s_mcux_isr() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;

	if ((base->RCSR & I2S_RCSR_FEF_MASK) == I2S_RCSR_FEF_MASK) {

	if ((base->TCSR & I2S_TCSR_FEF_MASK) == I2S_TCSR_FEF_MASK) {

	/*
	 * Add for ARM errata 838869, affects Cortex-M4, Cortex-M4F:
	 * "Store immediate overlapping exception return operation
	 * might vector to incorrect interrupt."
	 */
/* audio_clock_settings() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	uint32_t clock_name = (uint32_t)dev_cfg->clk_sub_sys;

	imxrt_audio_codec_pll_init(clock_name, dev_cfg->clk_src, dev_cfg->clk_pre_div,
				   dev_cfg->clk_src_div);

	/* SoC-series-specific audio PLL setup (alternative branches) */
	audioPllConfig.loopDivider = dev_cfg->pll_lp;
	audioPllConfig.postDivider = dev_cfg->pll_pd;
	audioPllConfig.numerator = dev_cfg->pll_num;
	audioPllConfig.denominator = dev_cfg->pll_den;

	audioPllConfig.src = dev_cfg->pll_src;
	audioPllConfig.loopDivider = dev_cfg->pll_lp;
	audioPllConfig.postDivider = dev_cfg->pll_pd;
	audioPllConfig.numerator = dev_cfg->pll_num;
	audioPllConfig.denominator = dev_cfg->pll_den;

#error Initialize SOC Series-specific clock_audio_pll_config_t
/* i2s_mcux_initialize() */
	const struct i2s_mcux_config *dev_cfg = dev->config;
	I2S_Type *base = (I2S_Type *)dev_cfg->base;
	struct i2s_dev_data *dev_data = dev->data;

	if (!dev_data->dev_dma) {
		return -ENODEV;
	}

	k_msgq_init(&dev_data->tx.in_queue, (char *)dev_data->tx_in_msgs, sizeof(void *),
	k_msgq_init(&dev_data->rx.in_queue, (char *)dev_data->rx_in_msgs, sizeof(void *),
	k_msgq_init(&dev_data->tx.out_queue, (char *)dev_data->tx_out_msgs, sizeof(void *),
	k_msgq_init(&dev_data->rx.out_queue, (char *)dev_data->rx_out_msgs, sizeof(void *),

	dev_cfg->irq_connect(dev);

	err = pinctrl_apply_state(dev_cfg->pinctrl, PINCTRL_STATE_DEFAULT);

	dev_data->tx.state = I2S_STATE_NOT_READY;
	dev_data->rx.state = I2S_STATE_NOT_READY;

	LOG_INF("Device %s initialized", dev->name);
/* per-instance driver data initializer (device instantiation macro) */
	.dev_dma = DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(i2s_id, rx)), \
	.tx = \
			.head_block = &i2s_##i2s_id##_data.tx.dma_block, \
			.dma_slot = DT_INST_DMAS_CELL_BY_NAME(i2s_id, tx, \
	.rx = \
			.head_block = &i2s_##i2s_id##_data.rx.dma_block, \
			.dma_slot = DT_INST_DMAS_CELL_BY_NAME(i2s_id, rx, \