Lines matching +full:tx +full:-channel in the Zephyr i2s_mcux_flexcomm I2S driver (NXP MCUX FLEXCOMM)
 * SPDX-License-Identifier: Apache-2.0

From the driver's private data structures:
    uint32_t channel; /* stores the channel for dma */
    struct stream tx;
    /* For tx, the in queue is for requests generated by
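Only two members show up in the matched lines; the rest of the per-direction bookkeeping can be inferred from fields referenced later in this listing. A rough sketch, where field order and exact types are assumptions:

    #include <zephyr/kernel.h>
    #include <zephyr/drivers/dma.h>
    #include <zephyr/drivers/i2s.h>

    /* Hypothetical reconstruction for illustration; only the field names that
     * appear later in this listing are grounded, everything else is assumed. */
    struct stream {
        int state;                    /* holds I2S_STATE_* values */
        const struct device *dev_dma; /* DMA controller servicing this stream */
        uint32_t channel;             /* stores the channel for dma */
        struct dma_config dma_cfg;    /* passed to dma_config()/dma_start() */
        struct i2s_config cfg;        /* copy of the last applied i2s_config */
        bool last_block;              /* set by the STOP trigger */
        struct k_msgq in_queue;       /* buffers queued towards the hardware */
        struct k_msgq out_queue;      /* buffers in flight / ready for the app */
    };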
In i2s_mcux_flexcomm_cfg_convert():
    fsl_cfg->dataLength = i2s_cfg->word_size;
    if ((i2s_cfg->format & I2S_FMT_DATA_FORMAT_MASK) ==
    fsl_cfg->frameLength = 2 * i2s_cfg->word_size;
    fsl_cfg->frameLength = i2s_cfg->channels * i2s_cfg->word_size;
    if (fsl_cfg->dataLength < 4 || fsl_cfg->dataLength > 32) {
        return -EINVAL;
    if (fsl_cfg->frameLength < 4 || fsl_cfg->frameLength > 2048) {
        return -EINVAL;
    switch (i2s_cfg->options & (I2S_OPT_BIT_CLK_SLAVE |
        fsl_cfg->masterSlave = kI2S_MasterSlaveNormalMaster;
        fsl_cfg->masterSlave = kI2S_MasterSlaveNormalSlave;
        fsl_cfg->masterSlave = kI2S_MasterSlaveExtSckMaster;
        fsl_cfg->masterSlave = kI2S_MasterSlaveWsSyncMaster;
    switch (i2s_cfg->format & I2S_FMT_DATA_FORMAT_MASK) {
        fsl_cfg->mode = kI2S_ModeI2sClassic;
        fsl_cfg->mode = kI2S_ModeDspWsShort;
        fsl_cfg->wsPol = true;
        fsl_cfg->mode = kI2S_ModeDspWsLong;
        fsl_cfg->wsPol = true;
        fsl_cfg->mode = kI2S_ModeDspWs50;
        fsl_cfg->wsPol = true;
        return -EINVAL;
    if (fsl_cfg->masterSlave == kI2S_MasterSlaveNormalMaster ||
        fsl_cfg->masterSlave == kI2S_MasterSlaveWsSyncMaster) {
        fsl_cfg->divider = base_frequency /
                           i2s_cfg->frame_clk_freq /
                           fsl_cfg->frameLength;
    switch (i2s_cfg->format & I2S_FMT_CLK_FORMAT_MASK) {
        fsl_cfg->sckPol = !fsl_cfg->sckPol;
        fsl_cfg->wsPol = !fsl_cfg->wsPol;
        fsl_cfg->sckPol = !fsl_cfg->sckPol;
        fsl_cfg->wsPol = !fsl_cfg->wsPol;
        return -EINVAL;
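In the master cases above the bit-clock divider follows directly from the source clock, the frame clock and the frame length. A small worked sketch; the 24.576 MHz FLEXCOMM clock and the 48 kHz / 16-bit / stereo figures are example values, not taken from the listing:

    /* Classic I2S, 16-bit stereo: frameLength = 2 * word_size = 32 bits. */
    uint32_t base_frequency = 24576000U; /* example FLEXCOMM function clock */
    uint32_t frame_clk_freq = 48000U;    /* i2s_cfg->frame_clk_freq */
    uint32_t frame_length   = 2U * 16U;  /* fsl_cfg->frameLength */

    /* Same integer math as the driver: 24576000 / 48000 / 32 = 16,
     * giving a 1.536 MHz bit clock (48 kHz * 32 bits per frame). */
    uint32_t divider = base_frequency / frame_clk_freq / frame_length;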
In i2s_mcux_config_get():
    struct i2s_mcux_data *dev_data = dev->data;
    stream = &dev_data->rx;
    stream = &dev_data->tx;
    if (stream->state == I2S_STATE_NOT_READY) {
    return &stream->cfg;
In i2s_mcux_configure():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *dev_data = dev->data;
    stream = &dev_data->rx;
    stream = &dev_data->tx;
    return -ENOSYS;
    LOG_ERR("Either RX or TX direction must be selected");
    return -EINVAL;
    if (stream->state != I2S_STATE_NOT_READY &&
        stream->state != I2S_STATE_READY) {
        return -EINVAL;
    if (i2s_cfg->frame_clk_freq == 0U) {
        stream->state = I2S_STATE_NOT_READY;
     * However for 8-bit word_size the I2S hardware expects the data
    if (i2s_cfg->word_size <= 8) {
        return -ENOTSUP;
    if (!device_is_ready(cfg->clock_dev)) {
        return -ENODEV;
    if (clock_control_get_rate(cfg->clock_dev,
                               cfg->clock_subsys, &base_frequency)) {
        return -EINVAL;
    I2S_RxInit(cfg->base, &fsl_cfg);
    I2S_TxInit(cfg->base, &fsl_cfg);
    if ((i2s_cfg->channels > 2) &&
        (i2s_cfg->format & I2S_FMT_DATA_FORMAT_MASK) !=
     * secondary channel pairs.
        for (uint32_t slot = 1; slot < i2s_cfg->channels / 2; slot++) {
             * with previous channel pair. Each channel pair
            I2S_EnableSecondaryChannel(cfg->base, slot - 1, false,
                                       i2s_cfg->word_size * 2 * slot);
        return -ENOTSUP;
     * If the data length is 4-16, the FIFO should be filled
     * with two 16-bit values (one for left, one for right channel).
     * If the data length is 17-24, the FIFO should be filled with two
     * 24-bit values (one for left, one for right channel). We can just transfer
     * If the data length is 25-32, the FIFO should be filled
     * with one 32-bit value. First value is left channel, second is right.
    stream->dma_cfg.dest_data_size = 4U;
    stream->dma_cfg.source_data_size = 4U;
    memcpy(&stream->cfg, i2s_cfg, sizeof(struct i2s_config));
    stream->state = I2S_STATE_READY;
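From the application side this conversion is reached through the generic Zephyr I2S API. A minimal sketch of configuring the TX direction; the slab size, block count and clock figures are example values:

    #include <zephyr/kernel.h>
    #include <zephyr/drivers/i2s.h>

    #define BLOCK_SIZE 512 /* bytes per audio block, example value */
    K_MEM_SLAB_DEFINE(tx_slab, BLOCK_SIZE, 4, 32);

    static int app_configure_tx(const struct device *i2s_dev)
    {
        struct i2s_config cfg = {
            .word_size = 16,                /* must be > 8 bits, see above */
            .channels = 2,
            .format = I2S_FMT_DATA_FORMAT_I2S,
            .options = I2S_OPT_BIT_CLK_MASTER | I2S_OPT_FRAME_CLK_MASTER,
            .frame_clk_freq = 48000,        /* must be non-zero */
            .mem_slab = &tx_slab,
            .block_size = BLOCK_SIZE,
            .timeout = 1000,                /* ms, used via SYS_TIMEOUT_MS() */
        };

        /* On success the TX stream moves from NOT_READY to READY. */
        return i2s_configure(i2s_dev, I2S_DIR_TX, &cfg);
    }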
In i2s_purge_stream_buffers():
    bool tx)
    if (tx) {
        while (k_msgq_get(&stream->in_queue, &queue_entry, K_NO_WAIT) == 0) {
    while (k_msgq_get(&stream->in_queue, &buffer, K_NO_WAIT) == 0) {
    while (k_msgq_get(&stream->out_queue, &buffer, K_NO_WAIT) == 0) {
In i2s_mcux_tx_stream_disable():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->tx;
    I2S_Type *base = cfg->base;
    LOG_DBG("Stopping DMA channel %u for TX stream", stream->channel);
    dma_stop(stream->dev_dma, stream->channel);
    /* Clear TX error interrupt flag */
    base->FIFOSTAT = I2S_FIFOSTAT_TXERR(1U);
    if (base->CFG1 & I2S_CFG1_MAINENABLE_MASK) {
        while ((base->FIFOSTAT & I2S_FIFOSTAT_TXEMPTY_MASK) == 0U) {
        base->FIFOWR = 0U;
        while ((base->FIFOSTAT & I2S_FIFOSTAT_TXEMPTY_MASK) == 0U) {
    /* Disable TX DMA */
    base->FIFOCFG &= (~I2S_FIFOCFG_DMATX_MASK);
    base->FIFOCFG |= I2S_FIFOCFG_EMPTYTX_MASK;
    i2s_purge_stream_buffers(stream, stream->cfg.mem_slab, true);
In i2s_mcux_rx_stream_disable():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->rx;
    I2S_Type *base = cfg->base;
    LOG_DBG("Stopping DMA channel %u for RX stream", stream->channel);
    dma_stop(stream->dev_dma, stream->channel);
    base->FIFOSTAT = I2S_FIFOSTAT_RXERR(1U);
    base->FIFOCFG &= (~I2S_FIFOCFG_DMARX_MASK);
    base->FIFOCFG |= I2S_FIFOCFG_EMPTYRX_MASK;
    i2s_purge_stream_buffers(stream, stream->cfg.mem_slab, false);
In i2s_mcux_config_dma_blocks():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *dev_data = dev->data;
    I2S_Type *base = cfg->base;
    stream = &dev_data->rx;
    blk_cfg = &dev_data->rx_dma_blocks[0];
    memset(blk_cfg, 0, sizeof(dev_data->rx_dma_blocks));
    stream = &dev_data->tx;
    blk_cfg = &dev_data->tx_dma_block;
    memset(blk_cfg, 0, sizeof(dev_data->tx_dma_block));
    stream->dma_cfg.head_block = blk_cfg;
    blk_cfg->source_address = (uint32_t)&base->FIFORD;
    blk_cfg->dest_address = (uint32_t)buffer[0];
    blk_cfg->block_size = block_size;
    blk_cfg->next_block = &dev_data->rx_dma_blocks[1];
    blk_cfg->dest_reload_en = 1;
    blk_cfg = &dev_data->rx_dma_blocks[1];
    blk_cfg->source_address = (uint32_t)&base->FIFORD;
    blk_cfg->dest_address = (uint32_t)buffer[1];
    blk_cfg->block_size = block_size;
    blk_cfg->dest_address = (uint32_t)&base->FIFOWR;
    blk_cfg->source_address = (uint32_t)buffer;
    blk_cfg->block_size = block_size;
    stream->dma_cfg.user_data = (void *)dev;
    dma_config(stream->dev_dma, stream->channel, &stream->dma_cfg);
    LOG_DBG("dma_slot is %d", stream->dma_cfg.dma_slot);
    LOG_DBG("channel_direction is %d", stream->dma_cfg.channel_direction);
    LOG_DBG("complete_callback_en is %d", stream->dma_cfg.complete_callback_en);
    LOG_DBG("error_callback_dis is %d", stream->dma_cfg.error_callback_dis);
    LOG_DBG("source_handshake is %d", stream->dma_cfg.source_handshake);
    LOG_DBG("dest_handshake is %d", stream->dma_cfg.dest_handshake);
    LOG_DBG("channel_priority is %d", stream->dma_cfg.channel_priority);
    LOG_DBG("source_chaining_en is %d", stream->dma_cfg.source_chaining_en);
    LOG_DBG("dest_chaining_en is %d", stream->dma_cfg.dest_chaining_en);
    LOG_DBG("linked_channel is %d", stream->dma_cfg.linked_channel);
    LOG_DBG("source_data_size is %d", stream->dma_cfg.source_data_size);
    LOG_DBG("dest_data_size is %d", stream->dma_cfg.dest_data_size);
    LOG_DBG("source_burst_length is %d", stream->dma_cfg.source_burst_length);
    LOG_DBG("dest_burst_length is %d", stream->dma_cfg.dest_burst_length);
    LOG_DBG("block_count is %d", stream->dma_cfg.block_count);
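The dma_cfg fields dumped by those debug prints are populated elsewhere in the driver and are not part of the matched lines; purely as an illustrative assumption, a TX setup consistent with the prints and with the data sizes set in i2s_mcux_configure() could look roughly like this:

    /* Hypothetical sketch; every value here is an assumption except where a
     * comment points back to a line in this listing. */
    static void sketch_tx_dma_cfg(struct stream *stream, const struct device *dev)
    {
        stream->dma_cfg.channel_direction    = MEMORY_TO_PERIPHERAL; /* RAM -> FIFOWR */
        stream->dma_cfg.source_data_size     = 4U; /* as set in i2s_mcux_configure() */
        stream->dma_cfg.dest_data_size       = 4U;
        stream->dma_cfg.block_count          = 1U; /* one block, re-armed per buffer */
        stream->dma_cfg.complete_callback_en = 1U; /* callback after every block */
        stream->dma_cfg.dma_callback         = i2s_mcux_dma_tx_callback;
        stream->dma_cfg.user_data            = (void *)dev; /* as in the listing */
    }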
In i2s_mcux_dma_tx_callback():
    uint32_t channel, int status)
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->tx;
    LOG_DBG("tx cb: %d", stream->state);
    ret = k_msgq_get(&stream->out_queue, &queue_entry.mem_block, K_NO_WAIT);
    k_mem_slab_free(stream->cfg.mem_slab, queue_entry.mem_block);
    LOG_ERR("no buffer in output queue for channel %u", channel);
    /* Received a STOP trigger, terminate TX immediately */
    if (stream->last_block) {
        stream->state = I2S_STATE_READY;
        LOG_DBG("TX STOPPED");
    switch (stream->state) {
        ret = k_msgq_get(&stream->in_queue, &queue_entry, K_NO_WAIT);
        k_msgq_put(&stream->out_queue, &queue_entry.mem_block, K_NO_WAIT);
        dma_start(stream->dev_dma, stream->channel);
        LOG_DBG("DMA status %08x channel %u k_msgq_get ret %d",
                status, channel, ret);
        if (stream->state == I2S_STATE_STOPPING) {
            stream->state = I2S_STATE_READY;
        stream->state = I2S_STATE_ERROR;
In i2s_mcux_dma_rx_callback():
    uint32_t channel, int status)
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->rx;
    LOG_DBG("rx cb: %d", stream->state);
    stream->state = I2S_STATE_ERROR;
    switch (stream->state) {
        ret = k_msgq_get(&stream->in_queue, &buffer, K_NO_WAIT);
        ret = k_msgq_put(&stream->out_queue, &buffer, K_NO_WAIT);
        LOG_ERR("buffer %p -> out_queue %p err %d", buffer,
                &stream->out_queue, ret);
        stream->state = I2S_STATE_ERROR;
        if (stream->state == I2S_STATE_RUNNING) {
            ret = k_mem_slab_alloc(stream->cfg.mem_slab, &buffer, K_NO_WAIT);
                    stream->cfg.mem_slab, ret);
            stream->state = I2S_STATE_ERROR;
            const struct i2s_mcux_config *cfg = dev->config;
            I2S_Type *base = cfg->base;
            dma_reload(stream->dev_dma, stream->channel,
                       (uint32_t)&base->FIFORD, (uint32_t)buffer,
                       stream->cfg.block_size);
            ret = k_msgq_put(&stream->in_queue, &buffer, K_NO_WAIT);
            LOG_ERR("buffer %p -> in_queue %p err %d",
                    buffer, &stream->in_queue, ret);
            dma_start(stream->dev_dma, stream->channel);
        stream->state = I2S_STATE_READY;
In i2s_mcux_tx_stream_start():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->tx;
    I2S_Type *base = cfg->base;
    ret = k_msgq_get(&stream->in_queue, &queue_entry, K_NO_WAIT);
    ret = k_msgq_put(&stream->out_queue, &queue_entry.mem_block, K_NO_WAIT);
    /* Enable TX DMA */
    base->FIFOCFG |= I2S_FIFOCFG_DMATX_MASK;
    ret = dma_start(stream->dev_dma, stream->channel);
In i2s_mcux_rx_stream_start():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->rx;
    I2S_Type *base = cfg->base;
    num_of_bufs = k_mem_slab_num_free_get(stream->cfg.mem_slab);
    return -EINVAL;
    ret = k_mem_slab_alloc(stream->cfg.mem_slab, &buffer[i],
            stream->cfg.block_size);
    ret = k_msgq_put(&stream->in_queue, &buffer[i], K_NO_WAIT);
    base->FIFOCFG |= I2S_FIFOCFG_DMARX_MASK;
    ret = dma_start(stream->dev_dma, stream->channel);
    LOG_ERR("Failed to start DMA Ch%d (%d)", stream->channel, ret);
In i2s_mcux_trigger():
    struct i2s_mcux_data *dev_data = dev->data;
    stream = &dev_data->rx;
    stream = &dev_data->tx;
    return -ENOSYS;
    LOG_ERR("Either RX or TX direction must be selected");
    return -EINVAL;
    if (stream->state != I2S_STATE_READY) {
        LOG_ERR("START trigger: invalid state %d", stream->state);
        ret = -EIO;
    stream->state = I2S_STATE_RUNNING;
    stream->last_block = false;
    if (stream->state != I2S_STATE_RUNNING) {
        LOG_ERR("STOP trigger: invalid state %d", stream->state);
        ret = -EIO;
    stream->state = I2S_STATE_STOPPING;
    stream->last_block = true;
    if (stream->state != I2S_STATE_RUNNING) {
        LOG_ERR("DRAIN trigger: invalid state %d", stream->state);
        ret = -EIO;
    stream->state = I2S_STATE_STOPPING;
    if (stream->state == I2S_STATE_NOT_READY) {
        LOG_ERR("DROP trigger: invalid state %d", stream->state);
        ret = -EIO;
    stream->state = I2S_STATE_READY;
    if (stream->state != I2S_STATE_ERROR) {
        LOG_ERR("PREPARE trigger: invalid state %d", stream->state);
        ret = -EIO;
    stream->state = I2S_STATE_READY;
    ret = -EINVAL;
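The trigger handler is a small state machine over I2S_STATE_*; a sketch of how an application drives it, including recovery after the FIFO-error paths further down set the ERROR state. i2s_trigger() and the trigger commands are the standard Zephyr API, the helper name is made up:

    #include <zephyr/drivers/i2s.h>

    static int recover_stream(const struct device *i2s_dev, enum i2s_dir dir)
    {
        /* Per the listing, DROP is rejected (-EIO) only in NOT_READY; from
         * ERROR or RUNNING it returns the stream to READY and discards any
         * queued buffers. */
        int ret = i2s_trigger(i2s_dev, dir, I2S_TRIGGER_DROP);

        if (ret < 0) {
            return ret;
        }

        /* For TX, queue fresh data with i2s_write() before restarting;
         * then READY -> RUNNING again. */
        return i2s_trigger(i2s_dev, dir, I2S_TRIGGER_START);
    }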
In i2s_mcux_read():
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->rx;
    if (stream->state == I2S_STATE_NOT_READY) {
        LOG_ERR("invalid state %d", stream->state);
        return -EIO;
    ret = k_msgq_get(&stream->out_queue, &buffer,
                     SYS_TIMEOUT_MS(stream->cfg.timeout));
    if (stream->state == I2S_STATE_ERROR) {
        return -EIO;
    return -EAGAIN;
    *size = stream->cfg.block_size;
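i2s_mcux_read() hands out the mem_slab buffer that the RX DMA callback queued into out_queue, and the application owns it afterwards. A minimal consumption loop; process_samples() is a hypothetical placeholder:

    #include <zephyr/kernel.h>
    #include <zephyr/drivers/i2s.h>

    extern void process_samples(void *buf, size_t len); /* hypothetical */

    static void rx_capture_loop(const struct device *i2s_dev,
                                struct k_mem_slab *rx_slab)
    {
        void *block;
        size_t size;

        /* Returns -EAGAIN on timeout and -EIO once the stream is in ERROR. */
        while (i2s_read(i2s_dev, &block, &size) == 0) {
            process_samples(block, size);
            k_mem_slab_free(rx_slab, block); /* give the buffer back to the slab */
        }
    }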
In i2s_mcux_write():
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->tx;
    if (stream->state != I2S_STATE_RUNNING &&
        stream->state != I2S_STATE_READY) {
        LOG_ERR("invalid state (%d)", stream->state);
        return -EIO;
    ret = k_msgq_put(&stream->in_queue, &queue_entry,
                     SYS_TIMEOUT_MS(stream->cfg.timeout));
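i2s_mcux_write() only queues the block into in_queue; the TX DMA callback later moves it to out_queue and frees it. A sketch of producing data, starting the stream and draining it; fill_samples() is a hypothetical placeholder:

    #include <zephyr/kernel.h>
    #include <zephyr/drivers/i2s.h>

    extern void fill_samples(void *buf, size_t len); /* hypothetical */

    static int tx_send_blocks(const struct device *i2s_dev, struct k_mem_slab *slab,
                              size_t block_size, int count)
    {
        for (int i = 0; i < count; i++) {
            void *block;
            int ret = k_mem_slab_alloc(slab, &block, K_FOREVER);

            if (ret < 0) {
                return ret;
            }
            fill_samples(block, block_size);

            ret = i2s_write(i2s_dev, block, block_size);
            if (ret < 0) {
                k_mem_slab_free(slab, block); /* driver did not take ownership */
                return ret;
            }
            if (i == 0) {
                /* Start once the first block is queued (READY -> RUNNING). */
                ret = i2s_trigger(i2s_dev, I2S_DIR_TX, I2S_TRIGGER_START);
                if (ret < 0) {
                    return ret;
                }
            }
        }

        /* RUNNING -> STOPPING; back to READY when the queue drains. */
        return i2s_trigger(i2s_dev, I2S_DIR_TX, I2S_TRIGGER_DRAIN);
    }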
In i2s_mcux_isr():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *dev_data = dev->data;
    struct stream *stream = &dev_data->tx;
    I2S_Type *base = cfg->base;
    uint32_t intstat = base->FIFOINTSTAT;
    /* Clear TX error interrupt flag */
    base->FIFOSTAT = I2S_FIFOSTAT_TXERR(1U);
    stream = &dev_data->tx;
    stream->state = I2S_STATE_ERROR;
    base->FIFOSTAT = I2S_FIFOSTAT_RXERR(1U);
    stream = &dev_data->rx;
    stream->state = I2S_STATE_ERROR;
In i2s_mcux_init():
    const struct i2s_mcux_config *cfg = dev->config;
    struct i2s_mcux_data *const data = dev->data;
    err = pinctrl_apply_state(cfg->pincfg, PINCTRL_STATE_DEFAULT);
    cfg->irq_config(dev);
    k_msgq_init(&data->tx.in_queue, (char *)data->tx_in_msgs,
    k_msgq_init(&data->rx.in_queue, (char *)data->rx_in_msgs,
    k_msgq_init(&data->tx.out_queue, (char *)data->tx_out_msgs,
    k_msgq_init(&data->rx.out_queue, (char *)data->rx_out_msgs,
    if (data->tx.dev_dma != NULL) {
        if (!device_is_ready(data->tx.dev_dma)) {
            LOG_ERR("%s device not ready", data->tx.dev_dma->name);
            return -ENODEV;
    if (data->rx.dev_dma != NULL) {
        if (!device_is_ready(data->rx.dev_dma)) {
            LOG_ERR("%s device not ready", data->rx.dev_dma->name);
            return -ENODEV;
    data->tx.state = I2S_STATE_NOT_READY;
    data->rx.state = I2S_STATE_NOT_READY;
    LOG_DBG("Device %s inited", dev->name);
In the per-instance device data initializer macro:
    .tx = { \
        DT_INST_DMAS_HAS_NAME(id, tx), \
        DEVICE_DT_GET(DT_INST_DMAS_CTLR_BY_NAME(id, tx))), \
        .channel = UTIL_AND( \
            DT_INST_DMAS_HAS_NAME(id, tx), \
            DT_INST_DMAS_CELL_BY_NAME(id, tx, channel)), \
        .channel = UTIL_AND( \
            DT_INST_DMAS_CELL_BY_NAME(id, rx, channel)), \