Lines Matching +full:bad +full:- +full:key +full:- +full:2 (each match is shown with its source line number and enclosing function; the code is Zephyr's ESP32 I2S driver, i2s_esp32.c)
4 * SPDX-License-Identifier: Apache-2.0
93 if (i2s_cfg->word_size == 24) { in i2s_esp32_calculate_clock()
97 if (i2s_cfg->options & I2S_OPT_FRAME_CLK_SLAVE || in i2s_esp32_calculate_clock()
98 i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE) { in i2s_esp32_calculate_clock()
99 i2s_hal_clock_info->bclk_div = 8; in i2s_esp32_calculate_clock()
100 i2s_hal_clock_info->bclk = in i2s_esp32_calculate_clock()
101 i2s_cfg->frame_clk_freq * i2s_cfg->channels * channel_length; in i2s_esp32_calculate_clock()
102 i2s_hal_clock_info->mclk = i2s_cfg->frame_clk_freq * i2s_hal_clock_info->bclk_div; in i2s_esp32_calculate_clock()
104 i2s_hal_clock_info->bclk = in i2s_esp32_calculate_clock()
105 i2s_cfg->frame_clk_freq * i2s_cfg->channels * channel_length; in i2s_esp32_calculate_clock()
106 i2s_hal_clock_info->mclk = i2s_cfg->frame_clk_freq * mclk_multiple; in i2s_esp32_calculate_clock()
107 i2s_hal_clock_info->bclk_div = i2s_hal_clock_info->mclk / i2s_hal_clock_info->bclk; in i2s_esp32_calculate_clock()
110 i2s_hal_clock_info->sclk = i2s_esp32_get_source_clk_freq(I2S_ESP32_CLK_SRC); in i2s_esp32_calculate_clock()
111 i2s_hal_clock_info->mclk_div = i2s_hal_clock_info->sclk / i2s_hal_clock_info->mclk; in i2s_esp32_calculate_clock()
112 if (i2s_hal_clock_info->mclk_div == 0) { in i2s_esp32_calculate_clock()
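The matched clock lines reduce to a few integer operations. A minimal standalone sketch of the same math, with a hypothetical struct clk_info standing in for the HAL's i2s_hal_clock_info_t and the caller supplying the values the driver derives elsewhere (channel_length is the per-slot bit width, mclk_multiple and the source clock come from the surrounding code):

#include <errno.h>
#include <stdbool.h>
#include <stdint.h>

struct clk_info {        /* hypothetical stand-in for i2s_hal_clock_info_t */
        uint32_t sclk, mclk, bclk, mclk_div, bclk_div;
};

static int calc_clock(struct clk_info *ci, uint32_t frame_clk_freq,
                      uint8_t channels, uint8_t channel_length,
                      uint32_t mclk_multiple, uint32_t sclk, bool is_slave)
{
        ci->bclk = frame_clk_freq * channels * channel_length;
        if (is_slave) {
                /* slave: fixed bclk divider, mclk derived from it */
                ci->bclk_div = 8;
                ci->mclk = frame_clk_freq * ci->bclk_div;
        } else {
                /* master: mclk from the multiple, bclk divider derived */
                ci->mclk = frame_clk_freq * mclk_multiple;
                ci->bclk_div = ci->mclk / ci->bclk;
        }
        ci->sclk = sclk;
        ci->mclk_div = ci->sclk / ci->mclk;
        return ci->mclk_div == 0 ? -EINVAL : 0;   /* a zero divisor is rejected */
}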
124 while (k_msgq_get(&stream->queue, &item, K_NO_WAIT) == 0) { in i2s_esp32_queue_drop()
125 k_mem_slab_free(stream->i2s_cfg.mem_slab, item.buffer); in i2s_esp32_queue_drop()
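queue_drop above is the usual Zephyr pattern for flushing a queue of buffer descriptors: pop until empty and hand each buffer back to its slab. A self-contained sketch; the real helper takes the stream, this version takes the queue and slab directly, and queue_item uses the buffer/size layout visible in the callback lines further down:

#include <zephyr/kernel.h>

struct queue_item {      /* layout taken from the rx/tx callback lines below */
        void *buffer;
        size_t size;
};

static void queue_drop(struct k_msgq *queue, struct k_mem_slab *slab)
{
        struct queue_item item;

        /* drain without blocking; each dequeued buffer goes back to the slab */
        while (k_msgq_get(queue, &item, K_NO_WAIT) == 0) {
                k_mem_slab_free(slab, item.buffer);
        }
}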
136 const struct i2s_esp32_cfg *dev_cfg = dev->config; in i2s_esp32_restart_dma()
137 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_restart_dma()
143 stream = &dev_data->rx; in i2s_esp32_restart_dma()
144 dst = stream->mem_block; in i2s_esp32_restart_dma()
146 stream = &dev_data->tx; in i2s_esp32_restart_dma()
147 src = stream->mem_block; in i2s_esp32_restart_dma()
150 return -EINVAL; in i2s_esp32_restart_dma()
153 err = dma_reload(stream->dma_dev, stream->dma_channel, (uint32_t)src, (uint32_t)dst, in i2s_esp32_restart_dma()
154 stream->mem_block_len); in i2s_esp32_restart_dma()
156 LOG_ERR("Error reloading DMA channel[%d]: %d", (int)stream->dma_channel, err); in i2s_esp32_restart_dma()
157 return -EIO; in i2s_esp32_restart_dma()
161 i2s_ll_rx_set_eof_num(dev_cfg->hal_cxt.dev, stream->mem_block_len); in i2s_esp32_restart_dma()
164 err = dma_start(stream->dma_dev, stream->dma_channel); in i2s_esp32_restart_dma()
166 LOG_ERR("Error starting DMA channel[%d]: %d", (int)stream->dma_channel, err); in i2s_esp32_restart_dma()
167 return -EIO; in i2s_esp32_restart_dma()
175 const struct i2s_esp32_cfg *dev_cfg = dev->config; in i2s_esp32_start_dma()
176 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_start_dma()
180 unsigned int key; in i2s_esp32_start_dma() local
184 stream = &dev_data->rx; in i2s_esp32_start_dma()
186 stream = &dev_data->tx; in i2s_esp32_start_dma()
189 return -EINVAL; in i2s_esp32_start_dma()
192 key = irq_lock(); in i2s_esp32_start_dma()
194 dma_blk.block_size = stream->mem_block_len; in i2s_esp32_start_dma()
196 dma_blk.dest_address = (uint32_t)stream->mem_block; in i2s_esp32_start_dma()
200 dma_blk.source_address = (uint32_t)stream->mem_block; in i2s_esp32_start_dma()
206 dev_cfg->unit == 0 ? ESP_GDMA_TRIG_PERIPH_I2S0 : ESP_GDMA_TRIG_PERIPH_I2S1; in i2s_esp32_start_dma()
210 err = dma_config(stream->dma_dev, stream->dma_channel, &dma_cfg); in i2s_esp32_start_dma()
212 LOG_ERR("Error configuring DMA channel[%d]: %d", (int)stream->dma_channel, err); in i2s_esp32_start_dma()
213 ret = -EINVAL; in i2s_esp32_start_dma()
218 i2s_ll_rx_set_eof_num(dev_cfg->hal_cxt.dev, stream->mem_block_len); in i2s_esp32_start_dma()
221 err = dma_start(stream->dma_dev, stream->dma_channel); in i2s_esp32_start_dma()
223 LOG_ERR("Error starting DMA channel[%d]: %d", (int)stream->dma_channel, err); in i2s_esp32_start_dma()
224 ret = -EIO; in i2s_esp32_start_dma()
229 irq_unlock(key); in i2s_esp32_start_dma()
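start_dma above builds a single-block transfer from scratch while interrupts are locked: the buffer is the DMA destination for RX and the source for TX, the GDMA trigger follows the I2S unit, and RX additionally programs the frame EOF length. A condensed sketch of that body using Zephyr's struct dma_config / struct dma_block_config (stream, dir and dev_cfg are the driver's own names; the channel_direction values and the dma_slot assignment are assumptions, since the matched lines only show the trigger ternary):

struct dma_block_config dma_blk = {0};
struct dma_config dma_cfg = {0};
unsigned int key = irq_lock();
int err;

dma_blk.block_size = stream->mem_block_len;
if (dir == I2S_DIR_RX) {
        dma_blk.dest_address = (uint32_t)stream->mem_block;    /* peripheral -> memory */
        dma_cfg.channel_direction = PERIPHERAL_TO_MEMORY;      /* assumed */
} else {
        dma_blk.source_address = (uint32_t)stream->mem_block;  /* memory -> peripheral */
        dma_cfg.channel_direction = MEMORY_TO_PERIPHERAL;      /* assumed */
}
dma_cfg.block_count = 1;
dma_cfg.head_block = &dma_blk;
dma_cfg.dma_slot = dev_cfg->unit == 0 ? ESP_GDMA_TRIG_PERIPH_I2S0      /* assumed field */
                                      : ESP_GDMA_TRIG_PERIPH_I2S1;

err = dma_config(stream->dma_dev, stream->dma_channel, &dma_cfg);
if (err == 0 && dir == I2S_DIR_RX) {
        /* RX also needs the frame length programmed into the I2S block */
        i2s_ll_rx_set_eof_num(dev_cfg->hal_cxt.dev, stream->mem_block_len);
}
if (err == 0) {
        err = dma_start(stream->dma_dev, stream->dma_channel);
}
irq_unlock(key);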
235 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_rx_start_transfer()
236 const struct i2s_esp32_cfg *dev_cfg = dev->config; in i2s_esp32_rx_start_transfer()
237 struct i2s_esp32_stream *stream = &dev_data->rx; in i2s_esp32_rx_start_transfer()
238 const i2s_hal_context_t *hal_cxt = &dev_cfg->hal_cxt; in i2s_esp32_rx_start_transfer()
241 err = k_mem_slab_alloc(stream->i2s_cfg.mem_slab, &stream->mem_block, K_NO_WAIT); in i2s_esp32_rx_start_transfer()
243 return -ENOMEM; in i2s_esp32_rx_start_transfer()
245 stream->mem_block_len = stream->i2s_cfg.block_size; in i2s_esp32_rx_start_transfer()
254 return -EIO; in i2s_esp32_rx_start_transfer()
264 const struct i2s_esp32_cfg *dev_cfg = dev->config; in i2s_esp32_tx_start_transfer()
265 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_tx_start_transfer()
266 struct i2s_esp32_stream *stream = &dev_data->tx; in i2s_esp32_tx_start_transfer()
267 const i2s_hal_context_t *hal_cxt = &dev_cfg->hal_cxt; in i2s_esp32_tx_start_transfer()
271 err = k_msgq_get(&stream->queue, &item, K_NO_WAIT); in i2s_esp32_tx_start_transfer()
273 return -ENOMEM; in i2s_esp32_tx_start_transfer()
276 stream->mem_block = item.buffer; in i2s_esp32_tx_start_transfer()
277 stream->mem_block_len = item.size; in i2s_esp32_tx_start_transfer()
286 return -EIO; in i2s_esp32_tx_start_transfer()
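The two start_transfer paths differ only in where the first buffer comes from: RX allocates an empty block from the configured mem_slab, while TX dequeues the first block the application already queued with i2s_write(). Side-by-side sketch (stream is the driver's per-direction state, queue_item as above):

/* RX: grab an empty block to receive into */
if (k_mem_slab_alloc(stream->i2s_cfg.mem_slab, &stream->mem_block, K_NO_WAIT) < 0) {
        return -ENOMEM;
}
stream->mem_block_len = stream->i2s_cfg.block_size;

/* TX: take the first block already queued by the application */
struct queue_item item;

if (k_msgq_get(&stream->queue, &item, K_NO_WAIT) != 0) {
        return -ENOMEM;                 /* nothing queued yet */
}
stream->mem_block = item.buffer;
stream->mem_block_len = item.size;

/* both paths then configure and start the DMA channel (start_dma above) */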
296 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_rx_stop_transfer()
297 struct i2s_esp32_stream *stream = &dev_data->rx; in i2s_esp32_rx_stop_transfer()
299 dma_stop(stream->dma_dev, stream->dma_channel); in i2s_esp32_rx_stop_transfer()
301 if (stream->mem_block != NULL) { in i2s_esp32_rx_stop_transfer()
302 k_mem_slab_free(stream->i2s_cfg.mem_slab, stream->mem_block); in i2s_esp32_rx_stop_transfer()
303 stream->mem_block = NULL; in i2s_esp32_rx_stop_transfer()
304 stream->mem_block_len = 0; in i2s_esp32_rx_stop_transfer()
310 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_tx_stop_transfer()
311 struct i2s_esp32_stream *stream = &dev_data->tx; in i2s_esp32_tx_stop_transfer()
313 dma_stop(stream->dma_dev, stream->dma_channel); in i2s_esp32_tx_stop_transfer()
315 if (stream->mem_block != NULL) { in i2s_esp32_tx_stop_transfer()
316 k_mem_slab_free(stream->i2s_cfg.mem_slab, stream->mem_block); in i2s_esp32_tx_stop_transfer()
317 stream->mem_block = NULL; in i2s_esp32_tx_stop_transfer()
318 stream->mem_block_len = 0; in i2s_esp32_tx_stop_transfer()
326 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_rx_callback()
327 struct i2s_esp32_stream *stream = &dev_data->rx; in i2s_esp32_rx_callback()
331 stream->state = I2S_STATE_ERROR; in i2s_esp32_rx_callback()
332 LOG_ERR("RX status bad: %d", status); in i2s_esp32_rx_callback()
336 if (stream->mem_block == NULL) { in i2s_esp32_rx_callback()
337 if (stream->state != I2S_STATE_READY) { in i2s_esp32_rx_callback()
338 stream->state = I2S_STATE_ERROR; in i2s_esp32_rx_callback()
346 struct queue_item item = {.buffer = stream->mem_block, .size = stream->mem_block_len}; in i2s_esp32_rx_callback()
348 err = k_msgq_put(&stream->queue, &item, K_NO_WAIT); in i2s_esp32_rx_callback()
350 stream->state = I2S_STATE_ERROR; in i2s_esp32_rx_callback()
354 if (stream->state == I2S_STATE_STOPPING) { in i2s_esp32_rx_callback()
355 stream->state = I2S_STATE_READY; in i2s_esp32_rx_callback()
359 err = k_mem_slab_alloc(stream->i2s_cfg.mem_slab, &stream->mem_block, K_NO_WAIT); in i2s_esp32_rx_callback()
361 stream->state = I2S_STATE_ERROR; in i2s_esp32_rx_callback()
364 stream->mem_block_len = stream->i2s_cfg.block_size; in i2s_esp32_rx_callback()
368 stream->state = I2S_STATE_ERROR; in i2s_esp32_rx_callback()
376 stream->stop_transfer(dev); in i2s_esp32_rx_callback()
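The RX callback above runs a simple ring: publish the block the DMA just filled, pull a fresh block from the slab, and rearm the transfer; any failure parks the stream in I2S_STATE_ERROR and stops it. A sketch of that flow with the error paths collapsed onto one label:

struct queue_item item = {
        .buffer = stream->mem_block,
        .size = stream->mem_block_len,
};

/* 1. hand the filled block to the queue that i2s_read() drains */
if (k_msgq_put(&stream->queue, &item, K_NO_WAIT) != 0) {
        stream->state = I2S_STATE_ERROR;        /* consumer fell behind */
        goto stop;
}

/* 2. a pending STOP/DRAIN ends the stream here */
if (stream->state == I2S_STATE_STOPPING) {
        stream->state = I2S_STATE_READY;
        goto stop;
}

/* 3. otherwise allocate the next block and restart the DMA */
if (k_mem_slab_alloc(stream->i2s_cfg.mem_slab, &stream->mem_block, K_NO_WAIT) != 0) {
        stream->state = I2S_STATE_ERROR;
        goto stop;
}
stream->mem_block_len = stream->i2s_cfg.block_size;
/* ... i2s_esp32_restart_dma(dev, I2S_DIR_RX), error -> I2S_STATE_ERROR ... */
return;

stop:
stream->stop_transfer(dev);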
383 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_tx_callback()
384 struct i2s_esp32_stream *stream = &dev_data->tx; in i2s_esp32_tx_callback()
390 stream->state = I2S_STATE_ERROR; in i2s_esp32_tx_callback()
391 LOG_ERR("TX bad status: %d", status); in i2s_esp32_tx_callback()
395 if (stream->mem_block == NULL) { in i2s_esp32_tx_callback()
396 if (stream->state != I2S_STATE_READY) { in i2s_esp32_tx_callback()
397 stream->state = I2S_STATE_ERROR; in i2s_esp32_tx_callback()
405 if (stream->state == I2S_STATE_STOPPING) { in i2s_esp32_tx_callback()
406 if (k_msgq_num_used_get(&stream->queue) == 0) { in i2s_esp32_tx_callback()
407 stream->state = I2S_STATE_READY; in i2s_esp32_tx_callback()
409 } else if (stream->stop_without_draining == true) { in i2s_esp32_tx_callback()
410 stream->queue_drop(stream); in i2s_esp32_tx_callback()
411 stream->state = I2S_STATE_READY; in i2s_esp32_tx_callback()
417 if (stream->last_block) { in i2s_esp32_tx_callback()
418 stream->state = I2S_STATE_READY; in i2s_esp32_tx_callback()
422 err = k_msgq_get(&stream->queue, &item, K_NO_WAIT); in i2s_esp32_tx_callback()
424 stream->state = I2S_STATE_ERROR; in i2s_esp32_tx_callback()
428 mem_block_tmp = stream->mem_block; in i2s_esp32_tx_callback()
430 stream->mem_block = item.buffer; in i2s_esp32_tx_callback()
431 stream->mem_block_len = item.size; in i2s_esp32_tx_callback()
435 stream->state = I2S_STATE_ERROR; in i2s_esp32_tx_callback()
440 k_mem_slab_free(stream->i2s_cfg.mem_slab, mem_block_tmp); in i2s_esp32_tx_callback()
445 stream->stop_transfer(dev); in i2s_esp32_tx_callback()
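The TX callback is the mirror image: while STOPPING it either finishes (queue empty, or STOP asked to skip draining) or keeps sending what is still queued; otherwise the next queued block replaces the one just sent and the finished block goes back to the slab. Sketch:

if (stream->state == I2S_STATE_STOPPING) {
        if (k_msgq_num_used_get(&stream->queue) == 0) {
                stream->state = I2S_STATE_READY;        /* fully drained */
                goto stop;
        } else if (stream->stop_without_draining) {
                stream->queue_drop(stream);             /* STOP: discard what is left */
                stream->state = I2S_STATE_READY;
                goto stop;
        }
        /* DRAIN: fall through and keep transmitting the queued blocks */
}

struct queue_item item;

if (k_msgq_get(&stream->queue, &item, K_NO_WAIT) != 0) {
        stream->state = I2S_STATE_ERROR;                /* underrun */
        goto stop;
}

void *done = stream->mem_block;                         /* block just transmitted */

stream->mem_block = item.buffer;
stream->mem_block_len = item.size;
/* ... i2s_esp32_restart_dma(dev, I2S_DIR_TX), error -> I2S_STATE_ERROR ... */
k_mem_slab_free(stream->i2s_cfg.mem_slab, done);
return;

stop:
stream->stop_transfer(dev);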
450 const struct i2s_esp32_cfg *dev_cfg = dev->config; in i2s_esp32_initialize()
451 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_initialize()
452 const struct device *clk_dev = dev_cfg->clock_dev; in i2s_esp32_initialize()
455 if (dev_data->tx.dma_dev && !device_is_ready(dev_data->tx.dma_dev)) { in i2s_esp32_initialize()
456 LOG_ERR("%s device not ready", dev_data->tx.dma_dev->name); in i2s_esp32_initialize()
457 return -ENODEV; in i2s_esp32_initialize()
460 if (dev_data->rx.dma_dev && !device_is_ready(dev_data->rx.dma_dev)) { in i2s_esp32_initialize()
461 LOG_ERR("%s device not ready", dev_data->rx.dma_dev->name); in i2s_esp32_initialize()
462 return -ENODEV; in i2s_esp32_initialize()
467 return -ENODEV; in i2s_esp32_initialize()
470 err = clock_control_on(clk_dev, dev_cfg->clock_subsys); in i2s_esp32_initialize()
473 return -EIO; in i2s_esp32_initialize()
476 if (dev_data->tx.dma_dev) { in i2s_esp32_initialize()
477 err = k_msgq_alloc_init(&dev_data->tx.queue, sizeof(struct queue_item), in i2s_esp32_initialize()
484 if (dev_data->rx.dma_dev) { in i2s_esp32_initialize()
485 err = k_msgq_alloc_init(&dev_data->rx.queue, sizeof(struct queue_item), in i2s_esp32_initialize()
492 i2s_ll_enable_clock(dev_cfg->hal_cxt.dev); in i2s_esp32_initialize()
494 LOG_INF("%s initialized", dev->name); in i2s_esp32_initialize()
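Init above is conventional Zephyr bring-up: verify both DMA controllers and the clock controller, gate the peripheral clock on, then allocate one message queue of queue_item descriptors per configured direction before enabling the I2S core clock. Compressed sketch (QUEUE_DEPTH is a hypothetical stand-in for the driver's configured queue depth, which the matched lines do not show):

if (dev_data->tx.dma_dev && !device_is_ready(dev_data->tx.dma_dev)) {
        return -ENODEV;
}
if (dev_data->rx.dma_dev && !device_is_ready(dev_data->rx.dma_dev)) {
        return -ENODEV;
}
if (!device_is_ready(dev_cfg->clock_dev)) {     /* assumed check behind the -ENODEV above */
        return -ENODEV;
}
if (clock_control_on(dev_cfg->clock_dev, dev_cfg->clock_subsys) != 0) {
        return -EIO;
}

if (dev_data->tx.dma_dev &&
    k_msgq_alloc_init(&dev_data->tx.queue, sizeof(struct queue_item), QUEUE_DEPTH) != 0) {
        return -ENOMEM;
}
if (dev_data->rx.dma_dev &&
    k_msgq_alloc_init(&dev_data->rx.queue, sizeof(struct queue_item), QUEUE_DEPTH) != 0) {
        return -ENOMEM;
}

i2s_ll_enable_clock(dev_cfg->hal_cxt.dev);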
502 const struct i2s_esp32_cfg *const dev_cfg = dev->config; in i2s_esp32_configure()
503 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_configure()
509 stream = &dev_data->rx; in i2s_esp32_configure()
510 if (stream->dma_dev == NULL) { in i2s_esp32_configure()
512 return -EINVAL; in i2s_esp32_configure()
516 stream = &dev_data->tx; in i2s_esp32_configure()
517 if (stream->dma_dev == NULL) { in i2s_esp32_configure()
519 return -EINVAL; in i2s_esp32_configure()
524 return -ENOSYS; in i2s_esp32_configure()
527 return -EINVAL; in i2s_esp32_configure()
530 if (stream->state != I2S_STATE_NOT_READY && stream->state != I2S_STATE_READY) { in i2s_esp32_configure()
531 LOG_ERR("Invalid state: %d", (int)stream->state); in i2s_esp32_configure()
532 return -EINVAL; in i2s_esp32_configure()
535 if (i2s_cfg->frame_clk_freq == 0U) { in i2s_esp32_configure()
536 stream->queue_drop(stream); in i2s_esp32_configure()
537 memset(&stream->i2s_cfg, 0, sizeof(struct i2s_config)); in i2s_esp32_configure()
538 stream->is_slave = false; in i2s_esp32_configure()
539 stream->state = I2S_STATE_NOT_READY; in i2s_esp32_configure()
543 data_format = i2s_cfg->format & I2S_FMT_DATA_FORMAT_MASK; in i2s_esp32_configure()
549 return -EINVAL; in i2s_esp32_configure()
552 if (i2s_cfg->word_size != 8 && i2s_cfg->word_size != 16 && i2s_cfg->word_size != 24 && in i2s_esp32_configure()
553 i2s_cfg->word_size != 32) { in i2s_esp32_configure()
554 LOG_ERR("Word size not supported: %d", (int)i2s_cfg->word_size); in i2s_esp32_configure()
555 return -EINVAL; in i2s_esp32_configure()
558 if (i2s_cfg->channels != 2) { in i2s_esp32_configure()
559 LOG_ERR("Currently only 2 channels are supported"); in i2s_esp32_configure()
560 return -EINVAL; in i2s_esp32_configure()
563 if (i2s_cfg->options & I2S_OPT_LOOPBACK) { in i2s_esp32_configure()
565 return -EINVAL; in i2s_esp32_configure()
568 if (i2s_cfg->options & I2S_OPT_PINGPONG) { in i2s_esp32_configure()
570 return -EINVAL; in i2s_esp32_configure()
573 if ((i2s_cfg->options & I2S_OPT_FRAME_CLK_SLAVE) != 0 && in i2s_esp32_configure()
574 (i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE) != 0) { in i2s_esp32_configure()
575 stream->is_slave = true; in i2s_esp32_configure()
576 } else if ((i2s_cfg->options & I2S_OPT_FRAME_CLK_SLAVE) == 0 && in i2s_esp32_configure()
577 (i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE) == 0) { in i2s_esp32_configure()
578 stream->is_slave = false; in i2s_esp32_configure()
582 return -EINVAL; in i2s_esp32_configure()
587 slot_cfg.data_bit_width = i2s_cfg->word_size; in i2s_esp32_configure()
589 slot_cfg.slot_bit_width = i2s_cfg->word_size > 16 ? 32 : 16; in i2s_esp32_configure()
591 slot_cfg.std.ws_pol = i2s_cfg->format & I2S_FMT_FRAME_CLK_INV ? true : false; in i2s_esp32_configure()
595 slot_cfg.std.ws_pol = i2s_cfg->format & I2S_FMT_FRAME_CLK_INV ? false : true; in i2s_esp32_configure()
608 slot_cfg.std.bit_order_lsb = i2s_cfg->format & I2S_FMT_DATA_ORDER_LSB ? true : false; in i2s_esp32_configure()
612 i2s_hal_context_t *hal_cxt = (i2s_hal_context_t *)&dev_cfg->hal_cxt; in i2s_esp32_configure()
616 return -EINVAL; in i2s_esp32_configure()
620 if (dev_data->rx.state != I2S_STATE_NOT_READY) { in i2s_esp32_configure()
621 if (stream->is_slave && !dev_data->rx.is_slave) { /*full duplex*/ in i2s_esp32_configure()
622 i2s_ll_share_bck_ws(hal_cxt->dev, true); in i2s_esp32_configure()
624 i2s_ll_share_bck_ws(hal_cxt->dev, false); in i2s_esp32_configure()
627 i2s_ll_share_bck_ws(hal_cxt->dev, false); in i2s_esp32_configure()
630 i2s_hal_std_set_tx_slot(hal_cxt, stream->is_slave, &slot_cfg); in i2s_esp32_configure()
634 err = pinctrl_apply_state(dev_cfg->pcfg, PINCTRL_STATE_DEFAULT); in i2s_esp32_configure()
637 return -EIO; in i2s_esp32_configure()
640 if (dev_data->tx.state != I2S_STATE_NOT_READY) { in i2s_esp32_configure()
641 if (stream->is_slave && dev_data->rx.is_slave) { in i2s_esp32_configure()
642 i2s_ll_mclk_bind_to_tx_clk(hal_cxt->dev); in i2s_esp32_configure()
648 if (dev_data->tx.state != I2S_STATE_NOT_READY) { in i2s_esp32_configure()
649 if (stream->is_slave && !dev_data->tx.is_slave) { /*full duplex*/ in i2s_esp32_configure()
650 i2s_ll_share_bck_ws(hal_cxt->dev, true); in i2s_esp32_configure()
652 i2s_ll_share_bck_ws(hal_cxt->dev, false); in i2s_esp32_configure()
655 i2s_ll_share_bck_ws(hal_cxt->dev, false); in i2s_esp32_configure()
658 i2s_hal_std_set_rx_slot(hal_cxt, stream->is_slave, &slot_cfg); in i2s_esp32_configure()
662 err = pinctrl_apply_state(dev_cfg->pcfg, PINCTRL_STATE_DEFAULT); in i2s_esp32_configure()
665 return -EIO; in i2s_esp32_configure()
668 if (dev_data->tx.state != I2S_STATE_NOT_READY) { in i2s_esp32_configure()
669 if (!stream->is_slave && !dev_data->tx.is_slave) { in i2s_esp32_configure()
670 i2s_ll_mclk_bind_to_rx_clk(hal_cxt->dev); in i2s_esp32_configure()
676 memcpy(&stream->i2s_cfg, i2s_cfg, sizeof(struct i2s_config)); in i2s_esp32_configure()
678 stream->state = I2S_STATE_READY; in i2s_esp32_configure()
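Two details of the configure path are worth isolating: the physical slot is widened to 32 bits whenever the word size exceeds 16, and master/slave mode is only accepted when the frame-clock and bit-clock options agree. Sketch of that option decoding (slot_cfg, stream and i2s_cfg are the driver's own names):

/* slot geometry: data width follows word_size, the slot is 16 or 32 bits */
slot_cfg.data_bit_width = i2s_cfg->word_size;
slot_cfg.slot_bit_width = i2s_cfg->word_size > 16 ? 32 : 16;

/* both slave options set -> slave, both clear -> master, mixed -> rejected */
bool frame_slave = (i2s_cfg->options & I2S_OPT_FRAME_CLK_SLAVE) != 0;
bool bit_slave = (i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE) != 0;

if (frame_slave && bit_slave) {
        stream->is_slave = true;
} else if (!frame_slave && !bit_slave) {
        stream->is_slave = false;
} else {
        return -EINVAL;
}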
685 struct i2s_esp32_data *dev_data = dev->data; in i2s_esp32_config_get()
689 stream = &dev_data->rx; in i2s_esp32_config_get()
691 stream = &dev_data->tx; in i2s_esp32_config_get()
694 if (stream->state == I2S_STATE_NOT_READY) { in i2s_esp32_config_get()
698 return &stream->i2s_cfg; in i2s_esp32_config_get()
703 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_trigger()
706 unsigned int key; in i2s_esp32_trigger() local
711 stream = &dev_data->rx; in i2s_esp32_trigger()
714 stream = &dev_data->tx; in i2s_esp32_trigger()
718 return -ENOSYS; in i2s_esp32_trigger()
721 return -EINVAL; in i2s_esp32_trigger()
726 if (stream->state != I2S_STATE_READY) { in i2s_esp32_trigger()
727 LOG_ERR("START - Invalid state: %d", (int)stream->state); in i2s_esp32_trigger()
728 return -EIO; in i2s_esp32_trigger()
731 err = stream->start_transfer(dev); in i2s_esp32_trigger()
733 LOG_ERR("START - Transfer start failed: %d", err); in i2s_esp32_trigger()
734 return -EIO; in i2s_esp32_trigger()
736 stream->last_block = false; in i2s_esp32_trigger()
737 stream->state = I2S_STATE_RUNNING; in i2s_esp32_trigger()
741 key = irq_lock(); in i2s_esp32_trigger()
742 if (stream->state != I2S_STATE_RUNNING) { in i2s_esp32_trigger()
743 irq_unlock(key); in i2s_esp32_trigger()
744 LOG_ERR("STOP - Invalid state: %d", (int)stream->state); in i2s_esp32_trigger()
745 return -EIO; in i2s_esp32_trigger()
748 err = dma_get_status(stream->dma_dev, stream->dma_channel, &dma_channel_status); in i2s_esp32_trigger()
750 irq_unlock(key); in i2s_esp32_trigger()
752 (int)stream->dma_channel, err); in i2s_esp32_trigger()
753 return -EIO; in i2s_esp32_trigger()
757 stream->stop_without_draining = true; in i2s_esp32_trigger()
758 stream->state = I2S_STATE_STOPPING; in i2s_esp32_trigger()
760 stream->stop_transfer(dev); in i2s_esp32_trigger()
761 stream->last_block = true; in i2s_esp32_trigger()
762 stream->state = I2S_STATE_READY; in i2s_esp32_trigger()
765 irq_unlock(key); in i2s_esp32_trigger()
769 key = irq_lock(); in i2s_esp32_trigger()
770 if (stream->state != I2S_STATE_RUNNING) { in i2s_esp32_trigger()
771 irq_unlock(key); in i2s_esp32_trigger()
772 LOG_ERR("DRAIN - Invalid state: %d", (int)stream->state); in i2s_esp32_trigger()
773 return -EIO; in i2s_esp32_trigger()
776 err = dma_get_status(stream->dma_dev, stream->dma_channel, &dma_channel_status); in i2s_esp32_trigger()
778 irq_unlock(key); in i2s_esp32_trigger()
780 (int)stream->dma_channel, err); in i2s_esp32_trigger()
781 return -EIO; in i2s_esp32_trigger()
785 if (k_msgq_num_used_get(&stream->queue) > 0 || dma_channel_status.busy) { in i2s_esp32_trigger()
786 stream->stop_without_draining = false; in i2s_esp32_trigger()
787 stream->state = I2S_STATE_STOPPING; in i2s_esp32_trigger()
789 stream->stop_transfer(dev); in i2s_esp32_trigger()
790 stream->state = I2S_STATE_READY; in i2s_esp32_trigger()
794 stream->stop_without_draining = true; in i2s_esp32_trigger()
795 stream->state = I2S_STATE_STOPPING; in i2s_esp32_trigger()
797 stream->stop_transfer(dev); in i2s_esp32_trigger()
798 stream->last_block = true; in i2s_esp32_trigger()
799 stream->state = I2S_STATE_READY; in i2s_esp32_trigger()
802 irq_unlock(key); in i2s_esp32_trigger()
804 return -EINVAL; in i2s_esp32_trigger()
807 irq_unlock(key); in i2s_esp32_trigger()
811 if (stream->state == I2S_STATE_NOT_READY) { in i2s_esp32_trigger()
812 LOG_ERR("DROP - invalid state: %d", (int)stream->state); in i2s_esp32_trigger()
813 return -EIO; in i2s_esp32_trigger()
815 stream->stop_transfer(dev); in i2s_esp32_trigger()
816 stream->queue_drop(stream); in i2s_esp32_trigger()
817 stream->state = I2S_STATE_READY; in i2s_esp32_trigger()
821 if (stream->state != I2S_STATE_ERROR) { in i2s_esp32_trigger()
822 LOG_ERR("PREPARE - invalid state: %d", (int)stream->state); in i2s_esp32_trigger()
823 return -EIO; in i2s_esp32_trigger()
825 stream->queue_drop(stream); in i2s_esp32_trigger()
826 stream->state = I2S_STATE_READY; in i2s_esp32_trigger()
831 return -EINVAL; in i2s_esp32_trigger()
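The trigger handler above is the standard Zephyr I2S state machine; per command it amounts to the following bookkeeping (sketch only: the irq_lock()/irq_unlock() bracketing, the dma_get_status() busy checks and the immediate-stop shortcuts from the matched lines are elided):

switch (cmd) {
case I2S_TRIGGER_START:         /* READY -> RUNNING, kick off the first transfer */
        err = stream->start_transfer(dev);
        stream->last_block = false;
        stream->state = I2S_STATE_RUNNING;
        break;
case I2S_TRIGGER_STOP:          /* RUNNING -> STOPPING, current block finishes, rest dropped */
        stream->stop_without_draining = true;
        stream->state = I2S_STATE_STOPPING;
        break;
case I2S_TRIGGER_DRAIN:         /* RUNNING -> STOPPING, TX empties its queue first */
        stream->stop_without_draining = false;
        stream->state = I2S_STATE_STOPPING;
        break;
case I2S_TRIGGER_DROP:          /* any state except NOT_READY: stop now, discard queued blocks */
        stream->stop_transfer(dev);
        stream->queue_drop(stream);
        stream->state = I2S_STATE_READY;
        break;
case I2S_TRIGGER_PREPARE:       /* ERROR -> READY after discarding queued blocks */
        stream->queue_drop(stream);
        stream->state = I2S_STATE_READY;
        break;
default:
        return -EINVAL;
}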
839 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_read()
843 if (dev_data->rx.state == I2S_STATE_NOT_READY) { in i2s_esp32_read()
844 LOG_ERR("RX invalid state: %d", (int)dev_data->rx.state); in i2s_esp32_read()
845 return -EIO; in i2s_esp32_read()
846 } else if (dev_data->rx.state == I2S_STATE_ERROR && in i2s_esp32_read()
847 k_msgq_num_used_get(&dev_data->rx.queue) == 0) { in i2s_esp32_read()
849 return -EIO; in i2s_esp32_read()
852 err = k_msgq_get(&dev_data->rx.queue, &item, K_MSEC(dev_data->rx.i2s_cfg.timeout)); in i2s_esp32_read()
866 struct i2s_esp32_data *const dev_data = dev->data; in i2s_esp32_write()
869 if (dev_data->tx.state != I2S_STATE_RUNNING && dev_data->tx.state != I2S_STATE_READY) { in i2s_esp32_write()
870 LOG_ERR("TX Invalid state: %d", (int)dev_data->tx.state); in i2s_esp32_write()
871 return -EIO; in i2s_esp32_write()
874 if (size > dev_data->tx.i2s_cfg.block_size) { in i2s_esp32_write()
875 LOG_ERR("Max write size is: %u", (unsigned int)dev_data->tx.i2s_cfg.block_size); in i2s_esp32_write()
876 return -EINVAL; in i2s_esp32_write()
881 err = k_msgq_put(&dev_data->tx.queue, &item, K_MSEC(dev_data->tx.i2s_cfg.timeout)); in i2s_esp32_write()
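read and write never touch the hardware directly; they only exchange queue_item descriptors with the DMA callbacks through the per-stream k_msgq, bounded by the timeout from i2s_config. From the application side the ownership contract looks roughly like this (i2s_dev, the slabs, BLOCK_SIZE and the fill/process helpers are hypothetical):

#include <zephyr/drivers/i2s.h>

/* TX: allocate from the slab named in i2s_config, fill it, hand it to the driver */
void *block;

if (k_mem_slab_alloc(&tx_mem_slab, &block, K_FOREVER) == 0) {
        fill_with_samples(block, BLOCK_SIZE);           /* hypothetical helper */
        if (i2s_write(i2s_dev, block, BLOCK_SIZE) < 0) {
                k_mem_slab_free(&tx_mem_slab, block);   /* driver did not take ownership */
        }
}

/* RX: the driver hands back a filled block and its size; free it when done */
void *rx_block;
size_t rx_size;

if (i2s_read(i2s_dev, &rx_block, &rx_size) == 0) {
        process_samples(rx_block, rx_size);             /* hypothetical helper */
        k_mem_slab_free(&rx_mem_slab, rx_block);
}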