Lines matching "burst-length" (query tokens: burst, -, length) in the Zephyr dma_esp32 GDMA driver
4 * SPDX-License-Identifier: Apache-2.0
83 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_isr_handle_rx()
86 gdma_ll_rx_clear_interrupt_status(data->hal.dev, rx->channel_id, intr_status); in dma_esp32_isr_handle_rx()
97 status = -intr_status; in dma_esp32_isr_handle_rx()
100 if (rx->cb) { in dma_esp32_isr_handle_rx()
101 rx->cb(dev, rx->user_data, rx->channel_id * 2, status); in dma_esp32_isr_handle_rx()
108 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_isr_handle_tx()
110 gdma_ll_tx_clear_interrupt_status(data->hal.dev, tx->channel_id, intr_status); in dma_esp32_isr_handle_tx()
114 if (tx->cb) { in dma_esp32_isr_handle_tx()
115 tx->cb(dev, tx->user_data, tx->channel_id * 2 + 1, -intr_status); in dma_esp32_isr_handle_tx()
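In both handlers the channel number handed to the user callback encodes the direction: GDMA channel N surfaces as Zephyr channel 2*N for RX and 2*N + 1 for TX, and any leftover interrupt-status bits are reported as a negative status value. A minimal callback sketch that decodes this convention (my_dma_callback and done_sem are illustrative names, not part of the driver):

	#include <zephyr/kernel.h>
	#include <zephyr/drivers/dma.h>
	#include <zephyr/sys/printk.h>

	static struct k_sem done_sem;	/* passed to the driver via dma_config.user_data */

	static void my_dma_callback(const struct device *dev, void *user_data,
				    uint32_t channel, int status)
	{
		uint32_t gdma_channel = channel / 2;	/* even = RX half, odd = TX half */
		bool is_tx = (channel & 1) != 0;

		if (status < 0) {
			/* -status is the raw GDMA interrupt status word, not an errno */
			printk("GDMA ch%u %s error, intr status 0x%x\n",
			       (unsigned int)gdma_channel, is_tx ? "TX" : "RX",
			       (unsigned int)-status);
			return;
		}
		k_sem_give((struct k_sem *)user_data);	/* signal completion */
	}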
122 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_isr_handle()
123 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_isr_handle()
124 struct dma_esp32_channel *dma_channel_rx = &config->dma_channel[rx_id]; in dma_esp32_isr_handle()
125 struct dma_esp32_channel *dma_channel_tx = &config->dma_channel[tx_id]; in dma_esp32_isr_handle()
128 intr_status = gdma_ll_rx_get_interrupt_status(data->hal.dev, dma_channel_rx->channel_id); in dma_esp32_isr_handle()
133 intr_status = gdma_ll_tx_get_interrupt_status(data->hal.dev, dma_channel_tx->channel_id); in dma_esp32_isr_handle()
145 return -EINVAL; in dma_esp32_config_rx_descriptor()
148 if (!esp_ptr_dma_capable((uint32_t *)block->dest_address) in dma_esp32_config_rx_descriptor()
150 && !esp_ptr_dma_ext_capable((uint32_t *)block->dest_address) in dma_esp32_config_rx_descriptor()
153 LOG_ERR("Rx buffer not in DMA capable memory: %p", (uint32_t *)block->dest_address); in dma_esp32_config_rx_descriptor()
154 return -EINVAL; in dma_esp32_config_rx_descriptor()
157 dma_descriptor_t *desc_iter = dma_channel->desc_list; in dma_esp32_config_rx_descriptor()
160 if (block->block_size > DMA_DESCRIPTOR_BUFFER_MAX_SIZE) { in dma_esp32_config_rx_descriptor()
162 return -EINVAL; in dma_esp32_config_rx_descriptor()
165 desc_iter->buffer = (void *)block->dest_address; in dma_esp32_config_rx_descriptor()
166 desc_iter->dw0.size = block->block_size; in dma_esp32_config_rx_descriptor()
167 desc_iter->dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA; in dma_esp32_config_rx_descriptor()
168 if (!block->next_block) { in dma_esp32_config_rx_descriptor()
169 desc_iter->next = NULL; in dma_esp32_config_rx_descriptor()
172 desc_iter->next = desc_iter + 1; in dma_esp32_config_rx_descriptor()
174 block = block->next_block; in dma_esp32_config_rx_descriptor()
177 if (desc_iter->next) { in dma_esp32_config_rx_descriptor()
178 memset(dma_channel->desc_list, 0, sizeof(dma_channel->desc_list)); in dma_esp32_config_rx_descriptor()
180 return -EINVAL; in dma_esp32_config_rx_descriptor()
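A block list passed to this builder must already satisfy the two constraints the code checks: each block_size fits in one descriptor (DMA_DESCRIPTOR_BUFFER_MAX_SIZE, 4095 bytes on these SoCs since dw0.size is a 12-bit field) and every destination buffer lives in DMA-capable memory. A sketch of a two-block RX scatter list that meets both (buffer and sizes chosen for illustration):

	#include <zephyr/drivers/dma.h>

	static uint8_t rx_buf[8000] __aligned(4);	/* internal SRAM: DMA-capable */
	static struct dma_block_config blk0, blk1;

	static void setup_rx_scatter_list(struct dma_config *cfg)
	{
		blk0.dest_address = (uint32_t)&rx_buf[0];
		blk0.block_size   = 4000;	/* <= DMA_DESCRIPTOR_BUFFER_MAX_SIZE */
		blk0.next_block   = &blk1;

		blk1.dest_address = (uint32_t)&rx_buf[4000];
		blk1.block_size   = 4000;	/* last block: next_block stays NULL */

		cfg->head_block  = &blk0;
		cfg->block_count = 2;
	}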
189 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_config_rx()
190 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_config_rx()
192 dma_channel->dir = DMA_RX; in dma_esp32_config_rx()
194 gdma_ll_rx_reset_channel(data->hal.dev, dma_channel->channel_id); in dma_esp32_config_rx()
197 data->hal.dev, dma_channel->channel_id, in dma_esp32_config_rx()
198 dma_channel->periph_id == SOC_GDMA_TRIG_PERIPH_M2M0 ? ESP_DMA_M2M_ON in dma_esp32_config_rx()
200 dma_channel->periph_id == SOC_GDMA_TRIG_PERIPH_M2M0 ? ESP_DMA_M2M_ON in dma_esp32_config_rx()
201 : dma_channel->periph_id); in dma_esp32_config_rx()
203 if (config_dma->dest_burst_length) { in dma_esp32_config_rx()
205 * RX channel burst mode depends on specific data alignment in dma_esp32_config_rx()
207 gdma_ll_rx_enable_data_burst(data->hal.dev, dma_channel->channel_id, in dma_esp32_config_rx()
208 config->sram_alignment >= 4); in dma_esp32_config_rx()
209 gdma_ll_rx_enable_descriptor_burst(data->hal.dev, dma_channel->channel_id, in dma_esp32_config_rx()
210 config->sram_alignment >= 4); in dma_esp32_config_rx()
213 dma_channel->cb = config_dma->dma_callback; in dma_esp32_config_rx()
214 dma_channel->user_data = config_dma->user_data; in dma_esp32_config_rx()
216 gdma_ll_rx_clear_interrupt_status(data->hal.dev, dma_channel->channel_id, UINT32_MAX); in dma_esp32_config_rx()
217 gdma_ll_rx_enable_interrupt(data->hal.dev, dma_channel->channel_id, UINT32_MAX, in dma_esp32_config_rx()
218 config_dma->dma_callback != NULL); in dma_esp32_config_rx()
220 return dma_esp32_config_rx_descriptor(dma_channel, config_dma->head_block); in dma_esp32_config_rx()
228 return -EINVAL; in dma_esp32_config_tx_descriptor()
231 if (!esp_ptr_dma_capable((uint32_t *)block->source_address) in dma_esp32_config_tx_descriptor()
233 && !esp_ptr_dma_ext_capable((uint32_t *)block->source_address) in dma_esp32_config_tx_descriptor()
237 (uint32_t *)block->source_address); in dma_esp32_config_tx_descriptor()
238 return -EINVAL; in dma_esp32_config_tx_descriptor()
241 dma_descriptor_t *desc_iter = dma_channel->desc_list; in dma_esp32_config_tx_descriptor()
244 if (block->block_size > DMA_DESCRIPTOR_BUFFER_MAX_SIZE) { in dma_esp32_config_tx_descriptor()
246 return -EINVAL; in dma_esp32_config_tx_descriptor()
249 desc_iter->buffer = (void *)block->source_address; in dma_esp32_config_tx_descriptor()
250 desc_iter->dw0.size = block->block_size; in dma_esp32_config_tx_descriptor()
251 desc_iter->dw0.length = block->block_size; in dma_esp32_config_tx_descriptor()
252 desc_iter->dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA; in dma_esp32_config_tx_descriptor()
253 if (!block->next_block) { in dma_esp32_config_tx_descriptor()
254 desc_iter->next = NULL; in dma_esp32_config_tx_descriptor()
255 desc_iter->dw0.suc_eof = 1; in dma_esp32_config_tx_descriptor()
258 desc_iter->next = desc_iter + 1; in dma_esp32_config_tx_descriptor()
260 block = block->next_block; in dma_esp32_config_tx_descriptor()
263 if (desc_iter->next) { in dma_esp32_config_tx_descriptor()
264 memset(dma_channel->desc_list, 0, sizeof(dma_channel->desc_list)); in dma_esp32_config_tx_descriptor()
266 return -EINVAL; in dma_esp32_config_tx_descriptor()
275 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_config_tx()
277 dma_channel->dir = DMA_TX; in dma_esp32_config_tx()
279 gdma_ll_tx_reset_channel(data->hal.dev, dma_channel->channel_id); in dma_esp32_config_tx()
282 data->hal.dev, dma_channel->channel_id, in dma_esp32_config_tx()
283 dma_channel->periph_id == SOC_GDMA_TRIG_PERIPH_M2M0 ? ESP_DMA_M2M_ON in dma_esp32_config_tx()
285 dma_channel->periph_id == SOC_GDMA_TRIG_PERIPH_M2M0 ? ESP_DMA_M2M_ON in dma_esp32_config_tx()
286 : dma_channel->periph_id); in dma_esp32_config_tx()
289 * TX channel can always enable burst mode, no matter data alignment in dma_esp32_config_tx()
291 if (config_dma->source_burst_length) { in dma_esp32_config_tx()
292 gdma_ll_tx_enable_data_burst(data->hal.dev, dma_channel->channel_id, true); in dma_esp32_config_tx()
293 gdma_ll_tx_enable_descriptor_burst(data->hal.dev, dma_channel->channel_id, true); in dma_esp32_config_tx()
296 dma_channel->cb = config_dma->dma_callback; in dma_esp32_config_tx()
297 dma_channel->user_data = config_dma->user_data; in dma_esp32_config_tx()
299 gdma_ll_tx_clear_interrupt_status(data->hal.dev, dma_channel->channel_id, UINT32_MAX); in dma_esp32_config_tx()
301 gdma_ll_tx_enable_interrupt(data->hal.dev, dma_channel->channel_id, GDMA_LL_EVENT_TX_EOF, in dma_esp32_config_tx()
302 config_dma->dma_callback != NULL); in dma_esp32_config_tx()
304 return dma_esp32_config_tx_descriptor(dma_channel, config_dma->head_block); in dma_esp32_config_tx()
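Note the asymmetry with the RX path above: RX gates burst on config->sram_alignment >= 4, while TX enables it unconditionally once requested, and TX arms only the EOF interrupt (raised when the descriptor flagged suc_eof completes) where RX arms all sources. In both directions the burst length from struct dma_config acts as a boolean enable, and dma_esp32_config() below rejects configs where the two lengths differ. A hedged configuration sketch (MY_PERIPH_TRIG is a placeholder, not a real SOC_GDMA_TRIG_PERIPH_* value):

	#include <zephyr/drivers/dma.h>

	#define MY_PERIPH_TRIG 0	/* placeholder trigger ID for the served peripheral */

	static struct dma_block_config tx_blk;	/* source_address/block_size set at runtime */

	static struct dma_config tx_cfg = {
		.channel_direction   = MEMORY_TO_PERIPHERAL,
		.dma_slot            = MY_PERIPH_TRIG,
		.source_burst_length = 4,	/* any non-zero value enables burst */
		.dest_burst_length   = 4,	/* must equal source_burst_length */
		.block_count         = 1,
		.head_block          = &tx_blk,
		.dma_callback        = my_dma_callback,	/* from the earlier sketch */
		.user_data           = &done_sem,	/* semaphore from the earlier sketch */
	};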
310 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_config()
311 struct dma_esp32_channel *dma_channel = &config->dma_channel[channel]; in dma_esp32_config()
314 if (channel >= config->dma_channel_max) { in dma_esp32_config()
316 return -EINVAL; in dma_esp32_config()
320 return -EINVAL; in dma_esp32_config()
323 if (config_dma->source_burst_length != config_dma->dest_burst_length) { in dma_esp32_config()
324 LOG_ERR("Source and destination burst lengths must be equal"); in dma_esp32_config()
325 return -EINVAL; in dma_esp32_config()
328 dma_channel->periph_id = config_dma->channel_direction == MEMORY_TO_MEMORY in dma_esp32_config()
330 : config_dma->dma_slot; in dma_esp32_config()
332 dma_channel->channel_id = channel / 2; in dma_esp32_config()
334 switch (config_dma->channel_direction) { in dma_esp32_config()
340 &config->dma_channel[dma_channel->channel_id * 2]; in dma_esp32_config()
342 &config->dma_channel[(dma_channel->channel_id * 2) + 1]; in dma_esp32_config()
344 dma_channel_rx->channel_id = dma_channel->channel_id; in dma_esp32_config()
345 dma_channel_tx->channel_id = dma_channel->channel_id; in dma_esp32_config()
347 dma_channel_rx->periph_id = dma_channel->periph_id; in dma_esp32_config()
348 dma_channel_tx->periph_id = dma_channel->periph_id; in dma_esp32_config()
361 return -EINVAL; in dma_esp32_config()
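The channel arithmetic is worth spelling out: channel_id = channel / 2 means Zephyr channels 2*N and 2*N + 1 are the RX and TX halves of GDMA channel N, and a MEMORY_TO_MEMORY config wires up both halves at once while ignoring dma_slot. A usage sketch under that convention (the devicetree node label dma is an assumption about the board):

	#include <zephyr/device.h>
	#include <zephyr/drivers/dma.h>

	static uint8_t src_buf[1024] __aligned(4);
	static uint8_t dst_buf[1024] __aligned(4);

	static int m2m_copy(void)
	{
		const struct device *dma_dev = DEVICE_DT_GET(DT_NODELABEL(dma));
		struct dma_block_config blk = {
			.source_address = (uint32_t)src_buf,	/* consumed by the TX half */
			.dest_address   = (uint32_t)dst_buf,	/* consumed by the RX half */
			.block_size     = sizeof(src_buf),
		};
		struct dma_config cfg = {
			.channel_direction = MEMORY_TO_MEMORY,	/* dma_slot is ignored */
			.block_count       = 1,
			.head_block        = &blk,
		};
		int ret = dma_config(dma_dev, 0, &cfg);	/* channels 0/1 = GDMA channel 0 */

		if (ret == 0) {
			ret = dma_start(dma_dev, 0);
		}
		return ret;
	}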
369 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_start()
370 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_start()
371 struct dma_esp32_channel *dma_channel = &config->dma_channel[channel]; in dma_esp32_start()
373 if (channel >= config->dma_channel_max) { in dma_esp32_start()
375 return -EINVAL; in dma_esp32_start()
378 if (dma_channel->periph_id == SOC_GDMA_TRIG_PERIPH_M2M0) { in dma_esp32_start()
380 &config->dma_channel[dma_channel->channel_id * 2]; in dma_esp32_start()
382 &config->dma_channel[(dma_channel->channel_id * 2) + 1]; in dma_esp32_start()
384 gdma_ll_rx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
386 gdma_ll_tx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
389 gdma_ll_rx_set_desc_addr(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
390 (int32_t)dma_channel_rx->desc_list); in dma_esp32_start()
391 gdma_ll_rx_start(data->hal.dev, dma_channel->channel_id); in dma_esp32_start()
393 gdma_ll_tx_set_desc_addr(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
394 (int32_t)dma_channel_tx->desc_list); in dma_esp32_start()
395 gdma_ll_tx_start(data->hal.dev, dma_channel->channel_id); in dma_esp32_start()
397 if (dma_channel->dir == DMA_RX) { in dma_esp32_start()
398 gdma_ll_rx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
400 gdma_ll_rx_set_desc_addr(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
401 (int32_t)dma_channel->desc_list); in dma_esp32_start()
402 gdma_ll_rx_start(data->hal.dev, dma_channel->channel_id); in dma_esp32_start()
403 } else if (dma_channel->dir == DMA_TX) { in dma_esp32_start()
404 gdma_ll_tx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
406 gdma_ll_tx_set_desc_addr(data->hal.dev, dma_channel->channel_id, in dma_esp32_start()
407 (int32_t)dma_channel->desc_list); in dma_esp32_start()
408 gdma_ll_tx_start(data->hal.dev, dma_channel->channel_id); in dma_esp32_start()
411 return -EINVAL; in dma_esp32_start()
420 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_stop()
421 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_stop()
422 struct dma_esp32_channel *dma_channel = &config->dma_channel[channel]; in dma_esp32_stop()
424 if (channel >= config->dma_channel_max) { in dma_esp32_stop()
426 return -EINVAL; in dma_esp32_stop()
429 if (dma_channel->periph_id == SOC_GDMA_TRIG_PERIPH_M2M0) { in dma_esp32_stop()
430 gdma_ll_rx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_stop()
432 gdma_ll_tx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_stop()
434 gdma_ll_rx_stop(data->hal.dev, dma_channel->channel_id); in dma_esp32_stop()
435 gdma_ll_tx_stop(data->hal.dev, dma_channel->channel_id); in dma_esp32_stop()
438 if (dma_channel->dir == DMA_RX) { in dma_esp32_stop()
439 gdma_ll_rx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_stop()
441 gdma_ll_rx_stop(data->hal.dev, dma_channel->channel_id); in dma_esp32_stop()
442 } else if (dma_channel->dir == DMA_TX) { in dma_esp32_stop()
443 gdma_ll_tx_enable_interrupt(data->hal.dev, dma_channel->channel_id, in dma_esp32_stop()
445 gdma_ll_tx_stop(data->hal.dev, dma_channel->channel_id); in dma_esp32_stop()
454 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_get_status()
455 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_get_status()
456 struct dma_esp32_channel *dma_channel = &config->dma_channel[channel]; in dma_esp32_get_status()
459 if (channel >= config->dma_channel_max) { in dma_esp32_get_status()
461 return -EINVAL; in dma_esp32_get_status()
465 return -EINVAL; in dma_esp32_get_status()
470 if (dma_channel->dir == DMA_RX) { in dma_esp32_get_status()
471 status->busy = !gdma_ll_rx_is_fsm_idle(data->hal.dev, dma_channel->channel_id); in dma_esp32_get_status()
472 status->dir = PERIPHERAL_TO_MEMORY; in dma_esp32_get_status()
474 data->hal.dev, dma_channel->channel_id); in dma_esp32_get_status()
475 if (desc >= dma_channel->desc_list) { in dma_esp32_get_status()
476 status->read_position = desc - dma_channel->desc_list; in dma_esp32_get_status()
477 status->total_copied = desc->dw0.length in dma_esp32_get_status()
478 + dma_channel->desc_list[0].dw0.size in dma_esp32_get_status()
479 * status->read_position; in dma_esp32_get_status()
481 } else if (dma_channel->dir == DMA_TX) { in dma_esp32_get_status()
482 status->busy = !gdma_ll_tx_is_fsm_idle(data->hal.dev, dma_channel->channel_id); in dma_esp32_get_status()
483 status->dir = MEMORY_TO_PERIPHERAL; in dma_esp32_get_status()
485 data->hal.dev, dma_channel->channel_id); in dma_esp32_get_status()
486 if (desc >= dma_channel->desc_list) { in dma_esp32_get_status()
487 status->write_position = desc - dma_channel->desc_list; in dma_esp32_get_status()
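The RX progress arithmetic above assumes every descriptor was programmed with the same size as desc_list[0]: total_copied is the byte count held by the in-flight descriptor plus size * read_position. As a worked example, with 4092-byte descriptors and the engine on the third one (read_position = 2) holding 1000 received bytes, total_copied = 1000 + 4092 * 2 = 9184.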
497 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_reload()
498 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; in dma_esp32_reload()
499 struct dma_esp32_channel *dma_channel = &config->dma_channel[channel]; in dma_esp32_reload()
500 dma_descriptor_t *desc_iter = dma_channel->desc_list; in dma_esp32_reload()
503 if (channel >= config->dma_channel_max) { in dma_esp32_reload()
505 return -EINVAL; in dma_esp32_reload()
508 if (dma_channel->dir == DMA_RX) { in dma_esp32_reload()
509 gdma_ll_rx_reset_channel(data->hal.dev, dma_channel->channel_id); in dma_esp32_reload()
511 } else if (dma_channel->dir == DMA_TX) { in dma_esp32_reload()
512 gdma_ll_tx_reset_channel(data->hal.dev, dma_channel->channel_id); in dma_esp32_reload()
515 return -EINVAL; in dma_esp32_reload()
518 for (int i = 0; i < ARRAY_SIZE(dma_channel->desc_list); ++i) { in dma_esp32_reload()
520 desc_iter->buffer = (void *)(buf + DMA_DESCRIPTOR_BUFFER_MAX_SIZE * i); in dma_esp32_reload()
521 desc_iter->dw0.owner = DMA_DESCRIPTOR_BUFFER_OWNER_DMA; in dma_esp32_reload()
523 desc_iter->dw0.size = size; in dma_esp32_reload()
524 if (dma_channel->dir == DMA_TX) { in dma_esp32_reload()
525 desc_iter->dw0.length = size; in dma_esp32_reload()
526 desc_iter->dw0.suc_eof = 1; in dma_esp32_reload()
528 desc_iter->next = NULL; in dma_esp32_reload()
531 desc_iter->dw0.size = DMA_DESCRIPTOR_BUFFER_MAX_SIZE; in dma_esp32_reload()
532 if (dma_channel->dir == DMA_TX) { in dma_esp32_reload()
533 desc_iter->dw0.length = DMA_DESCRIPTOR_BUFFER_MAX_SIZE; in dma_esp32_reload()
535 size -= DMA_DESCRIPTOR_BUFFER_MAX_SIZE; in dma_esp32_reload()
536 desc_iter->next = desc_iter + 1; in dma_esp32_reload()
540 if (desc_iter->next) { in dma_esp32_reload()
543 return -EINVAL; in dma_esp32_reload()
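Reload thus replaces the configured scatter list with one contiguous buffer, split into DMA_DESCRIPTOR_BUFFER_MAX_SIZE chunks, and (per the trailing desc_iter->next check) fails with -EINVAL if the buffer needs more descriptors than desc_list holds. A sketch of re-arming an RX channel; the claim that the destination argument carries the buffer for RX (and the source argument for TX) is inferred from the direction check above and is an assumption:

	#include <zephyr/drivers/dma.h>

	static uint8_t new_buf[8000] __aligned(4);

	static int rearm_rx(const struct device *dma_dev, uint32_t rx_channel)
	{
		int ret = dma_reload(dma_dev, rx_channel, 0 /* src unused for RX */,
				     (uint32_t)new_buf, sizeof(new_buf));

		if (ret == 0) {
			ret = dma_start(dma_dev, rx_channel);
		}
		return ret;
	}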
551 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_configure_irq()
552 struct irq_config *irq_cfg = (struct irq_config *)config->irq_config; in dma_esp32_configure_irq()
554 for (uint8_t i = 0; i < config->irq_size; i++) { in dma_esp32_configure_irq()
558 (ISR_HANDLER)config->irq_handlers[i], in dma_esp32_configure_irq()
572 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; in dma_esp32_init()
573 struct dma_esp32_data *data = (struct dma_esp32_data *)dev->data; in dma_esp32_init()
577 if (!device_is_ready(config->clock_dev)) { in dma_esp32_init()
579 return -ENODEV; in dma_esp32_init()
582 ret = clock_control_on(config->clock_dev, config->clock_subsys); in dma_esp32_init()
595 dma_channel = &config->dma_channel[i]; in dma_esp32_init()
596 dma_channel->cb = NULL; in dma_esp32_init()
597 dma_channel->dir = DMA_UNCONFIGURED; in dma_esp32_init()
598 dma_channel->periph_id = ESP_GDMA_TRIG_PERIPH_INVALID; in dma_esp32_init()
599 memset(dma_channel->desc_list, 0, sizeof(dma_channel->desc_list)); in dma_esp32_init()
602 gdma_hal_init(&data->hal, 0); in dma_esp32_init()
603 gdma_ll_enable_clock(data->hal.dev, true); in dma_esp32_init()
622 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; \
623 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; \
624 uint32_t intr_status = gdma_ll_rx_get_interrupt_status(data->hal.dev, channel); \
626 dma_esp32_isr_handle_rx(dev, &config->dma_channel[channel * 2], \
634 struct dma_esp32_config *config = (struct dma_esp32_config *)dev->config; \
635 struct dma_esp32_data *data = (struct dma_esp32_data *const)(dev)->data; \
636 uint32_t intr_status = gdma_ll_tx_get_interrupt_status(data->hal.dev, channel); \
638 dma_esp32_isr_handle_tx(dev, &config->dma_channel[channel * 2 + 1], \
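These two macros close the loop on the channel-numbering convention: each GDMA channel gets a dedicated RX and TX ISR that reads its half's interrupt status and dispatches to dma_channel[channel * 2] or dma_channel[channel * 2 + 1], matching the even/odd encoding the user callbacks see.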