
 * SPDX-License-Identifier: Apache-2.0

#include <zephyr/drivers/dma.h>
#include <zephyr/dt-bindings/dma/infineon-xmc4xxx-dma.h>
        uint32_t llp; /* linked-list pointer to the next descriptor, or NULL for the last descriptor */
        XMC_DMA_t *dma;

        struct dma_xmc4xxx_channel *channels;
        uint32_t channels_event = get_channels_event(dma);                     \
        int channel = find_lsb_set(channels_event) - 1;                         \
        dma_channel = &dev_data->channels[channel];                             \
        /* dma_start() and re-enable the event */                               \
        XMC_DMA_CH_ClearEventStatus(dma, channel, XMC_DMA_CH_##event_test);     \
        if (dma_channel->cb) {                                                  \
                dma_channel->cb(dev, dma_channel->user_data, channel, (ret));   \
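
/*
 * For orientation (not in the original source): the instantiation
 * HANDLE_EVENT(EVENT_ERROR, XMC_DMA_GetChannelsErrorStatus, -EPERM) used in
 * the ISR below expands roughly to:
 *
 *   uint32_t channels_event = XMC_DMA_GetChannelsErrorStatus(dma);
 *   int channel = find_lsb_set(channels_event) - 1;
 *   ...
 *   XMC_DMA_CH_ClearEventStatus(dma, channel, XMC_DMA_CH_EVENT_ERROR);
 *   if (dma_channel->cb) {
 *           dma_channel->cb(dev, dma_channel->user_data, channel, (-EPERM));
 *   }
 */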

/* The ISR is level-triggered, so we don't have to loop over all the channels */
        struct dma_xmc4xxx_data *dev_data = dev->data;
        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        int num_dma_channels = dev_data->ctx.dma_channels;
        XMC_DMA_t *dma = dev_cfg->dma;

        /* There are two types of possible DMA error events: */
        /* 1. Error response from AHB slave on the HRESP bus during DMA transfer. */

        event = XMC_DMA_GetEventStatus(dma);
        HANDLE_EVENT(EVENT_ERROR, XMC_DMA_GetChannelsErrorStatus, -EPERM);

        sr_overruns = DLR->OVRSTAT;

        DLR->OVRCLR = sr_overruns;

                dma_channel = &dev_data->channels[i];
                if (dma_channel->dlr_line != DLR_LINE_UNSET &&
                    sr_overruns & BIT(dma_channel->dlr_line)) {

                        /* From XMC4700/4800 reference documentation - Section 4.4.1 */

                        DLR->LNEN &= ~BIT(dma_channel->dlr_line);
                        DLR->LNEN |= BIT(dma_channel->dlr_line);

                        if (dma_channel->cb != NULL) {
                                dma_channel->cb(dev, dma_channel->user_data, i, -EIO);

        ctll = config->dest_data_size / 2 << GPDMA0_CH_CTLL_DST_TR_WIDTH_Pos |
               config->source_data_size / 2 << GPDMA0_CH_CTLL_SRC_TR_WIDTH_Pos |
               block->dest_addr_adj << GPDMA0_CH_CTLL_DINC_Pos |
               block->source_addr_adj << GPDMA0_CH_CTLL_SINC_Pos |
               config->dest_burst_length / 4 << GPDMA0_CH_CTLL_DEST_MSIZE_Pos |
               config->source_burst_length / 4 << GPDMA0_CH_CTLL_SRC_MSIZE_Pos |

        if (config->channel_direction == MEMORY_TO_PERIPHERAL) {

        if (config->channel_direction == PERIPHERAL_TO_MEMORY) {

        if (block->source_gather_en && block->source_gather_count > 0) {

        if (block->dest_scatter_en && block->dest_scatter_count > 0) {
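
/*
 * Worked example of the CTLL encoding above (derived from the divisions
 * shown): data sizes of 1/2/4 bytes map to TR_WIDTH codes 0/1/2 via the
 * integer division by 2, and burst lengths of 1/4/8 map to MSIZE codes
 * 0/1/2 via the division by 4. For instance, dest_data_size = 4 with
 * dest_burst_length = 8 yields DST_TR_WIDTH = 2 and DEST_MSIZE = 2.
 */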

        if (block->type##_en && block->type##_count > 0 && !type.enabled) {    \
                type.interval = block->type##_interval;                         \
                type.count = block->type##_count;                               \
        } else if (block->type##_en && type.enabled) {                          \
                if (block->type##_interval != type.interval ||                  \
                    block->type##_count != type.count) {                        \
                        return -EINVAL;                                         \
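
/*
 * Expansion sketch (not in the original source): invoking the macro as
 * SET_CHECK_SCATTER_GATHER(source_gather) token-pastes the argument into the
 * dma_block_config field names, e.g.
 *
 *   if (block->source_gather_en && block->source_gather_count > 0 &&
 *       !source_gather.enabled) {
 *           ...
 *           source_gather.interval = block->source_gather_interval;
 *           source_gather.count = block->source_gather_count;
 *   } else if (block->source_gather_en && source_gather.enabled) {
 *   ...
 */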

        struct dma_xmc4xxx_data *dev_data = dev->data;
        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        struct dma_block_config *block = config->head_block;
        XMC_DMA_t *dma = dev_cfg->dma;

        if (channel >= dev_data->ctx.dma_channels) {
                return -EINVAL;

        if (config->channel_priority > MAX_PRIORITY) {
                return -EINVAL;

        if (config->source_chaining_en || config->dest_chaining_en) {
                return -EINVAL;

        if (config->channel_direction != MEMORY_TO_MEMORY &&
            config->channel_direction != MEMORY_TO_PERIPHERAL &&
            config->channel_direction != PERIPHERAL_TO_MEMORY) {
                return -EINVAL;

        if (config->block_count > CONFIG_DMA_XMC4XXX_NUM_DESCRIPTORS) {
                return -EINVAL;

        if (block->source_gather_en || block->dest_scatter_en || config->block_count != 1 ||
            config->cyclic) {
                if ((uint32_t)dma != (uint32_t)XMC_DMA0 || channel >= 2) {
                        LOG_ERR("Multi-block, cyclic and gather/scatter only supported on DMA0 on "
                                "channels 0 and 1");
                        return -EINVAL;

        if (config->dest_data_size != 1 && config->dest_data_size != 2 &&
            config->dest_data_size != 4) {
                return -EINVAL;

        if (config->source_data_size != 1 && config->source_data_size != 2 &&
            config->source_data_size != 4) {
                return -EINVAL;

        if (config->source_burst_length != 1 && config->source_burst_length != 4 &&
            config->source_burst_length != 8) {
                return -EINVAL;

        if (config->dest_burst_length != 1 && config->dest_burst_length != 4 &&
            config->dest_burst_length != 8) {
                return -EINVAL;

        if (block->block_size / config->source_data_size > DMA_MAX_BLOCK_LEN) {
                return -EINVAL;

        if (XMC_DMA_CH_IsEnabled(dma, channel)) {
                return -EINVAL;

        XMC_DMA_CH_ClearEventStatus(dma, channel, ALL_EVENTS);

        /* check dma slot number */
        if (config->block_count == 1 && config->cyclic == 0) {

                dma->CH[channel].SAR = block->source_address;
                dma->CH[channel].DAR = block->dest_address;
                dma->CH[channel].LLP = 0;

                dma->CH[channel].CTLH = block->block_size / config->source_data_size;

                dma->CH[channel].CTLL = ctll;

                dma->CH[channel].LLP = (uint32_t)&descriptor_list[channel][0];
                dma->CH[channel].CTLL = BIT(GPDMA0_CH_CTLL_LLP_DST_EN_Pos) |

                for (int i = 0; i < config->block_count; i++) {

                        desc->sar = block->source_address;
                        desc->dar = block->dest_address;
                        desc->ctlh = block->block_size / config->source_data_size;

                        if (i < config->block_count - 1) {
                                desc->llp = (uint32_t)&descriptor_list[channel][i + 1];
                        } else if (config->cyclic) {
                                desc->llp = (uint32_t)&descriptor_list[channel][0];
                        } else {
                                desc->llp = 0;

                        if (i < config->block_count - 1 || config->cyclic) {

                        desc->ctll = ctll;

                        block = block->next_block;

        block = config->head_block;

        dma->CH[channel].CFGL = (config->channel_priority << GPDMA0_CH_CFGL_CH_PRIOR_Pos) |

        dma->CH[channel].CFGH = 0;

        if (config->channel_direction == MEMORY_TO_PERIPHERAL ||
            config->channel_direction == PERIPHERAL_TO_MEMORY) {
                uint8_t request_source = XMC4XXX_DMA_GET_REQUEST_SOURCE(config->dma_slot);
                uint8_t dlr_line_reg = XMC4XXX_DMA_GET_LINE(config->dma_slot);

                if ((uint32_t)dma == (uint32_t)XMC_DMA0 && dlr_line > 7) {
                        return -EINVAL;

                if ((uint32_t)dma == (uint32_t)XMC_DMA1 && (dlr_line < 8 || dlr_line > 11)) {
                        return -EINVAL;

                DLR->OVRCLR = BIT(dlr_line);
                /* enable the dma line */
                DLR->LNEN &= ~BIT(dlr_line);
                DLR->LNEN |= BIT(dlr_line);

                /* connect DMA Line to SR */
                if ((uint32_t)dma == (uint32_t)XMC_DMA0) {
                        DLR->SRSEL0 &= ~(DLR_SRSEL_RS_MSK << (dlr_line_reg * DLR_SRSEL_RS_BITSIZE));
                        DLR->SRSEL0 |= request_source << (dlr_line_reg * DLR_SRSEL_RS_BITSIZE);

                if ((uint32_t)dma == (uint32_t)XMC_DMA1) {
                        dlr_line_reg -= 8;
                        DLR->SRSEL1 &= ~(DLR_SRSEL_RS_MSK << (dlr_line_reg * DLR_SRSEL_RS_BITSIZE));
                        DLR->SRSEL1 |= request_source << (dlr_line_reg * DLR_SRSEL_RS_BITSIZE);

                /* connect DMA channel to DMA line */
                if (config->channel_direction == MEMORY_TO_PERIPHERAL) {
                        dma->CH[channel].CFGH = (dlr_line_reg << GPDMA0_CH_CFGH_DEST_PER_Pos) | 4;
                        dma->CH[channel].CFGL &= ~BIT(GPDMA0_CH_CFGL_HS_SEL_DST_Pos);

                if (config->channel_direction == PERIPHERAL_TO_MEMORY) {
                        dma->CH[channel].CFGH = (dlr_line_reg << GPDMA0_CH_CFGH_SRC_PER_Pos) | 4;
                        dma->CH[channel].CFGL &= ~BIT(GPDMA0_CH_CFGL_HS_SEL_SRC_Pos);

        if (block->fifo_mode_control > 0) {
                dma->CH[channel].CFGH |= GPDMA0_CH_CFGH_FIFO_MODE_Msk;

        if ((uint32_t)dma == (uint32_t)XMC_DMA0) {

                dma->CH[channel].SGR = 0;
                dma->CH[channel].DSR = 0;

                dma->CH[channel].SGR = (source_gather.interval & GPDMA0_CH_SGR_SGI_Msk) |

                dma->CH[channel].DSR = (dest_scatter.interval & GPDMA0_CH_DSR_DSI_Msk) |

        dev_data->channels[channel].cb = config->dma_callback;
        dev_data->channels[channel].user_data = config->user_data;
        dev_data->channels[channel].transfer_size = block->block_size;
        dev_data->channels[channel].source_data_size = config->source_data_size;
        dev_data->channels[channel].dlr_line = dlr_line;
        dev_data->channels[channel].channel_direction = config->channel_direction;
        dev_data->channels[channel].dest_addr_adj = block->dest_addr_adj;
        dev_data->channels[channel].dest_address = block->dest_address;

        if (config->block_count > 1) {
                dev_data->channels[channel].multi_block = true;
        } else {
                dev_data->channels[channel].multi_block = false;
        }

        XMC_DMA_CH_DisableEvent(dma, channel, ALL_EVENTS);
        XMC_DMA_CH_EnableEvent(dma, channel, XMC_DMA_CH_EVENT_TRANSFER_COMPLETE);

        if (config->complete_callback_en) {
                XMC_DMA_CH_EnableEvent(dma, channel, XMC_DMA_CH_EVENT_BLOCK_TRANSFER_COMPLETE);

        if (!config->error_callback_dis) {
                XMC_DMA_CH_EnableEvent(dma, channel, XMC_DMA_CH_EVENT_ERROR);

        LOG_DBG("Configured channel %d for %08X to %08X (%u)", channel, block->source_address,
                block->dest_address, block->block_size);
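
/*
 * Usage sketch (illustrative only, not part of the driver): configuring and
 * starting a single-block memory-to-memory transfer through the generic
 * Zephyr DMA API, which lands in the config/start handlers above. The guard
 * macro and function name are hypothetical.
 */
#ifdef DMA_XMC4XXX_USAGE_SKETCH
static int example_mem_to_mem(const struct device *dma_dev, uint32_t channel,
                              uint8_t *dst, const uint8_t *src, size_t len)
{
        struct dma_block_config block = {
                .source_address = (uint32_t)src,
                .dest_address = (uint32_t)dst,
                .block_size = len,
                .source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
                .dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
        };
        struct dma_config cfg = {
                .channel_direction = MEMORY_TO_MEMORY,
                .source_data_size = 1,
                .dest_data_size = 1,
                .source_burst_length = 1,
                .dest_burst_length = 1,
                .block_count = 1,
                .head_block = &block,
        };
        int ret = dma_config(dma_dev, channel, &cfg);

        if (ret < 0) {
                return ret;
        }
        return dma_start(dma_dev, channel);
}
#endif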

        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        struct dma_xmc4xxx_data *dev_data = dev->data;
        uint8_t dlr_line = dev_data->channels[channel].dlr_line;

        if (dlr_line != DLR_LINE_UNSET && (DLR->LNEN & BIT(dlr_line)) == 0) {
                DLR->LNEN |= BIT(dlr_line);

        XMC_DMA_CH_Enable(dev_cfg->dma, channel);

        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        struct dma_xmc4xxx_data *dev_data = dev->data;
        XMC_DMA_t *dma = dev_cfg->dma;

        dma_channel = &dev_data->channels[channel];
        XMC_DMA_CH_Suspend(dma, channel);
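
        /* Wait for the channel FIFO to drain before the channel is disabled */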
        while (XMC_DMA_CH_IsEnabled(dma, channel) &&
               (dma->CH[channel].CFGL & GPDMA0_CH_CFGL_FIFO_EMPTY_Msk) == 0) {

        if (dma_channel->dlr_line != DLR_LINE_UNSET) {
                DLR->LNEN &= ~BIT(dma_channel->dlr_line);

        XMC_DMA_CH_Disable(dma, channel);
        XMC_DMA_CH_Resume(dma, channel);

        struct dma_xmc4xxx_data *dev_data = dev->data;
        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        XMC_DMA_t *dma = dev_cfg->dma;

        if (channel >= dev_data->ctx.dma_channels) {
                return -EINVAL;

        if (XMC_DMA_CH_IsEnabled(dma, channel)) {
                return -EINVAL;

        dma_channel = &dev_data->channels[channel];
        block_ts = size / dma_channel->source_data_size;

                return -EINVAL;

        dma_channel->transfer_size = size;
        dma_channel->dest_address = dst;

        dma->CH[channel].SAR = src;
        dma->CH[channel].DAR = dst;
        dma->CH[channel].CTLH = block_ts;
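
/*
 * Illustrative only (hypothetical guard and name): re-arming a completed
 * channel with fresh buffers via the generic API, which calls the reload
 * handler above, then restarting the channel.
 */
#ifdef DMA_XMC4XXX_USAGE_SKETCH
static int example_rearm(const struct device *dma_dev, uint32_t channel,
                         uint32_t src, uint32_t dst, size_t len)
{
        int ret = dma_reload(dma_dev, channel, src, dst, len);

        return (ret == 0) ? dma_start(dma_dev, channel) : ret;
}
#endif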

        struct dma_xmc4xxx_data *dev_data = dev->data;
        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        XMC_DMA_t *dma = dev_cfg->dma;

        if (channel >= dev_data->ctx.dma_channels) {
                return -EINVAL;

        dma_channel = &dev_data->channels[channel];

        stat->busy = XMC_DMA_CH_IsEnabled(dma, channel);

        if (dma_channel->multi_block) {
                /* not supported for multi-block transfers */
                stat->pending_length = 0;

        if (dma_channel->dest_addr_adj == DMA_ADDR_ADJ_INCREMENT) {
                transferred_bytes = dma->CH[channel].DAR - dma_channel->dest_address;
                stat->pending_length = dma_channel->transfer_size - transferred_bytes;
        } else if (dma_channel->dest_addr_adj == DMA_ADDR_ADJ_DECREMENT) {
                transferred_bytes = dma_channel->dest_address - dma->CH[channel].DAR;
                stat->pending_length = dma_channel->transfer_size - transferred_bytes;
        } else {
                uint32_t num_source_transfers = XMC_DMA_CH_GetTransferredData(dma, channel);

                stat->pending_length = dma_channel->transfer_size -
                                       num_source_transfers * dma_channel->source_data_size;
        }

        /* stat->dir and the other remaining fields are not set; they are not required here */
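
/*
 * Illustrative only (hypothetical guard and name): polling the bytes still
 * outstanding on a channel via the generic API, served by the get_status
 * handler above.
 */
#ifdef DMA_XMC4XXX_USAGE_SKETCH
static uint32_t example_bytes_remaining(const struct device *dma_dev, uint32_t channel)
{
        struct dma_status stat;

        if (dma_get_status(dma_dev, channel, &stat) < 0) {
                return 0;
        }
        return stat.pending_length;
}
#endif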

        struct dma_xmc4xxx_data *dev_data = dev->data;
        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        XMC_DMA_t *dma = dev_cfg->dma;

        if (channel >= dev_data->ctx.dma_channels) {
                return -EINVAL;

        XMC_DMA_CH_Suspend(dma, channel);

        struct dma_xmc4xxx_data *dev_data = dev->data;
        const struct dma_xmc4xxx_config *dev_cfg = dev->config;
        XMC_DMA_t *dma = dev_cfg->dma;

        if (channel >= dev_data->ctx.dma_channels) {
                return -EINVAL;

        XMC_DMA_CH_Resume(dma, channel);

        const struct dma_xmc4xxx_config *dev_cfg = dev->config;

        XMC_DMA_Enable(dev_cfg->dma);
        dev_cfg->irq_configure();

static DEVICE_API(dma, dma_xmc4xxx_driver_api) = {

        .dma = (XMC_DMA_t *)DT_INST_REG_ADDR(inst),                             \

        .channels = dma_xmc4xxx##inst##_channels,                               \
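
/*
 * Illustrative only: obtaining the controller instance and reserving a free
 * channel at run time. The "dma0" node label is an assumption; use the label
 * from the actual board devicetree.
 */
#ifdef DMA_XMC4XXX_USAGE_SKETCH
static int example_request_channel(void)
{
        const struct device *const dma_dev = DEVICE_DT_GET(DT_NODELABEL(dma0));

        if (!device_is_ready(dma_dev)) {
                return -ENODEV;
        }
        /* Returns the allocated channel number, or a negative errno */
        return dma_request_channel(dma_dev, NULL);
}
#endif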