/*
 * Copyright (c) 2023 Cypress Semiconductor Corporation (an Infineon company) or
 * an affiliate of Cypress Semiconductor Corporation
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @brief DMA driver for Infineon CAT1 MCU family.
 */
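
/*
 * Usage sketch (illustrative only, not part of this driver): consumers are
 * expected to go through the generic Zephyr DMA API. The channel number and
 * buffer sizes below are assumptions made for the example.
 *
 *	static uint8_t src[64], dst[64];
 *
 *	void example_mem_to_mem(const struct device *dma_dev)
 *	{
 *		struct dma_block_config block = {
 *			.source_address = (uint32_t)src,
 *			.dest_address = (uint32_t)dst,
 *			.block_size = sizeof(src),
 *			.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
 *			.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
 *		};
 *		struct dma_config cfg = {
 *			.channel_direction = MEMORY_TO_MEMORY,
 *			.source_data_size = 4,
 *			.dest_data_size = 4,
 *			.block_count = 1,
 *			.head_block = &block,
 *		};
 *
 *		if (dma_config(dma_dev, 0, &cfg) == 0) {
 *			dma_start(dma_dev, 0);
 *		}
 *	}
 */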

#define DT_DRV_COMPAT infineon_cat1_dma

#include <zephyr/device.h>
#include <soc.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/drivers/gpio.h>

#include <zephyr/logging/log.h>
#include <zephyr/irq.h>

#include <cy_pdl.h>
#include <cyhal_dma_dw.h>

#if CYHAL_DRIVER_AVAILABLE_SYSPM && CONFIG_PM
#include "cyhal_syspm_impl.h"
#endif

#include <zephyr/devicetree.h>
LOG_MODULE_REGISTER(ifx_cat1_dma, CONFIG_DMA_LOG_LEVEL);

#define CH_NUM               32
#define DESCRIPTOR_POOL_SIZE (CH_NUM + 5) /* TODO: add parameter to Kconfig */
#define DMA_LOOP_X_COUNT_MAX CY_DMA_LOOP_COUNT_MAX
#define DMA_LOOP_Y_COUNT_MAX CY_DMA_LOOP_COUNT_MAX

#if CONFIG_SOC_FAMILY_INFINEON_CAT1B
/* On CAT1B the DMA must use SBUS instead of CBUS when accessing the flash
 * area, so convert source addresses from CBUS to SBUS.
 */
#define IFX_CAT1B_FLASH_SBUS_ADDR (0x60000000)
#define IFX_CAT1B_FLASH_CBUS_ADDR (0x8000000)
#define IFX_CAT1_DMA_SRC_ADDR(v)                                                                   \
	(void *)(((uint32_t)v & IFX_CAT1B_FLASH_CBUS_ADDR)                                         \
			 ? (IFX_CAT1B_FLASH_SBUS_ADDR + ((uint32_t)v - IFX_CAT1B_FLASH_CBUS_ADDR)) \
			 : (uint32_t)v)
#else
#define IFX_CAT1_DMA_SRC_ADDR(v) ((void *)v)
#endif

struct ifx_cat1_dma_channel {
	uint32_t channel_direction: 3;
	uint32_t error_callback_dis: 1;

	cy_stc_dma_descriptor_t *descr;
	IRQn_Type irq;

	/* store config data from dma_config structure */
	dma_callback_t callback;
	void *user_data;
};

struct ifx_cat1_dma_data {
	struct dma_context ctx;
	struct ifx_cat1_dma_channel *channels;

#if CYHAL_DRIVER_AVAILABLE_SYSPM && CONFIG_PM
	cyhal_syspm_callback_data_t syspm_callback_args;
#endif
};

struct ifx_cat1_dma_config {
	uint8_t num_channels;
	DW_Type *regs;
	void (*irq_configure)(void);
};

/* Descriptors pool */
K_MEM_SLAB_DEFINE_STATIC(ifx_cat1_dma_descriptors_pool_slab, sizeof(cy_stc_dma_descriptor_t),
			 DESCRIPTOR_POOL_SIZE, 4);

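/*
 * Map a DataWire register base address to its hardware block number
 * (DW0 -> 0, DW1 -> 1); unknown bases fall back to block 0.
 */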
static int32_t _get_hw_block_num(DW_Type *reg_addr)
{
#if (CPUSS_DW0_PRESENT == 1)
	if ((uint32_t)reg_addr == DW0_BASE) {
		return 0;
	}
#endif

#if (CPUSS_DW1_PRESENT == 1)
	if ((uint32_t)reg_addr == DW1_BASE) {
		return 1;
	}
#endif
	return 0;
}

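/*
 * Allocate one descriptor from the static pool and zero it.
 * Returns 0 on success or a k_mem_slab_alloc() error code.
 */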
static int _dma_alloc_descriptor(void **descr)
{
	int ret = k_mem_slab_alloc(&ifx_cat1_dma_descriptors_pool_slab, (void **)descr, K_NO_WAIT);

	if (!ret) {
		memset(*descr, 0, sizeof(cy_stc_dma_descriptor_t));
	}

	return ret;
}

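/* Return a single descriptor to the pool. */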
void _dma_free_descriptor(cy_stc_dma_descriptor_t *descr)
{
	k_mem_slab_free(&ifx_cat1_dma_descriptors_pool_slab, descr);
}

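/* Walk a descriptor chain via nextPtr and return every descriptor to the pool. */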
void _dma_free_linked_descriptors(cy_stc_dma_descriptor_t *descr)
{
	if (descr == NULL) {
		return;
	}
	cy_stc_dma_descriptor_t *descr_to_remove = descr;
	cy_stc_dma_descriptor_t *descr_to_remove_next = NULL;

	do {
		descr_to_remove_next = (cy_stc_dma_descriptor_t *)descr_to_remove->nextPtr;
		_dma_free_descriptor(descr_to_remove);
		descr_to_remove = descr_to_remove_next;

	} while (descr_to_remove);
}

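/*
 * Extension API: connect a HAL trigger source to the given channel input so
 * that hardware events can trigger transfers without CPU involvement.
 */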
int ifx_cat1_dma_ex_connect_digital(const struct device *dev, uint32_t channel,
				    cyhal_source_t source, cyhal_dma_input_t input)
{
	const struct ifx_cat1_dma_config *const cfg = dev->config;

	cyhal_dma_t dma_obj = {
		.resource.type = CYHAL_RSC_DW,
		.resource.block_num = _get_hw_block_num(cfg->regs),
		.resource.channel_num = channel,
	};

	cy_rslt_t rslt = cyhal_dma_connect_digital(&dma_obj, source, input);

	return rslt ? -EIO : 0;
}

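/*
 * Extension API: enable one of the channel's output triggers and return the
 * HAL source that other hardware can be connected to.
 */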
int ifx_cat1_dma_ex_enable_output(const struct device *dev, uint32_t channel,
				  cyhal_dma_output_t output, cyhal_source_t *source)
{
	const struct ifx_cat1_dma_config *const cfg = dev->config;

	cyhal_dma_t dma_obj = {
		.resource.type = CYHAL_RSC_DW,
		.resource.block_num = _get_hw_block_num(cfg->regs),
		.resource.channel_num = channel,
	};

	cy_rslt_t rslt = cyhal_dma_enable_output(&dma_obj, output, source);

	return rslt ? -EIO : 0;
}

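/*
 * Translate the Zephyr data size (in bytes) into the PDL element size.
 * Sizes other than 1/2/4 are rejected in ifx_cat1_dma_configure() before
 * this helper is called.
 */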
static cy_en_dma_data_size_t _convert_dma_data_size_z_to_pdl(struct dma_config *config)
{
	cy_en_dma_data_size_t pdl_dma_data_size = CY_DMA_BYTE;

	switch (config->source_data_size) {
	case 1:
		/* One byte */
		pdl_dma_data_size = CY_DMA_BYTE;
		break;
	case 2:
		/* Half word (two bytes) */
		pdl_dma_data_size = CY_DMA_HALFWORD;
		break;
	case 4:
		/* Full word (four bytes) */
		pdl_dma_data_size = CY_DMA_WORD;
		break;
	}
	return pdl_dma_data_size;
}

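/* Translate a Zephyr address-adjustment setting into a PDL X/Y loop increment. */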
static int _convert_dma_xy_increment_z_to_pdl(uint32_t addr_adj)
{
	switch (addr_adj) {
	case DMA_ADDR_ADJ_INCREMENT:
		return 1;

	case DMA_ADDR_ADJ_DECREMENT:
		return -1;

	case DMA_ADDR_ADJ_NO_CHANGE:
		return 0;

	default:
		return 0;
	}
}

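/*
 * Fill in and initialize one PDL descriptor that covers 'bytes' bytes of the
 * given block, starting 'offset' bytes into it. Spans larger than the X-loop
 * maximum are described with a 2D (X/Y loop) descriptor.
 */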
static int _initialize_descriptor(cy_stc_dma_descriptor_t *descriptor, struct dma_config *config,
				  struct dma_block_config *block_config, uint32_t block_num,
				  uint32_t bytes, uint32_t offset)
{
	cy_en_dma_status_t dma_status;
	cy_stc_dma_descriptor_config_t descriptor_config = {0u};

	/* Retrigger descriptor immediately */
	descriptor_config.retrigger = CY_DMA_RETRIG_IM;

	/* Setup Interrupt Type */
	descriptor_config.interruptType = CY_DMA_DESCR_CHAIN;

	if (((offset + bytes) == block_config->block_size) &&
	    (block_num + 1 == config->block_count)) {
		descriptor_config.channelState = CY_DMA_CHANNEL_DISABLED;
	} else {
		descriptor_config.channelState = CY_DMA_CHANNEL_ENABLED;
	}

	/* TODO: should be able to configure triggerInType/triggerOutType */
	descriptor_config.triggerOutType = CY_DMA_1ELEMENT;

	if (config->channel_direction == MEMORY_TO_MEMORY) {
		descriptor_config.triggerInType = CY_DMA_DESCR_CHAIN;
	} else {
		descriptor_config.triggerInType = CY_DMA_1ELEMENT;
	}

	/* Set data size: byte / halfword / word */
	descriptor_config.dataSize = _convert_dma_data_size_z_to_pdl(config);

	/* By default, transfer what the user set for dataSize. However, if transferring between
	 * memory and a peripheral, make sure the peripheral access is using words.
	 */
	descriptor_config.srcTransferSize = CY_DMA_TRANSFER_SIZE_DATA;
	descriptor_config.dstTransferSize = CY_DMA_TRANSFER_SIZE_DATA;

	if (config->channel_direction == PERIPHERAL_TO_MEMORY) {
		descriptor_config.srcTransferSize = CY_DMA_TRANSFER_SIZE_WORD;
	} else if (config->channel_direction == MEMORY_TO_PERIPHERAL) {
		descriptor_config.dstTransferSize = CY_DMA_TRANSFER_SIZE_WORD;
	}

	/* Setup source increment for X source loop */
	descriptor_config.srcXincrement =
		_convert_dma_xy_increment_z_to_pdl(block_config->source_addr_adj);

	/* Setup destination increment for X destination loop */
	descriptor_config.dstXincrement =
		_convert_dma_xy_increment_z_to_pdl(block_config->dest_addr_adj);

	/* Setup 1D/2D descriptor for each data block */
	if (bytes >= DMA_LOOP_X_COUNT_MAX) {
		descriptor_config.descriptorType = CY_DMA_2D_TRANSFER;
		descriptor_config.xCount = DMA_LOOP_X_COUNT_MAX;
		descriptor_config.yCount = DIV_ROUND_UP(bytes, DMA_LOOP_X_COUNT_MAX);
		descriptor_config.srcYincrement =
			descriptor_config.srcXincrement * DMA_LOOP_X_COUNT_MAX;
		descriptor_config.dstYincrement =
			descriptor_config.dstXincrement * DMA_LOOP_X_COUNT_MAX;
	} else {
		descriptor_config.descriptorType = CY_DMA_1D_TRANSFER;
		descriptor_config.xCount = bytes;
		descriptor_config.yCount = 1;
		descriptor_config.srcYincrement = 0;
		descriptor_config.dstYincrement = 0;
	}

	/* Set source and destination for descriptor */
	descriptor_config.srcAddress = IFX_CAT1_DMA_SRC_ADDR(
		(block_config->source_address + (descriptor_config.srcXincrement ? offset : 0)));
	descriptor_config.dstAddress = (void *)(block_config->dest_address +
						(descriptor_config.dstXincrement ? offset : 0));

	/* Initialize descriptor */
	dma_status = Cy_DMA_Descriptor_Init(descriptor, &descriptor_config);
	if (dma_status != CY_DMA_SUCCESS) {
		return -EIO;
	}

	return 0;
}

/* Configure a DMA channel */
static int ifx_cat1_dma_configure(const struct device *dev, uint32_t channel,
				  struct dma_config *config)
{
	bool use_dt_config = false;
	cy_en_dma_status_t dma_status;
	struct ifx_cat1_dma_data *data = dev->data;
	const struct ifx_cat1_dma_config *const cfg = dev->config;

	cy_stc_dma_channel_config_t channel_config = {0u};
	cy_stc_dma_descriptor_t *descriptor = NULL;
	cy_stc_dma_descriptor_t *prev_descriptor = NULL;

	if (channel >= cfg->num_channels) {
		LOG_ERR("Unsupported channel");
		return -EINVAL;
	}

	/* Support only the same data width for source and dest */
	if (config->dest_data_size != config->source_data_size) {
		LOG_ERR("Source and dest data size differ.");
		return -EINVAL;
	}

	if ((config->dest_data_size != 1) && (config->dest_data_size != 2) &&
	    (config->dest_data_size != 4)) {
		LOG_ERR("dest_data_size must be 1, 2, or 4 (%" PRIu32 ")", config->dest_data_size);
		return -EINVAL;
	}

	if (config->complete_callback_en > 1) {
		LOG_ERR("Callback on each block not implemented");
		return -ENOTSUP;
	}

	data->channels[channel].callback = config->dma_callback;
	data->channels[channel].user_data = config->user_data;
	data->channels[channel].channel_direction = config->channel_direction;
	data->channels[channel].error_callback_dis = config->error_callback_dis;

	/* Remove all previously allocated linked descriptors */
	_dma_free_linked_descriptors(data->channels[channel].descr);
	data->channels[channel].descr = NULL;

	/* Lock interrupts while the descriptor chain is being built */
	uint32_t key = irq_lock();

	struct dma_block_config *block_config = config->head_block;

	for (uint32_t i = 0u; i < config->block_count; i++) {
		uint32_t block_pending_bytes = block_config->block_size;
		uint32_t offset = 0;

		do {
			/* Configure descriptors for one block */
			uint32_t bytes;

			/* Allocate new descriptor */
			if (_dma_alloc_descriptor((void **)&descriptor)) {
				LOG_ERR("Can't allocate new descriptor");
				irq_unlock(key);
				return -ENOMEM;
			}

			if (data->channels[channel].descr == NULL) {
				/* Store first descriptor in data structure */
				data->channels[channel].descr = descriptor;
			}

			/* Chaining descriptors within one transfer is mandatory */
			if (prev_descriptor != NULL) {
				Cy_DMA_Descriptor_SetNextDescriptor(prev_descriptor, descriptor);
			}

			/* Calculate bytes, block_pending_bytes for 1D/2D descriptor */
			if (block_pending_bytes <= DMA_LOOP_X_COUNT_MAX) {
				/* Calculate bytes for 1D descriptor */
				bytes = block_pending_bytes;
				block_pending_bytes = 0;
			} else {
				/* Calculate bytes for 2D descriptor */
				if (block_pending_bytes >
				    (DMA_LOOP_X_COUNT_MAX * DMA_LOOP_Y_COUNT_MAX)) {
					bytes = DMA_LOOP_X_COUNT_MAX * DMA_LOOP_Y_COUNT_MAX;
				} else {
					bytes = DMA_LOOP_Y_COUNT_MAX *
						(block_pending_bytes / DMA_LOOP_Y_COUNT_MAX);
				}
				block_pending_bytes -= bytes;
			}

			if (_initialize_descriptor(descriptor, config, block_config,
						   /* block_num */ i, bytes, offset) != 0) {
				irq_unlock(key);
				return -EIO;
			}
			offset += bytes;
			prev_descriptor = descriptor;

		} while (block_pending_bytes > 0);

		block_config = block_config->next_block;
	}

	/* Set a descriptor for the specified DMA channel */
	channel_config.descriptor = data->channels[channel].descr;

	/* Set a priority for the DMA channel */
	if (use_dt_config == false) {
		Cy_DMA_Channel_SetPriority(cfg->regs, channel, config->channel_priority);
	}

	/* Initialize channel */
	dma_status = Cy_DMA_Channel_Init(cfg->regs, channel, &channel_config);
	if (dma_status != CY_DMA_SUCCESS) {
		irq_unlock(key);
		return -EIO;
	}

	irq_unlock(key);
	return 0;
}


DW_Type *ifx_cat1_dma_get_regs(const struct device *dev)
{
	const struct ifx_cat1_dma_config *const cfg = dev->config;

	return cfg->regs;
}

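/*
 * Start a previously configured transfer: unmask and enable the channel
 * interrupt, enable the channel and, for memory-sourced directions, issue
 * the initial software trigger.
 */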
static int ifx_cat1_dma_start(const struct device *dev, uint32_t channel)
{
	const struct ifx_cat1_dma_config *const cfg = dev->config;
	struct ifx_cat1_dma_data *data = dev->data;

	if (channel >= cfg->num_channels) {
		LOG_ERR("Unsupported channel");
		return -EINVAL;
	}

	/* Enable DMA interrupt source. */
	Cy_DMA_Channel_SetInterruptMask(cfg->regs, channel, CY_DMA_INTR_MASK);

	/* Enable the interrupt */
	irq_enable(data->channels[channel].irq);

	/* Enable DMA channel */
	Cy_DMA_Channel_Enable(cfg->regs, channel);
	if ((data->channels[channel].channel_direction == MEMORY_TO_MEMORY) ||
	    (data->channels[channel].channel_direction == MEMORY_TO_PERIPHERAL)) {
		cyhal_dma_t dma_obj = {
			.resource.type = CYHAL_RSC_DW,
			.resource.block_num = _get_hw_block_num(cfg->regs),
			.resource.channel_num = channel,
		};
		(void)cyhal_dma_start_transfer(&dma_obj);
	}
	return 0;
}

static int ifx_cat1_dma_stop(const struct device *dev, uint32_t channel)
{
	const struct ifx_cat1_dma_config *const cfg = dev->config;

	if (channel >= cfg->num_channels) {
		LOG_ERR("Unsupported channel");
		return -EINVAL;
	}

	/* Disable DMA channel */
	Cy_DMA_Channel_Disable(cfg->regs, channel);

	return 0;
}

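/*
 * Reload a channel with new source/destination addresses. The descriptor
 * chain built at configure time is reused; 'size' is currently ignored, so
 * the programmed transfer length stays unchanged.
 */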
int ifx_cat1_dma_reload(const struct device *dev, uint32_t channel, uint32_t src, uint32_t dst,
			size_t size)
{
	struct ifx_cat1_dma_data *data = dev->data;
	const struct ifx_cat1_dma_config *const cfg = dev->config;
	cy_stc_dma_descriptor_t *descriptor;

	if (channel >= cfg->num_channels) {
		LOG_ERR("Unsupported channel");
		return -EINVAL;
	}

	descriptor = data->channels[channel].descr;

	/* Disable channel */
	Cy_DMA_Channel_Disable(cfg->regs, channel);

	/* Update source/destination address for the specified descriptor */
	descriptor->src = (uint32_t)IFX_CAT1_DMA_SRC_ADDR(src);
	descriptor->dst = dst;

	/* Re-enable channel */
	Cy_DMA_Channel_Enable(cfg->regs, channel);

	return 0;
}

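/* Sum the programmed element count over a channel's whole descriptor chain. */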
uint32_t get_total_size(const struct device *dev, uint32_t channel)
{
	struct ifx_cat1_dma_data *data = dev->data;
	uint32_t total_size = 0;
	uint32_t x_size = 0;
	uint32_t y_size = 0;
	cy_stc_dma_descriptor_t *curr_descr = data->channels[channel].descr;

	while (curr_descr != NULL) {
		x_size = Cy_DMA_Descriptor_GetXloopDataCount(curr_descr);

		if (CY_DMA_2D_TRANSFER == Cy_DMA_Descriptor_GetDescriptorType(curr_descr)) {
			y_size = Cy_DMA_Descriptor_GetYloopDataCount(curr_descr);
		} else {
			y_size = 0;
		}

		total_size += y_size != 0 ? x_size * y_size : x_size;
		curr_descr = Cy_DMA_Descriptor_GetNextDescriptor(curr_descr);
	}

	return total_size;
}

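/*
 * Count elements already transferred: full sizes of all completed
 * descriptors plus the X/Y index progress of the descriptor currently
 * being executed.
 */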
uint32_t get_transferred_size(const struct device *dev, uint32_t channel)
{
	struct ifx_cat1_dma_data *data = dev->data;
	const struct ifx_cat1_dma_config *const cfg = dev->config;
	uint32_t transferred_data_size = 0;
	uint32_t x_size = 0;
	uint32_t y_size = 0;

	cy_stc_dma_descriptor_t *next_descr = data->channels[channel].descr;
	cy_stc_dma_descriptor_t *curr_descr =
		Cy_DMA_Channel_GetCurrentDescriptor(ifx_cat1_dma_get_regs(dev), channel);

	/* Accumulate all fully processed descriptors */
	while ((next_descr != NULL) && (next_descr != curr_descr)) {
		x_size = Cy_DMA_Descriptor_GetXloopDataCount(next_descr);
		y_size = Cy_DMA_Descriptor_GetYloopDataCount(next_descr);
		transferred_data_size += y_size != 0 ? x_size * y_size : x_size;
		next_descr = Cy_DMA_Descriptor_GetNextDescriptor(next_descr);
	}

	/* Add the progress of the current descriptor (still in flight) */
	transferred_data_size +=
		_FLD2VAL(DW_CH_STRUCT_CH_IDX_X_IDX, DW_CH_IDX(cfg->regs, channel)) +
		(_FLD2VAL(DW_CH_STRUCT_CH_IDX_Y_IDX, DW_CH_IDX(cfg->regs, channel)) *
		 Cy_DMA_Descriptor_GetXloopDataCount(curr_descr));

	return transferred_data_size;
}

static int ifx_cat1_dma_get_status(const struct device *dev, uint32_t channel,
				   struct dma_status *stat)
{
	struct ifx_cat1_dma_data *data = dev->data;
	const struct ifx_cat1_dma_config *const cfg = dev->config;
	uint32_t pending_status = 0;

	if (channel >= cfg->num_channels) {
		LOG_ERR("Unsupported channel");
		return -EINVAL;
	}

	if (stat != NULL) {
		/* Check whether the current DMA channel is busy or idle */
#if CONFIG_SOC_FAMILY_INFINEON_CAT1A
		pending_status = DW_CH_STATUS(cfg->regs, channel) &
				 (1UL << DW_CH_STRUCT_V2_CH_STATUS_PENDING_Pos);
#elif CONFIG_SOC_FAMILY_INFINEON_CAT1B
		pending_status = DW_CH_STATUS(cfg->regs, channel) &
				 (1UL << DW_CH_STRUCT_CH_STATUS_PENDING_Pos);
#endif
		/* Busy status info */
		stat->busy = pending_status ? true : false;

		if (data->channels[channel].descr != NULL) {
			uint32_t total_transfer_size = get_total_size(dev, channel);
			uint32_t transferred_size = get_transferred_size(dev, channel);

			stat->pending_length = total_transfer_size - transferred_size;
		} else {
			stat->pending_length = 0;
		}

		/* Direction info */
		stat->dir = data->channels[channel].channel_direction;
	}

	return 0;
}

#if CYHAL_DRIVER_AVAILABLE_SYSPM && CONFIG_PM

static bool _cyhal_dma_dmac_pm_callback(cyhal_syspm_callback_state_t state,
					cyhal_syspm_callback_mode_t mode, void *callback_arg)
{
	CY_UNUSED_PARAMETER(state);
	bool block_transition = false;
	struct ifx_cat1_dma_config *conf = (struct ifx_cat1_dma_config *)callback_arg;
	uint8_t i;

	switch (mode) {
	case CYHAL_SYSPM_CHECK_READY:
		for (i = 0u; i < conf->num_channels; i++) {
#if CONFIG_SOC_FAMILY_INFINEON_CAT1A
			block_transition |= DW_CH_STATUS(conf->regs, i) &
					    (1UL << DW_CH_STRUCT_V2_CH_STATUS_PENDING_Pos);
#elif CONFIG_SOC_FAMILY_INFINEON_CAT1B
			block_transition |= DW_CH_STATUS(conf->regs, i) &
					    (1UL << DW_CH_STRUCT_CH_STATUS_PENDING_Pos);
#endif
		}
		break;
	case CYHAL_SYSPM_CHECK_FAIL:
	case CYHAL_SYSPM_AFTER_TRANSITION:
		break;
	default:
		CY_ASSERT(false);
		break;
	}

	return !block_transition;
}
#endif

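/*
 * Driver init: register the syspm callback when PM is enabled, enable the
 * DataWire block and hook up the per-channel IRQs.
 */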
static int ifx_cat1_dma_init(const struct device *dev)
{
	const struct ifx_cat1_dma_config *const cfg = dev->config;

#if CYHAL_DRIVER_AVAILABLE_SYSPM && CONFIG_PM
	struct ifx_cat1_dma_data *data = dev->data;

	_cyhal_syspm_register_peripheral_callback(&data->syspm_callback_args);
#endif

	/* Enable DMA block to start descriptor execution process */
	Cy_DMA_Enable(cfg->regs);

	/* Configure IRQ */
	cfg->irq_configure();

	return 0;
}

/* Handles DMA interrupts and dispatches to the individual channel */
struct ifx_cat1_dma_irq_context {
	const struct device *dev;
	uint32_t channel;
};

static void ifx_cat1_dma_isr(struct ifx_cat1_dma_irq_context *irq_context)
{
	uint32_t channel = irq_context->channel;
	struct ifx_cat1_dma_data *data = irq_context->dev->data;
	const struct ifx_cat1_dma_config *cfg = irq_context->dev->config;
	dma_callback_t callback = data->channels[channel].callback;
	int status;

	/* Remove all allocated linked descriptors */
	_dma_free_linked_descriptors(data->channels[channel].descr);
	data->channels[channel].descr = NULL;

	uint32_t intr_status = Cy_DMA_Channel_GetStatus(cfg->regs, channel);

	/* Clear all interrupts */
	Cy_DMA_Channel_ClearInterrupt(cfg->regs, channel);

	/* Get the interrupt cause and map it to a status for the user's callback */
	switch (intr_status) {
	case CY_DMA_INTR_CAUSE_COMPLETION:
		status = 0;
		break;
	case CY_DMA_INTR_CAUSE_DESCR_BUS_ERROR: /* Descriptor bus error */
	case CY_DMA_INTR_CAUSE_SRC_BUS_ERROR:   /* Source bus error */
	case CY_DMA_INTR_CAUSE_DST_BUS_ERROR:   /* Destination bus error */
		status = -EPERM;
		break;
	case CY_DMA_INTR_CAUSE_SRC_MISAL: /* Source address is not aligned */
	case CY_DMA_INTR_CAUSE_DST_MISAL: /* Destination address is not aligned */
		status = -EPERM;
		break;
	case CY_DMA_INTR_CAUSE_CURR_PTR_NULL:      /* Current descr pointer is NULL */
	case CY_DMA_INTR_CAUSE_ACTIVE_CH_DISABLED: /* Active channel is disabled */
	default:
		status = -EIO;
		break;
	}

	/* Run the callback on completion, and on errors unless disabled */
	if ((callback != NULL) &&
	    ((status == 0) || !data->channels[channel].error_callback_dis)) {
		void *callback_arg = data->channels[channel].user_data;

		callback(irq_context->dev, callback_arg, channel, status);
	}
}

static DEVICE_API(dma, ifx_cat1_dma_api) = {
	.config = ifx_cat1_dma_configure,
	.start = ifx_cat1_dma_start,
	.stop = ifx_cat1_dma_stop,
	.reload = ifx_cat1_dma_reload,
	.get_status = ifx_cat1_dma_get_status,
};

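/*
 * Each DataWire channel has its own interrupt line: connect every line to
 * the shared ISR with a per-channel context and record the IRQ number so
 * that ifx_cat1_dma_start() can enable it.
 */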
#define IRQ_CONFIGURE(n, inst)                                                                     \
	static const struct ifx_cat1_dma_irq_context irq_context##inst##n = {                      \
		.dev = DEVICE_DT_INST_GET(inst),                                                   \
		.channel = n,                                                                      \
	};                                                                                         \
                                                                                                   \
	IRQ_CONNECT(DT_INST_IRQ_BY_IDX(inst, n, irq), DT_INST_IRQ_BY_IDX(inst, n, priority),       \
		    ifx_cat1_dma_isr, &irq_context##inst##n, 0);                                   \
                                                                                                   \
	ifx_cat1_dma_channels##inst[n].irq = DT_INST_IRQ_BY_IDX(inst, n, irq);

#define CONFIGURE_ALL_IRQS(inst, n) LISTIFY(n, IRQ_CONFIGURE, (), inst)

#if CYHAL_DRIVER_AVAILABLE_SYSPM && CONFIG_PM
#define SYSPM_CALLBACK_ARGS(n)                                                                     \
	.syspm_callback_args = {                                                                   \
		.callback = &_cyhal_dma_dmac_pm_callback,                                          \
		.states = (cyhal_syspm_callback_state_t)(CYHAL_SYSPM_CB_CPU_DEEPSLEEP |            \
							 CYHAL_SYSPM_CB_CPU_DEEPSLEEP_RAM |        \
							 CYHAL_SYSPM_CB_SYSTEM_HIBERNATE),         \
		.next = NULL,                                                                      \
		.args = (void *)&ifx_cat1_dma_config##n,                                           \
		.ignore_modes =                                                                    \
			(cyhal_syspm_callback_mode_t)(CYHAL_SYSPM_BEFORE_TRANSITION |              \
						      CYHAL_SYSPM_AFTER_DS_WFI_TRANSITION)},
#else
#define SYSPM_CALLBACK_ARGS(n)
#endif

#define INFINEON_CAT1_DMA_INIT(n)                                                                  \
                                                                                                   \
	static void ifx_cat1_dma_irq_configure##n(void);                                           \
                                                                                                   \
	static struct ifx_cat1_dma_channel                                                         \
		ifx_cat1_dma_channels##n[DT_INST_PROP(n, dma_channels)];                           \
                                                                                                   \
	static const struct ifx_cat1_dma_config ifx_cat1_dma_config##n = {                         \
		.num_channels = DT_INST_PROP(n, dma_channels),                                     \
		.regs = (DW_Type *)DT_INST_REG_ADDR(n),                                            \
		.irq_configure = ifx_cat1_dma_irq_configure##n,                                    \
	};                                                                                         \
                                                                                                   \
	ATOMIC_DEFINE(ifx_cat1_dma_##n, DT_INST_PROP(n, dma_channels));                            \
	static __aligned(32) struct ifx_cat1_dma_data ifx_cat1_dma_data##n = {                     \
		.ctx =                                                                             \
			{                                                                          \
				.magic = DMA_MAGIC,                                                \
				.atomic = ifx_cat1_dma_##n,                                        \
				.dma_channels = DT_INST_PROP(n, dma_channels),                     \
			},                                                                         \
		.channels = ifx_cat1_dma_channels##n,                                              \
		SYSPM_CALLBACK_ARGS(n)};                                                           \
                                                                                                   \
	static void ifx_cat1_dma_irq_configure##n(void)                                            \
	{                                                                                          \
		extern struct ifx_cat1_dma_channel ifx_cat1_dma_channels##n[];                     \
		CONFIGURE_ALL_IRQS(n, DT_NUM_IRQS(DT_DRV_INST(n)));                                \
	}                                                                                          \
                                                                                                   \
	DEVICE_DT_INST_DEFINE(n, &ifx_cat1_dma_init, NULL, &ifx_cat1_dma_data##n,                  \
			      &ifx_cat1_dma_config##n, PRE_KERNEL_1, CONFIG_DMA_INIT_PRIORITY,     \
			      &ifx_cat1_dma_api);

DT_INST_FOREACH_STATUS_OKAY(INFINEON_CAT1_DMA_INIT)