1 /*
2  * Copyright (c) 2015, Freescale Semiconductor, Inc.
3  * Copyright 2016-2022 NXP
4  * All rights reserved.
5  *
6  * SPDX-License-Identifier: BSD-3-Clause
7  */
8 
9 #include "fsl_edma.h"
10 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
11 #include "fsl_memory.h"
12 #endif
13 /*******************************************************************************
14  * Definitions
15  ******************************************************************************/
16 
17 /* Component ID definition, used by tools. */
18 #ifndef FSL_COMPONENT_ID
19 #define FSL_COMPONENT_ID "platform.drivers.edma"
20 #endif
21 
22 #define EDMA_TRANSFER_ENABLED_MASK 0x80U
23 
24 /*******************************************************************************
25  * Prototypes
26  ******************************************************************************/
27 
28 /*!
29  * @brief Get instance offset.
30  *
31  * @param instance EDMA peripheral instance number.
32  */
33 static uint32_t EDMA_GetInstanceOffset(uint32_t instance);
34 
35 /*!
36  * @brief Map transfer width.
37  *
38  * @param width transfer width.
39  */
40 static edma_transfer_size_t EDMA_TransferWidthMapping(uint32_t width);
41 /*******************************************************************************
42  * Variables
43  ******************************************************************************/
44 
45 /*! @brief Array to map EDMA instance number to base pointer. */
46 static DMA_Type *const s_edmaBases[] = DMA_BASE_PTRS;
47 
48 #if !(defined(FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL) && FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL)
49 /*! @brief Array to map EDMA instance number to clock name. */
50 static const clock_ip_name_t s_edmaClockName[] = EDMA_CLOCKS;
51 #endif /* FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL */
52 
53 /*! @brief Array to map EDMA instance number to IRQ number. */
54 static const IRQn_Type s_edmaIRQNumber[][FSL_FEATURE_EDMA_MODULE_CHANNEL] = DMA_CHN_IRQS;
55 
56 /*! @brief Pointers to transfer handle for each EDMA channel. */
57 static edma_handle_t *s_EDMAHandle[FSL_FEATURE_EDMA_MODULE_CHANNEL * FSL_FEATURE_SOC_EDMA_COUNT];
58 
59 /*******************************************************************************
60  * Code
61  ******************************************************************************/
62 
63 static uint32_t EDMA_GetInstance(DMA_Type *base)
64 {
65     uint32_t instance;
66 
67     /* Find the instance index from base address mappings. */
68     for (instance = 0; instance < ARRAY_SIZE(s_edmaBases); instance++)
69     {
70         if (s_edmaBases[instance] == base)
71         {
72             break;
73         }
74     }
75 
76     assert(instance < ARRAY_SIZE(s_edmaBases));
77 
78     return instance;
79 }
80 
81 /*!
82  * brief Push content of TCD structure into hardware TCD register.
83  *
84  * param base EDMA peripheral base address.
85  * param channel EDMA channel number.
86  * param tcd Pointer to the TCD structure.
87  */
88 void EDMA_InstallTCD(DMA_Type *base, uint32_t channel, edma_tcd_t *tcd)
89 {
90     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
91     assert(tcd != NULL);
92     assert(((uint32_t)tcd & 0x1FU) == 0U);
93 
94     /* Push tcd into hardware TCD register */
95     base->TCD[channel].SADDR         = tcd->SADDR;
96     base->TCD[channel].SOFF          = tcd->SOFF;
97     base->TCD[channel].ATTR          = tcd->ATTR;
98     base->TCD[channel].NBYTES_MLNO   = tcd->NBYTES;
99     base->TCD[channel].SLAST         = (int32_t)tcd->SLAST;
100     base->TCD[channel].DADDR         = tcd->DADDR;
101     base->TCD[channel].DOFF          = tcd->DOFF;
102     base->TCD[channel].CITER_ELINKNO = tcd->CITER;
103     base->TCD[channel].DLAST_SGA     = (int32_t)tcd->DLAST_SGA;
104     /* Clear DONE bit first, otherwise ESG cannot be set */
105     base->TCD[channel].CSR           = 0;
106     base->TCD[channel].CSR           = tcd->CSR;
107     base->TCD[channel].BITER_ELINKNO = tcd->BITER;
108 }
109 
110 /*!
111  * brief Initializes the eDMA peripheral.
112  *
113  * This function ungates the eDMA clock and configures the eDMA peripheral according
114  * to the configuration structure.
115  *
116  * param base eDMA peripheral base address.
117  * param config A pointer to the configuration structure, see "edma_config_t".
118  * note This function enables the minor loop map feature.
119  */
120 void EDMA_Init(DMA_Type *base, const edma_config_t *config)
121 {
122     assert(config != NULL);
123 
124     uint32_t tmpreg;
125 
126 #if !(defined(FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL) && FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL)
127     /* Ungate EDMA peripheral clock */
128     CLOCK_EnableClock(s_edmaClockName[EDMA_GetInstance(base)]);
129 #endif /* FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL */
130 
131     /* Clear all enabled requests and status flags to make sure the eDMA is in a known state */
132     base->ERQ = 0U;
133     base->INT = 0xFFFFFFFFU;
134     base->ERR = 0xFFFFFFFFU;
135     /* Configure EDMA peripheral according to the configuration structure. */
136     tmpreg = base->CR;
137     tmpreg &= ~(DMA_CR_ERCA_MASK | DMA_CR_HOE_MASK | DMA_CR_CLM_MASK | DMA_CR_EDBG_MASK);
138     tmpreg |= (DMA_CR_ERCA(config->enableRoundRobinArbitration) | DMA_CR_HOE(config->enableHaltOnError) |
139                DMA_CR_CLM(config->enableContinuousLinkMode) | DMA_CR_EDBG(config->enableDebugMode) | DMA_CR_EMLM(1U));
140     base->CR = tmpreg;
141 }
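
/*
 * Usage sketch (illustrative only, not part of the driver): a minimal initialization sequence using the
 * APIs in this file. The DMA0 base used below is an assumption for demonstration; use the instance that
 * exists on the target device.
 *
 *   edma_config_t edmaConfig;
 *   EDMA_GetDefaultConfig(&edmaConfig);   // enableHaltOnError = true, other options false
 *   EDMA_Init(DMA0, &edmaConfig);         // ungates the eDMA clock and programs DMA_CR (EMLM always set)
 */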
142 
143 /*!
144  * brief Deinitializes the eDMA peripheral.
145  *
146  * This function gates the eDMA clock.
147  *
148  * param base eDMA peripheral base address.
149  */
150 void EDMA_Deinit(DMA_Type *base)
151 {
152 #if !(defined(FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL) && FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL)
153     /* Gate EDMA peripheral clock */
154     CLOCK_DisableClock(s_edmaClockName[EDMA_GetInstance(base)]);
155 #endif /* FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL */
156 }
157 
158 /*!
159  * brief Gets the eDMA default configuration structure.
160  *
161  * This function sets the configuration structure to default values.
162  * The default configuration is set to the following values.
163  * code
164  *   config.enableContinuousLinkMode = false;
165  *   config.enableHaltOnError = true;
166  *   config.enableRoundRobinArbitration = false;
167  *   config.enableDebugMode = false;
168  * endcode
169  *
170  * param config A pointer to the eDMA configuration structure.
171  */
172 void EDMA_GetDefaultConfig(edma_config_t *config)
173 {
174     assert(config != NULL);
175 
176     /* Initializes the configure structure to zero. */
177     (void)memset(config, 0, sizeof(*config));
178 
179     config->enableRoundRobinArbitration = false;
180     config->enableHaltOnError           = true;
181     config->enableContinuousLinkMode    = false;
182     config->enableDebugMode             = false;
183 }
184 
185 /*!
186  * brief Sets all TCD registers to default values.
187  *
188  * This function sets TCD registers for this channel to default values.
189  *
190  * param base eDMA peripheral base address.
191  * param channel eDMA channel number.
192  * note This function must not be called while the channel transfer is ongoing
193  *       or it causes unpredictable results.
194  * note This function enables the auto stop request feature.
195  */
196 void EDMA_ResetChannel(DMA_Type *base, uint32_t channel)
197 {
198     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
199 
200     EDMA_TcdReset((edma_tcd_t *)(uint32_t)&base->TCD[channel]);
201 }
202 
203 /*!
204  * brief Configures the eDMA transfer attribute.
205  *
206  * This function configures the transfer attribute, including source address, destination address,
207  * transfer size, address offset, and so on. It also configures the scatter gather feature if the
208  * user supplies the TCD address.
209  * Example:
210  * code
211  *  edma_transfer_config_t config;
212  *  edma_tcd_t tcd;
213  *  config.srcAddr = ..;
214  *  config.destAddr = ..;
215  *  ...
216  *  EDMA_SetTransferConfig(DMA0, channel, &config, &tcd);
217  * endcode
218  *
219  * param base eDMA peripheral base address.
220  * param channel eDMA channel number.
221  * param config Pointer to eDMA transfer configuration structure.
222  * param nextTcd Pointer to the next TCD structure. It can be NULL if users
223  *                do not want to enable the scatter/gather feature.
224  * note If nextTcd is not NULL, the scatter gather feature is enabled
225  *       and the DREQ bit is cleared in the previous transfer configuration, which
226  *       is set in EDMA_ResetChannel.
227  */
228 void EDMA_SetTransferConfig(DMA_Type *base, uint32_t channel, const edma_transfer_config_t *config, edma_tcd_t *nextTcd)
229 {
230     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
231     assert(config != NULL);
232     assert(((uint32_t)nextTcd & 0x1FU) == 0U);
233 
234 /* If there is address offset, convert the address */
235 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
236     nextTcd = (edma_tcd_t *)(MEMORY_ConvertMemoryMapAddress((uint32_t)nextTcd, kMEMORY_Local2DMA));
237 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
238     EDMA_TcdSetTransferConfig((edma_tcd_t *)(uint32_t)&base->TCD[channel], config, nextTcd);
239 }
240 
241 /*!
242  * brief Configures the eDMA minor offset feature.
243  *
244  * The minor offset means that a sign-extended value is added to the source address or destination
245  * address after each minor loop.
246  *
247  * param base eDMA peripheral base address.
248  * param channel eDMA channel number.
249  * param config A pointer to the minor offset configuration structure.
250  */
251 void EDMA_SetMinorOffsetConfig(DMA_Type *base, uint32_t channel, const edma_minor_offset_config_t *config)
252 {
253     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
254     assert(config != NULL);
255 
256     uint32_t tmpreg;
257 
258     tmpreg = base->TCD[channel].NBYTES_MLOFFYES;
259     tmpreg &= ~(DMA_NBYTES_MLOFFYES_SMLOE_MASK | DMA_NBYTES_MLOFFYES_DMLOE_MASK | DMA_NBYTES_MLOFFYES_MLOFF_MASK);
260     tmpreg |=
261         (DMA_NBYTES_MLOFFYES_SMLOE(config->enableSrcMinorOffset) |
262          DMA_NBYTES_MLOFFYES_DMLOE(config->enableDestMinorOffset) | DMA_NBYTES_MLOFFYES_MLOFF(config->minorOffset));
263     base->TCD[channel].NBYTES_MLOFFYES = tmpreg;
264 }
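
/*
 * Usage sketch (illustrative only; DMA0 and channel 0 are placeholders): adding a minor loop offset to the
 * destination address only, using the configuration fields referenced by the function above.
 *
 *   edma_minor_offset_config_t minorOffset;
 *   minorOffset.enableSrcMinorOffset  = false;
 *   minorOffset.enableDestMinorOffset = true;
 *   minorOffset.minorOffset           = 4U;   // value written to the MLOFF field
 *   EDMA_SetMinorOffsetConfig(DMA0, 0U, &minorOffset);
 */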
265 
266 /*!
267  * brief Configures the eDMA channel TCD major offset feature.
268  *
269  * Adjustment value added to the source and destination addresses at the completion of the major iteration count
270  *
271  * param base eDMA peripheral base address.
272  * param channel edma channel number.
273  * param sourceOffset source address offset.
274  * param destOffset destination address offset.
275  */
276 void EDMA_SetMajorOffsetConfig(DMA_Type *base, uint32_t channel, int32_t sourceOffset, int32_t destOffset)
277 {
278     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
279 
280     base->TCD[channel].SLAST     = sourceOffset;
281     base->TCD[channel].DLAST_SGA = destOffset;
282 }
283 
284 /*!
285  * brief Configures the eDMA channel preemption feature.
286  *
287  * This function configures the channel preemption attribute and the priority of the channel.
288  *
289  * param base eDMA peripheral base address.
290  * param channel eDMA channel number
291  * param config A pointer to the channel preemption configuration structure.
292  */
293 void EDMA_SetChannelPreemptionConfig(DMA_Type *base, uint32_t channel, const edma_channel_Preemption_config_t *config)
294 {
295     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
296     assert(config != NULL);
297 
298     bool tmpEnablePreemptAbility    = config->enablePreemptAbility;
299     bool tmpEnableChannelPreemption = config->enableChannelPreemption;
300     uint8_t tmpChannelPriority      = config->channelPriority;
301     volatile uint8_t *tmpReg        = &base->DCHPRI3;
302 
303     ((volatile uint8_t *)tmpReg)[DMA_DCHPRI_INDEX(channel)] =
304         (DMA_DCHPRI0_DPA((true == tmpEnablePreemptAbility ? 0U : 1U)) |
305          DMA_DCHPRI0_ECP((true == tmpEnableChannelPreemption ? 1U : 0U)) | DMA_DCHPRI0_CHPRI(tmpChannelPriority));
306 }
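
/*
 * Usage sketch (illustrative only; DMA0 and channel 0 are placeholders): allow this channel to be suspended
 * by a higher-priority channel, but do not let it preempt others. The field names are the ones read by the
 * function above.
 *
 *   edma_channel_Preemption_config_t preemption;
 *   preemption.enableChannelPreemption = true;   // ECP = 1: can be suspended by a higher priority channel
 *   preemption.enablePreemptAbility    = false;  // DPA = 1: cannot suspend a lower priority channel
 *   preemption.channelPriority         = 2U;
 *   EDMA_SetChannelPreemptionConfig(DMA0, 0U, &preemption);
 */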
307 
308 /*!
309  * brief Sets the channel link for the eDMA transfer.
310  *
311  * This function configures either the minor link or the major link mode. The minor link means that the channel link is
312  * triggered every time CITER decreases by 1. The major link means that the channel link is triggered when the CITER is
313  * exhausted.
314  *
315  * param base eDMA peripheral base address.
316  * param channel eDMA channel number.
317  * param linkType A channel link type, which can be one of the following:
318  *   arg kEDMA_LinkNone
319  *   arg kEDMA_MinorLink
320  *   arg kEDMA_MajorLink
321  * param linkedChannel The linked channel number.
322  * note Users should ensure that DONE flag is cleared before calling this interface, or the configuration is invalid.
323  */
324 void EDMA_SetChannelLink(DMA_Type *base, uint32_t channel, edma_channel_link_type_t linkType, uint32_t linkedChannel)
325 {
326     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
327     assert(linkedChannel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
328 
329     EDMA_TcdSetChannelLink((edma_tcd_t *)(uint32_t)&base->TCD[channel], linkType, linkedChannel);
330 }
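
/*
 * Usage sketch (illustrative only; DMA0 and channels 0/1 are placeholders): trigger channel 1 each time
 * channel 0 finishes a minor loop, as described in the function comment above.
 *
 *   EDMA_SetChannelLink(DMA0, 0U, kEDMA_MinorLink, 1U);
 *   // kEDMA_MajorLink triggers channel 1 only when channel 0 exhausts its major loop;
 *   // kEDMA_LinkNone removes an existing link.
 */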
331 
332 /*!
333  * brief Sets the bandwidth for the eDMA transfer.
334  *
335  * Because the eDMA processes the minor loop, it continuously generates read/write sequences
336  * until the minor count is exhausted. The bandwidth forces the eDMA to stall after the completion of
337  * each read/write access to control the bus request bandwidth seen by the crossbar switch.
338  *
339  * param base eDMA peripheral base address.
340  * param channel eDMA channel number.
341  * param bandWidth A bandwidth setting, which can be one of the following:
342  *     arg kEDMABandwidthStallNone
343  *     arg kEDMABandwidthStall4Cycle
344  *     arg kEDMABandwidthStall8Cycle
345  */
346 void EDMA_SetBandWidth(DMA_Type *base, uint32_t channel, edma_bandwidth_t bandWidth)
347 {
348     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
349 
350     base->TCD[channel].CSR = (uint16_t)((base->TCD[channel].CSR & (~DMA_CSR_BWC_MASK)) | DMA_CSR_BWC(bandWidth));
351 }
352 
353 /*!
354  * brief Sets the source modulo and the destination modulo for the eDMA transfer.
355  *
356  * This function defines a specific address range specified to be the value after (SADDR + SOFF)/(DADDR + DOFF)
357  * calculation is performed or the original register value. It provides the ability to implement a circular data
358  * queue easily.
359  *
360  * param base eDMA peripheral base address.
361  * param channel eDMA channel number.
362  * param srcModulo A source modulo value.
363  * param destModulo A destination modulo value.
364  */
365 void EDMA_SetModulo(DMA_Type *base, uint32_t channel, edma_modulo_t srcModulo, edma_modulo_t destModulo)
366 {
367     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
368 
369     uint16_t tmpreg;
370 
371     tmpreg                  = base->TCD[channel].ATTR & (~(uint16_t)(DMA_ATTR_SMOD_MASK | DMA_ATTR_DMOD_MASK));
372     base->TCD[channel].ATTR = tmpreg | DMA_ATTR_DMOD(destModulo) | DMA_ATTR_SMOD(srcModulo);
373 }
374 
375 /*!
376  * brief Enables the interrupt source for the eDMA transfer.
377  *
378  * param base eDMA peripheral base address.
379  * param channel eDMA channel number.
380  * param mask The mask of interrupt source to be set. Users need to use
381  *             the defined edma_interrupt_enable_t type.
382  */
383 void EDMA_EnableChannelInterrupts(DMA_Type *base, uint32_t channel, uint32_t mask)
384 {
385     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
386 
387     /* Enable error interrupt */
388     if (0U != (mask & (uint32_t)kEDMA_ErrorInterruptEnable))
389     {
390         base->EEI |= ((uint32_t)0x1U << channel);
391     }
392 
393     /* Enable Major interrupt */
394     if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
395     {
396         base->TCD[channel].CSR |= DMA_CSR_INTMAJOR_MASK;
397     }
398 
399     /* Enable Half major interrupt */
400     if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
401     {
402         base->TCD[channel].CSR |= DMA_CSR_INTHALF_MASK;
403     }
404 }
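
/*
 * Usage sketch (illustrative only; DMA0 and channel 0 are placeholders): the mask is a bitwise OR of
 * edma_interrupt_enable_t values, so several interrupt sources can be enabled in one call.
 *
 *   EDMA_EnableChannelInterrupts(DMA0, 0U,
 *                                (uint32_t)kEDMA_MajorInterruptEnable | (uint32_t)kEDMA_ErrorInterruptEnable);
 */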
405 
406 /*!
407  * brief Disables the interrupt source for the eDMA transfer.
408  *
409  * param base eDMA peripheral base address.
410  * param channel eDMA channel number.
411  * param mask The mask of the interrupt source to be set. Use
412  *             the defined edma_interrupt_enable_t type.
413  */
414 void EDMA_DisableChannelInterrupts(DMA_Type *base, uint32_t channel, uint32_t mask)
415 {
416     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
417 
418     /* Disable error interrupt */
419     if (0U != (mask & (uint32_t)kEDMA_ErrorInterruptEnable))
420     {
421         base->EEI &= (~((uint32_t)0x1U << channel));
422     }
423 
424     /* Disable Major interrupt */
425     if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
426     {
427         base->TCD[channel].CSR &= ~(uint16_t)DMA_CSR_INTMAJOR_MASK;
428     }
429 
430     /* Disable Half major interrupt */
431     if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
432     {
433         base->TCD[channel].CSR &= ~(uint16_t)DMA_CSR_INTHALF_MASK;
434     }
435 }
436 
437 /*!
438  * brief Sets all fields to default values for the TCD structure.
439  *
440  * This function sets all fields for this TCD structure to default value.
441  *
442  * param tcd Pointer to the TCD structure.
443  * note This function enables the auto stop request feature.
444  */
445 void EDMA_TcdReset(edma_tcd_t *tcd)
446 {
447     assert(tcd != NULL);
448     assert(((uint32_t)tcd & 0x1FU) == 0U);
449 
450     /* Reset channel TCD */
451     tcd->SADDR     = 0U;
452     tcd->SOFF      = 0U;
453     tcd->ATTR      = 0U;
454     tcd->NBYTES    = 0U;
455     tcd->SLAST     = 0U;
456     tcd->DADDR     = 0U;
457     tcd->DOFF      = 0U;
458     tcd->CITER     = 0U;
459     tcd->DLAST_SGA = 0U;
460     /* Enable auto disable request feature */
461     tcd->CSR   = DMA_CSR_DREQ(1U);
462     tcd->BITER = 0U;
463 }
464 
465 /*!
466  * brief Configures the eDMA TCD transfer attribute.
467  *
468  * The TCD is a transfer control descriptor. The content of the TCD is the same as the hardware TCD registers.
469  * The TCD is used in the scatter-gather mode.
470  * This function configures the TCD transfer attribute, including source address, destination address,
471  * transfer size, address offset, and so on. It also configures the scatter gather feature if the
472  * user supplies the next TCD address.
473  * Example:
474  * code
475  *   edma_transfer_t config = {
476  *   ...
477  *   }
478  *   edma_tcd_t tcd __aligned(32);
479  *   edma_tcd_t nextTcd __aligned(32);
480  *   EDMA_TcdSetTransferConfig(&tcd, &config, &nextTcd);
481  * endcode
482  *
483  * param tcd Pointer to the TCD structure.
484  * param config Pointer to eDMA transfer configuration structure.
485  * param nextTcd Pointer to the next TCD structure. It can be NULL if users
486  *                do not want to enable scatter/gather feature.
487  * note TCD address should be 32 bytes aligned or it causes an eDMA error.
488  * note If the nextTcd is not NULL, the scatter gather feature is enabled
489  *       and DREQ bit is cleared in the previous transfer configuration, which
490  *       is set in the EDMA_TcdReset.
491  */
492 void EDMA_TcdSetTransferConfig(edma_tcd_t *tcd, const edma_transfer_config_t *config, edma_tcd_t *nextTcd)
493 {
494     assert(tcd != NULL);
495     assert(((uint32_t)tcd & 0x1FU) == 0U);
496     assert(config != NULL);
497     assert(((uint32_t)nextTcd & 0x1FU) == 0U);
498     assert((config->srcAddr % (1UL << (uint32_t)config->srcTransferSize)) == 0U);
499     assert((config->destAddr % (1UL << (uint32_t)config->destTransferSize)) == 0U);
500 
501     /* source address */
502 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
503     tcd->SADDR = MEMORY_ConvertMemoryMapAddress(config->srcAddr, kMEMORY_Local2DMA);
504     /* destination address */
505     tcd->DADDR = MEMORY_ConvertMemoryMapAddress(config->destAddr, kMEMORY_Local2DMA);
506 #else
507     tcd->SADDR = config->srcAddr;
508     /* destination address */
509     tcd->DADDR = config->destAddr;
510 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
511     /* Source data and destination data transfer size */
512     tcd->ATTR = DMA_ATTR_SSIZE(config->srcTransferSize) | DMA_ATTR_DSIZE(config->destTransferSize);
513     /* Source address signed offset */
514     tcd->SOFF = (uint16_t)config->srcOffset;
515     /* Destination address signed offset */
516     tcd->DOFF = (uint16_t)config->destOffset;
517     /* Minor byte transfer count */
518     tcd->NBYTES = config->minorLoopBytes;
519     /* Current major iteration count */
520     tcd->CITER = (uint16_t)config->majorLoopCounts;
521     /* Starting major iteration count */
522     tcd->BITER = (uint16_t)config->majorLoopCounts;
523     /* Enable scatter/gather processing */
524     if (nextTcd != NULL)
525     {
526 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
527         tcd->DLAST_SGA = MEMORY_ConvertMemoryMapAddress((uint32_t)nextTcd, kMEMORY_Local2DMA);
528 #else
529         tcd->DLAST_SGA = (uint32_t)nextTcd;
530 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
531         /*
532             Before calling EDMA_TcdSetTransferConfig or EDMA_SetTransferConfig,
533             the user must call EDMA_TcdReset or EDMA_ResetChannel, which sets
534             DREQ, so "|" or "&" must be used here rather than "=".
535
536             Clear the DREQ bit because scatter gather has been enabled, so the
537             previous transfer is not the last transfer, and the channel request should
538             remain enabled for the next transfer (the next TCD).
539         */
540         tcd->CSR = (tcd->CSR | (uint16_t)DMA_CSR_ESG_MASK) & ~(uint16_t)DMA_CSR_DREQ_MASK;
541     }
542 }
543 
544 /*!
545  * brief Configures the eDMA TCD minor offset feature.
546  *
547  * A minor offset is a sign-extended value added to the source address or a destination
548  * address after each minor loop.
549  *
550  * param tcd A pointer to the TCD structure.
551  * param config A pointer to the minor offset configuration structure.
552  */
553 void EDMA_TcdSetMinorOffsetConfig(edma_tcd_t *tcd, const edma_minor_offset_config_t *config)
554 {
555     assert(tcd != NULL);
556     assert(((uint32_t)tcd & 0x1FU) == 0U);
557 
558     uint32_t tmpreg;
559 
560     tmpreg = tcd->NBYTES &
561              ~(DMA_NBYTES_MLOFFYES_SMLOE_MASK | DMA_NBYTES_MLOFFYES_DMLOE_MASK | DMA_NBYTES_MLOFFYES_MLOFF_MASK);
562     tmpreg |=
563         (DMA_NBYTES_MLOFFYES_SMLOE(config->enableSrcMinorOffset) |
564          DMA_NBYTES_MLOFFYES_DMLOE(config->enableDestMinorOffset) | DMA_NBYTES_MLOFFYES_MLOFF(config->minorOffset));
565     tcd->NBYTES = tmpreg;
566 }
567 
568 /*!
569  * brief Configures the eDMA TCD major offset feature.
570  *
571  * Adjustment value added to the source and destination addresses at the completion of the major iteration count
572  *
573  * param tcd A pointer to the TCD structure.
574  * param sourceOffset source address offset.
575  * param destOffset destination address offset.
576  */
577 void EDMA_TcdSetMajorOffsetConfig(edma_tcd_t *tcd, int32_t sourceOffset, int32_t destOffset)
578 {
579     assert(tcd != NULL);
580     assert(((uint32_t)tcd & 0x1FU) == 0U);
581 
582     tcd->SLAST     = (uint32_t)sourceOffset;
583     tcd->DLAST_SGA = (uint32_t)destOffset;
584 }
585 
586 /*!
587  * brief Sets the channel link for the eDMA TCD.
588  *
589  * This function configures either a minor link or a major link. The minor link means the channel link is
590  * triggered every time CITER decreases by 1. The major link means that the channel link is triggered when the CITER is
591  * exhausted.
592  *
593  * note Users should ensure that DONE flag is cleared before calling this interface, or the configuration is invalid.
594  * param tcd Pointer to the TCD structure.
595  * param linkType Channel link type, it can be one of:
596  *   arg kEDMA_LinkNone
597  *   arg kEDMA_MinorLink
598  *   arg kEDMA_MajorLink
599  * param linkedChannel The linked channel number.
600  */
601 void EDMA_TcdSetChannelLink(edma_tcd_t *tcd, edma_channel_link_type_t linkType, uint32_t linkedChannel)
602 {
603     assert(tcd != NULL);
604     assert(((uint32_t)tcd & 0x1FU) == 0U);
605     assert(linkedChannel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
606 
607     if (linkType == kEDMA_MinorLink) /* Minor link config */
608     {
609         uint16_t tmpreg;
610 
611         /* Enable minor link */
612         tcd->CITER |= DMA_CITER_ELINKYES_ELINK_MASK;
613         tcd->BITER |= DMA_BITER_ELINKYES_ELINK_MASK;
614         /* Set linked channel */
615         tmpreg = tcd->CITER & (~(uint16_t)DMA_CITER_ELINKYES_LINKCH_MASK);
616         tmpreg |= DMA_CITER_ELINKYES_LINKCH(linkedChannel);
617         tcd->CITER = tmpreg;
618         tmpreg     = tcd->BITER & (~(uint16_t)DMA_BITER_ELINKYES_LINKCH_MASK);
619         tmpreg |= DMA_BITER_ELINKYES_LINKCH(linkedChannel);
620         tcd->BITER = tmpreg;
621     }
622     else if (linkType == kEDMA_MajorLink) /* Major link config */
623     {
624         uint16_t tmpreg;
625 
626         /* Enable major link */
627         tcd->CSR |= DMA_CSR_MAJORELINK_MASK;
628         /* Set major linked channel */
629         tmpreg   = tcd->CSR & (~(uint16_t)DMA_CSR_MAJORLINKCH_MASK);
630         tcd->CSR = tmpreg | DMA_CSR_MAJORLINKCH(linkedChannel);
631     }
632     else /* Link none */
633     {
634         tcd->CITER &= ~(uint16_t)DMA_CITER_ELINKYES_ELINK_MASK;
635         tcd->BITER &= ~(uint16_t)DMA_BITER_ELINKYES_ELINK_MASK;
636         tcd->CSR &= ~(uint16_t)DMA_CSR_MAJORELINK_MASK;
637     }
638 }
639 
640 /*!
641  * brief Sets the source modulo and the destination modulo for the eDMA TCD.
642  *
643  * This function defines a specific address range specified to be the value after (SADDR + SOFF)/(DADDR + DOFF)
644  * calculation is performed or the original register value. It provides the ability to implement a circular data
645  * queue easily.
646  *
647  * param tcd A pointer to the TCD structure.
648  * param srcModulo A source modulo value.
649  * param destModulo A destination modulo value.
650  */
651 void EDMA_TcdSetModulo(edma_tcd_t *tcd, edma_modulo_t srcModulo, edma_modulo_t destModulo)
652 {
653     assert(tcd != NULL);
654     assert(((uint32_t)tcd & 0x1FU) == 0U);
655 
656     uint16_t tmpreg;
657 
658     tmpreg    = tcd->ATTR & (~(uint16_t)(DMA_ATTR_SMOD_MASK | DMA_ATTR_DMOD_MASK));
659     tcd->ATTR = tmpreg | DMA_ATTR_DMOD(destModulo) | DMA_ATTR_SMOD(srcModulo);
660 }
661 
662 /*!
663  * brief Enables the interrupt source for the eDMA TCD.
664  *
665  * param tcd Pointer to the TCD structure.
666  * param mask The mask of interrupt source to be set. Users need to use
667  *             the defined edma_interrupt_enable_t type.
668  */
669 void EDMA_TcdEnableInterrupts(edma_tcd_t *tcd, uint32_t mask)
670 {
671     assert(tcd != NULL);
672 
673     /* Enable Major interrupt */
674     if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
675     {
676         tcd->CSR |= DMA_CSR_INTMAJOR_MASK;
677     }
678 
679     /* Enable Half major interrupt */
680     if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
681     {
682         tcd->CSR |= DMA_CSR_INTHALF_MASK;
683     }
684 }
685 
686 /*!
687  * brief Disables the interrupt source for the eDMA TCD.
688  *
689  * param tcd Pointer to the TCD structure.
690  * param mask The mask of interrupt source to be set. Users need to use
691  *             the defined edma_interrupt_enable_t type.
692  */
693 void EDMA_TcdDisableInterrupts(edma_tcd_t *tcd, uint32_t mask)
694 {
695     assert(tcd != NULL);
696 
697     /* Disable Major interrupt */
698     if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
699     {
700         tcd->CSR &= ~(uint16_t)DMA_CSR_INTMAJOR_MASK;
701     }
702 
703     /* Disable Half major interrupt */
704     if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
705     {
706         tcd->CSR &= ~(uint16_t)DMA_CSR_INTHALF_MASK;
707     }
708 }
709 
710 /*!
711  * brief Gets the remaining major loop count from the eDMA current channel TCD.
712  *
713  * This function checks the TCD (Task Control Descriptor) status for a specified
714  * eDMA channel and returns the number of major loop count that has not finished.
715  *
716  * param base eDMA peripheral base address.
717  * param channel eDMA channel number.
718  * return Major loop count which has not been transferred yet for the current TCD.
719  * note 1. This function can only be used to get the unfinished major loop count of a transfer without
720  *          a next TCD; otherwise the result might be inaccurate.
721  *       2. The unfinished/remaining transfer bytes cannot be obtained directly from the registers while
722  *          the channel is running.
723  *          To calculate the remaining bytes, the initial NBYTES value configured in the DMA_TCDn_NBYTES_MLNO
724  *          register is needed, but the eDMA IP does not support reading it back while a channel is active.
725  *          In other words, the NBYTES value read back is always the actual (decrementing) NBYTES value the DMA
726  *          engine is working with while a channel is running.
727  *          Consequently, to get the remaining transfer bytes, a software-saved initial value of NBYTES (for example,
728  *          copied before enabling the channel) is needed. The formula to calculate it is shown below:
729  *          RemainingBytes = RemainingMajorLoopCount * NBYTES (initially configured)
730  */
731 uint32_t EDMA_GetRemainingMajorLoopCount(DMA_Type *base, uint32_t channel)
732 {
733     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
734 
735     uint32_t remainingCount = 0;
736 
737     if (0U != (DMA_CSR_DONE_MASK & base->TCD[channel].CSR))
738     {
739         remainingCount = 0;
740     }
741     else
742     {
743         /* Calculate the unfinished bytes */
744         if (0U != (base->TCD[channel].CITER_ELINKNO & DMA_CITER_ELINKNO_ELINK_MASK))
745         {
746             remainingCount = (((uint32_t)base->TCD[channel].CITER_ELINKYES & DMA_CITER_ELINKYES_CITER_MASK) >>
747                               DMA_CITER_ELINKYES_CITER_SHIFT);
748         }
749         else
750         {
751             remainingCount = (((uint32_t)base->TCD[channel].CITER_ELINKNO & DMA_CITER_ELINKNO_CITER_MASK) >>
752                               DMA_CITER_ELINKNO_CITER_SHIFT);
753         }
754     }
755 
756     return remainingCount;
757 }
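
/*
 * Usage sketch of the formula from the note above (illustrative only; DMA0/channel 0 are placeholders and
 * nbytesPerRequest is an assumed software copy of the originally configured NBYTES value, saved before the
 * channel was started, because NBYTES cannot be read back reliably while the channel is running):
 *
 *   uint32_t remainingBytes = EDMA_GetRemainingMajorLoopCount(DMA0, 0U) * nbytesPerRequest;
 */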
758 
759 /*!
760  * brief Gets the eDMA channel status flags.
761  *
762  * param base eDMA peripheral base address.
763  * param channel eDMA channel number.
764  * return The mask of channel status flags. Users need to use the
765  *         _edma_channel_status_flags type to decode the return variables.
766  */
767 uint32_t EDMA_GetChannelStatusFlags(DMA_Type *base, uint32_t channel)
768 {
769     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
770 
771     uint32_t retval = 0;
772 
773     /* Get DONE bit flag */
774     retval |= (((uint32_t)base->TCD[channel].CSR & DMA_CSR_DONE_MASK) >> DMA_CSR_DONE_SHIFT);
775     /* Get ERROR bit flag */
776     retval |= ((((uint32_t)base->ERR >> channel) & 0x1U) << 1U);
777     /* Get INT bit flag */
778     retval |= ((((uint32_t)base->INT >> channel) & 0x1U) << 2U);
779 
780     return retval;
781 }
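
/*
 * Usage sketch (illustrative only; DMA0 and channel 0 are placeholders): decoding the returned mask with
 * the _edma_channel_status_flags values also used by EDMA_ClearChannelStatusFlags below.
 *
 *   uint32_t flags = EDMA_GetChannelStatusFlags(DMA0, 0U);
 *   if ((flags & (uint32_t)kEDMA_ErrorFlag) != 0U)
 *   {
 *       EDMA_ClearChannelStatusFlags(DMA0, 0U, (uint32_t)kEDMA_ErrorFlag);
 *   }
 */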
782 
783 /*!
784  * brief Clears the eDMA channel status flags.
785  *
786  * param base eDMA peripheral base address.
787  * param channel eDMA channel number.
788  * param mask The mask of channel status to be cleared. Users need to use
789  *             the defined _edma_channel_status_flags type.
790  */
791 void EDMA_ClearChannelStatusFlags(DMA_Type *base, uint32_t channel, uint32_t mask)
792 {
793     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
794 
795     /* Clear DONE bit flag */
796     if (0U != (mask & (uint32_t)kEDMA_DoneFlag))
797     {
798         base->CDNE = (uint8_t)channel;
799     }
800     /* Clear ERROR bit flag */
801     if (0U != (mask & (uint32_t)kEDMA_ErrorFlag))
802     {
803         base->CERR = (uint8_t)channel;
804     }
805     /* Clear INT bit flag */
806     if (0U != (mask & (uint32_t)kEDMA_InterruptFlag))
807     {
808         base->CINT = (uint8_t)channel;
809     }
810 }
811 
812 static uint32_t EDMA_GetInstanceOffset(uint32_t instance)
813 {
814     static uint8_t startInstanceNum;
815 
816 #if defined(DMA0)
817     startInstanceNum = (uint8_t)EDMA_GetInstance(DMA0);
818 #elif defined(DMA1)
819     startInstanceNum = (uint8_t)EDMA_GetInstance(DMA1);
820 #elif defined(DMA2)
821     startInstanceNum = (uint8_t)EDMA_GetInstance(DMA2);
822 #elif defined(DMA3)
823     startInstanceNum = (uint8_t)EDMA_GetInstance(DMA3);
824 #endif
825 
826     assert(startInstanceNum <= instance);
827 
828     return instance - startInstanceNum;
829 }
830 
831 /*!
832  * brief Creates the eDMA handle.
833  *
834  * This function is called if using the transactional API for eDMA. This function
835  * initializes the internal state of the eDMA handle.
836  *
837  * param handle eDMA handle pointer. The eDMA handle stores callback function and
838  *               parameters.
839  * param base eDMA peripheral base address.
840  * param channel eDMA channel number.
841  */
842 void EDMA_CreateHandle(edma_handle_t *handle, DMA_Type *base, uint32_t channel)
843 {
844     assert(handle != NULL);
845     assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
846 
847     uint32_t edmaInstance;
848     uint32_t channelIndex;
849     edma_tcd_t *tcdRegs;
850 
851     /* Zero the handle */
852     (void)memset(handle, 0, sizeof(*handle));
853 
854     handle->base    = base;
855     handle->channel = (uint8_t)channel;
856 
857     /* Get the DMA instance number */
858     edmaInstance = EDMA_GetInstance(base);
859     channelIndex = (EDMA_GetInstanceOffset(edmaInstance) * (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL) + channel;
860     s_EDMAHandle[channelIndex] = handle;
861 
862     /* Enable NVIC interrupt */
863     (void)EnableIRQ(s_edmaIRQNumber[edmaInstance][channel]);
864 
865     /*
866        Reset the TCD registers to zero. Unlike EDMA_TcdReset (which sets DREQ),
867        CSR is left as 0, because the eDMA busy check mechanism in
868        EDMA_SubmitTransfer requires CSR to be 0.
869     */
870     tcdRegs            = (edma_tcd_t *)(uint32_t)&handle->base->TCD[handle->channel];
871     tcdRegs->SADDR     = 0;
872     tcdRegs->SOFF      = 0;
873     tcdRegs->ATTR      = 0;
874     tcdRegs->NBYTES    = 0;
875     tcdRegs->SLAST     = 0;
876     tcdRegs->DADDR     = 0;
877     tcdRegs->DOFF      = 0;
878     tcdRegs->CITER     = 0;
879     tcdRegs->DLAST_SGA = 0;
880     tcdRegs->CSR       = 0;
881     tcdRegs->BITER     = 0;
882 }
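
/*
 * Usage sketch (illustrative only; DMA0, channel 0 and the s_dmaHandle name are placeholders): the handle
 * is created once after EDMA_Init and reused for all transactional API calls on that channel.
 *
 *   static edma_handle_t s_dmaHandle;
 *   EDMA_CreateHandle(&s_dmaHandle, DMA0, 0U);   // also enables the channel IRQ in the NVIC
 */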
883 
884 /*!
885  * brief Installs the TCDs memory pool into the eDMA handle.
886  *
887  * This function is called after EDMA_CreateHandle in order to use the scatter/gather feature. It is only needed
888  * when users want to use scatter gather mode. Scatter gather mode enables the eDMA to load a new transfer control
889  * descriptor (TCD) in hardware and automatically reconfigure that DMA channel for a new transfer.
890  * Users need to prepare the TCD memory and then configure the TCDs through the EDMA_SubmitTransfer interface.
891  *
892  * param handle eDMA handle pointer.
893  * param tcdPool A memory pool to store TCDs. It must be 32 bytes aligned.
894  * param tcdSize The number of TCD slots.
895  */
896 void EDMA_InstallTCDMemory(edma_handle_t *handle, edma_tcd_t *tcdPool, uint32_t tcdSize)
897 {
898     assert(handle != NULL);
899     assert(((uint32_t)tcdPool & 0x1FU) == 0U);
900 
901     /* Initialize tcd queue attribute. */
902     /* header should be initialized to 1, since it is used to point to the next TCD to be loaded into TCD memory.
903      * In the EDMA driver IRQ handler, header is used to calculate how many TCDs have completed. For example,
904      * if the application submits 4 transfer requests, A->B->C->D,
905      * when A finishes the header is 0 and C is the next TCD to be loaded, since B is already loaded;
906      * the IRQ handler would then compute tcdDone = C - A - header = 2 - header = 2, but actually only 1 TCD is done,
907      * so a wrong TCD done count would be passed to the application in the first TCD interrupt.
908      * At the first submit, header should be assigned 1, since 0 is the current TCD and 1 is the next TCD to be loaded,
909      * but software cannot know which submission is the first one, so 1 is assigned to header here.
910      */
911     handle->header  = 1;
912     handle->tcdUsed = 0;
913     handle->tcdSize = (int8_t)tcdSize;
914     handle->flags   = 0;
915     handle->tcdPool = tcdPool;
916 }
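
/*
 * Usage sketch (illustrative only): the TCD pool must be 32-byte aligned. The GCC-style aligned attribute
 * below is a toolchain assumption; use the equivalent alignment directive of the compiler in use.
 * s_dmaHandle is the placeholder handle created with EDMA_CreateHandle.
 *
 *   static edma_tcd_t s_tcdPool[4] __attribute__((aligned(32)));
 *   EDMA_InstallTCDMemory(&s_dmaHandle, s_tcdPool, 4U);
 */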
917 
918 /*!
919  * brief Installs a callback function for the eDMA transfer.
920  *
921  * This callback is called in the eDMA IRQ handler. Use the callback to do something after
922  * the current major loop transfer completes. The callback is called every time one TCD finishes its transfer.
923  *
924  * param handle eDMA handle pointer.
925  * param callback eDMA callback function pointer.
926  * param userData A parameter for the callback function.
927  */
928 void EDMA_SetCallback(edma_handle_t *handle, edma_callback callback, void *userData)
929 {
930     assert(handle != NULL);
931 
932     handle->callback = callback;
933     handle->userData = userData;
934 }
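
/*
 * Usage sketch (illustrative only): registering a completion callback. The parameter list follows the
 * edma_callback type declared in fsl_edma.h; the DMA_UserCallback name and the unused userData are
 * placeholders.
 *
 *   static void DMA_UserCallback(edma_handle_t *handle, void *userData, bool transferDone, uint32_t tcds)
 *   {
 *       if (transferDone)
 *       {
 *           // notify the application, e.g. through a flag passed in via userData
 *       }
 *   }
 *
 *   EDMA_SetCallback(&s_dmaHandle, DMA_UserCallback, NULL);
 */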
935 
936 static edma_transfer_size_t EDMA_TransferWidthMapping(uint32_t width)
937 {
938     edma_transfer_size_t transferSize = kEDMA_TransferSize1Bytes;
939 
940     /* map width to register value */
941     switch (width)
942     {
943         /* width 8bit */
944         case 1U:
945             transferSize = kEDMA_TransferSize1Bytes;
946             break;
947         /* width 16bit */
948         case 2U:
949             transferSize = kEDMA_TransferSize2Bytes;
950             break;
951         /* width 32bit */
952         case 4U:
953             transferSize = kEDMA_TransferSize4Bytes;
954             break;
955 #if (defined(FSL_FEATURE_EDMA_SUPPORT_8_BYTES_TRANSFER) && FSL_FEATURE_EDMA_SUPPORT_8_BYTES_TRANSFER)
956         /* width 64bit */
957         case 8U:
958             transferSize = kEDMA_TransferSize8Bytes;
959             break;
960 #endif
961 #if (defined(FSL_FEATURE_EDMA_SUPPORT_16_BYTES_TRANSFER) && FSL_FEATURE_EDMA_SUPPORT_16_BYTES_TRANSFER)
962         /* width 128bit */
963         case 16U:
964             transferSize = kEDMA_TransferSize16Bytes;
965             break;
966 #endif
967         /* width 256bit */
968         case 32U:
969             transferSize = kEDMA_TransferSize32Bytes;
970             break;
971         default:
972             /* All the cases have been listed above, the default clause should not be reached. */
973             assert(false);
974             break;
975     }
976 
977     return transferSize;
978 }
979 
980 /*!
981  * brief Prepares the eDMA transfer structure configurations.
982  *
983  * This function prepares the transfer configuration structure according to the user input.
984  *
985  * param config The user configuration structure of type edma_transfer_t.
986  * param srcAddr eDMA transfer source address.
987  * param srcWidth eDMA transfer source address width(bytes).
988  * param srcOffset source address offset.
989  * param destAddr eDMA transfer destination address.
990  * param destWidth eDMA transfer destination address width(bytes).
991  * param destOffset destination address offset.
992  * param bytesEachRequest eDMA transfer bytes per channel request.
993  * param transferBytes eDMA transfer bytes to be transferred.
994  * note The data address and the data width must be consistent. For example, if the SRC
995  *       is 4 bytes, the source address must be 4 bytes aligned, or it results in
996  *       source address error (SAE).
997  */
998 void EDMA_PrepareTransferConfig(edma_transfer_config_t *config,
999                                 void *srcAddr,
1000                                 uint32_t srcWidth,
1001                                 int16_t srcOffset,
1002                                 void *destAddr,
1003                                 uint32_t destWidth,
1004                                 int16_t destOffset,
1005                                 uint32_t bytesEachRequest,
1006                                 uint32_t transferBytes)
1007 {
1008     assert(config != NULL);
1009     assert(srcAddr != NULL);
1010     assert(destAddr != NULL);
1011     assert((srcWidth != 0U) && (srcWidth <= 32U) && ((srcWidth & (srcWidth - 1U)) == 0U));
1012     assert((destWidth != 0U) && (destWidth <= 32U) && ((destWidth & (destWidth - 1U)) == 0U));
1013     assert((transferBytes % bytesEachRequest) == 0U);
1014     assert((((uint32_t)(uint32_t *)srcAddr) % srcWidth) == 0U);
1015     assert((((uint32_t)(uint32_t *)destAddr) % destWidth) == 0U);
1016 
1017     /* Initializes the configure structure to zero. */
1018     (void)memset(config, 0, sizeof(*config));
1019 
1020 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1021     config->srcAddr  = MEMORY_ConvertMemoryMapAddress((uint32_t)(uint32_t *)srcAddr, kMEMORY_Local2DMA);
1022     config->destAddr = MEMORY_ConvertMemoryMapAddress((uint32_t)(uint32_t *)destAddr, kMEMORY_Local2DMA);
1023 #else
1024     config->destAddr = (uint32_t)(uint32_t *)destAddr;
1025     config->srcAddr  = (uint32_t)(uint32_t *)srcAddr;
1026 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1027     config->minorLoopBytes   = bytesEachRequest;
1028     config->majorLoopCounts  = transferBytes / bytesEachRequest;
1029     config->srcTransferSize  = EDMA_TransferWidthMapping(srcWidth);
1030     config->destTransferSize = EDMA_TransferWidthMapping(destWidth);
1031     config->destOffset       = destOffset;
1032     config->srcOffset        = srcOffset;
1033 }
1034 
1035 /*!
1036  * brief Prepares the eDMA transfer structure.
1037  *
1038  * This function prepares the transfer configuration structure according to the user input.
1039  *
1040  * param config The user configuration structure of type edma_transfer_t.
1041  * param srcAddr eDMA transfer source address.
1042  * param srcWidth eDMA transfer source address width(bytes).
1043  * param destAddr eDMA transfer destination address.
1044  * param destWidth eDMA transfer destination address width(bytes).
1045  * param bytesEachRequest eDMA transfer bytes per channel request.
1046  * param transferBytes eDMA transfer bytes to be transferred.
1047  * param transferType eDMA transfer type.
1048  * note The data address and the data width must be consistent. For example, if the SRC
1049  *       is 4 bytes, the source address must be 4 bytes aligned, or it results in
1050  *       source address error (SAE).
1051  */
1052 void EDMA_PrepareTransfer(edma_transfer_config_t *config,
1053                           void *srcAddr,
1054                           uint32_t srcWidth,
1055                           void *destAddr,
1056                           uint32_t destWidth,
1057                           uint32_t bytesEachRequest,
1058                           uint32_t transferBytes,
1059                           edma_transfer_type_t transferType)
1060 {
1061     assert(config != NULL);
1062 
1063     int16_t srcOffset = 0, destOffset = 0;
1064 
1065     switch (transferType)
1066     {
1067         case kEDMA_MemoryToMemory:
1068             destOffset = (int16_t)destWidth;
1069             srcOffset  = (int16_t)srcWidth;
1070             break;
1071         case kEDMA_MemoryToPeripheral:
1072             destOffset = 0;
1073             srcOffset  = (int16_t)srcWidth;
1074             break;
1075         case kEDMA_PeripheralToMemory:
1076             destOffset = (int16_t)destWidth;
1077             srcOffset  = 0;
1078             break;
1079         case kEDMA_PeripheralToPeripheral:
1080             destOffset = 0;
1081             srcOffset  = 0;
1082             break;
1083         default:
1084             /* All the cases have been listed above, the default clause should not be reached. */
1085             assert(false);
1086             break;
1087     }
1088 
1089     EDMA_PrepareTransferConfig(config, srcAddr, srcWidth, srcOffset, destAddr, destWidth, destOffset, bytesEachRequest,
1090                                transferBytes);
1091 }
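
/*
 * Usage sketch (illustrative only; buffer names and sizes are placeholders): prepare a 32-bit wide
 * memory-to-memory transfer that moves 64 bytes in minor loops of 4 bytes each.
 *
 *   static uint32_t s_src[16];
 *   static uint32_t s_dst[16];
 *   edma_transfer_config_t transferConfig;
 *   EDMA_PrepareTransfer(&transferConfig, s_src, 4U, s_dst, 4U, 4U, sizeof(s_src), kEDMA_MemoryToMemory);
 */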
1092 
1093 /*!
1094  * brief Submits the eDMA transfer request.
1095  *
1096  * This function submits the eDMA transfer request according to the transfer configuration structure.
1097  * In scatter gather mode, calling this function adds a configured TCD to the circular list of the TCD pool.
1098  * The TCD pool is set up beforehand by calling EDMA_InstallTCDMemory.
1099  *
1100  * param handle eDMA handle pointer.
1101  * param config Pointer to eDMA transfer configuration structure.
1102  * retval kStatus_EDMA_Success The transfer request was submitted successfully.
1103  * retval kStatus_EDMA_QueueFull The TCD queue is full; submitting another transfer request is not allowed.
1104  * retval kStatus_EDMA_Busy The given channel is busy; submit the request again later.
1105  */
1106 status_t EDMA_SubmitTransfer(edma_handle_t *handle, const edma_transfer_config_t *config)
1107 {
1108     assert(handle != NULL);
1109     assert(config != NULL);
1110 
1111     edma_tcd_t *tcdRegs = (edma_tcd_t *)(uint32_t)&handle->base->TCD[handle->channel];
1112 
1113     if (handle->tcdPool == NULL)
1114     {
1115         /*
1116          *    Check if the EDMA channel is busy:
1117          *    1. if the channel ACTIVE bit is set, a minor loop is executing, so the channel is busy
1118          *    2. if the channel ACTIVE bit is not set and BITER is not equal to CITER, a major loop is executing,
1119          * so the channel is busy
1120          *
1121          *    There is one case that cannot be covered by the condition below:
1122          *    When a transfer request is submitted but no request has come from the peripheral, i.e. channel service
1123          *    has not started yet, submitting another transfer overwrites the TCD, since
1124          *    ACTIVE is 0 and BITER = CITER. Such a case is actually a scatter gather (linked TCD) case, so the
1125          *    application should enable a TCD pool for dynamic scatter gather mode by calling EDMA_InstallTCDMemory.
1126          */
1127         if (((handle->base->TCD[handle->channel].CSR & DMA_CSR_ACTIVE_MASK) != 0U) ||
1128             (((handle->base->TCD[handle->channel].CITER_ELINKNO & DMA_CITER_ELINKNO_CITER_MASK) !=
1129               (handle->base->TCD[handle->channel].BITER_ELINKNO & DMA_BITER_ELINKNO_BITER_MASK))))
1130         {
1131             return kStatus_EDMA_Busy;
1132         }
1133         else
1134         {
1135             EDMA_SetTransferConfig(handle->base, handle->channel, config, NULL);
1136             /* Enable auto disable request feature */
1137             handle->base->TCD[handle->channel].CSR |= DMA_CSR_DREQ_MASK;
1138             /* Enable major interrupt */
1139             handle->base->TCD[handle->channel].CSR |= DMA_CSR_INTMAJOR_MASK;
1140 
1141             return kStatus_Success;
1142         }
1143     }
1144     else /* Use the TCD queue. */
1145     {
1146         uint32_t primask;
1147         uint16_t csr;
1148         int8_t currentTcd;
1149         int8_t previousTcd;
1150         int8_t nextTcd;
1151         int8_t tmpTcdUsed;
1152         int8_t tmpTcdSize;
1153 
1154         /* Check if tcd pool is full. */
1155         primask    = DisableGlobalIRQ();
1156         tmpTcdUsed = handle->tcdUsed;
1157         tmpTcdSize = handle->tcdSize;
1158         if (tmpTcdUsed >= tmpTcdSize)
1159         {
1160             EnableGlobalIRQ(primask);
1161 
1162             return kStatus_EDMA_QueueFull;
1163         }
1164         currentTcd = handle->tail;
1165         handle->tcdUsed++;
1166         /* Calculate index of next TCD */
1167         nextTcd = currentTcd + 1;
1168         if (nextTcd == handle->tcdSize)
1169         {
1170             nextTcd = 0;
1171         }
1172         /* Advance queue tail index */
1173         handle->tail = nextTcd;
1174         EnableGlobalIRQ(primask);
1175         /* Calculate index of previous TCD */
1176         previousTcd = currentTcd != 0 ? currentTcd - 1 : (handle->tcdSize - 1);
1177         /* Configure current TCD block. */
1178         EDMA_TcdReset(&handle->tcdPool[currentTcd]);
1179         EDMA_TcdSetTransferConfig(&handle->tcdPool[currentTcd], config, NULL);
1180         /* Enable major interrupt */
1181         handle->tcdPool[currentTcd].CSR |= DMA_CSR_INTMAJOR_MASK;
1182         /* Link current TCD with next TCD for identification of current TCD */
1183 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1184         handle->tcdPool[currentTcd].DLAST_SGA =
1185             MEMORY_ConvertMemoryMapAddress((uint32_t)&handle->tcdPool[nextTcd], kMEMORY_Local2DMA);
1186 #else
1187         handle->tcdPool[currentTcd].DLAST_SGA = (uint32_t)&handle->tcdPool[nextTcd];
1188 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1189         /* Chain from the previous descriptor unless the TCD pool size is 1 (this descriptor is its own predecessor). */
1190         if (currentTcd != previousTcd)
1191         {
1192             /* Enable scatter/gather feature in the previous TCD block. */
1193             csr = handle->tcdPool[previousTcd].CSR | ((uint16_t)DMA_CSR_ESG_MASK);
1194             csr &= ~((uint16_t)DMA_CSR_DREQ_MASK);
1195             handle->tcdPool[previousTcd].CSR = csr;
1196             /*
1197                 Check if the TCD block in the registers is the previous one (i.e. it points to the current TCD
1198                 block). This tells whether the previously linked TCD has already been loaded into the TCD registers.
1199                 If so, the TCD registers must be linked as well, otherwise the current TCD could be linked into a
1200                 dead chain if the TCD loading happens before the previous TCD block is linked.
1201             */
1202 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1203             if (tcdRegs->DLAST_SGA ==
1204                 MEMORY_ConvertMemoryMapAddress((uint32_t)&handle->tcdPool[currentTcd], kMEMORY_Local2DMA))
1205 #else
1206             if (tcdRegs->DLAST_SGA == (uint32_t)&handle->tcdPool[currentTcd])
1207 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1208             {
1209                 /* Set the DREQ bit first; it is cleared again below once the dynamic scatter gather link is confirmed */
1210                 tcdRegs->CSR |= DMA_CSR_DREQ_MASK;
1211                 /* Enable scatter/gather also in the TCD registers. */
1212                 csr = tcdRegs->CSR | DMA_CSR_ESG_MASK;
1213                 /* The CSR register must be written in a single access, because the transfer may finish at any time. */
1214                 tcdRegs->CSR = csr;
1215                 /*
1216                     It is very important to check the ESG bit!
1217                     By hardware design, if the DONE bit is set, the ESG bit cannot be set, so ESG can
1218                     be used to check whether the dynamic TCD link operation succeeded. If the ESG bit is not set
1219                     and DLAST_SGA is not the next TCD address (DLAST_SGA equal to the next TCD address would mean
1220                     the dynamic link succeeded and the current TCD block has been loaded into the TCD registers),
1221                     the transfer has finished and the TCD link operation failed, so the TCD content must be
1222                     installed into the TCD registers and the transfer enabled again. If ESG is set, the transfer
1223                     has not finished, so the dynamic TCD link succeeded.
1224                 */
1225                 if (0U != (tcdRegs->CSR & DMA_CSR_ESG_MASK))
1226                 {
1227                     tcdRegs->CSR &= ~(uint16_t)DMA_CSR_DREQ_MASK;
1228                     return kStatus_Success;
1229                 }
1230                 /*
1231                     Check whether the current TCD block is already loaded into the TCD registers. This is the
1232                     other condition when the ESG bit is not set: the dynamic TCD link succeeded and the current
1233                     TCD block has been loaded into the TCD registers.
1234                 */
1235 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1236                 if (tcdRegs->DLAST_SGA ==
1237                     MEMORY_ConvertMemoryMapAddress((uint32_t)&handle->tcdPool[nextTcd], kMEMORY_Local2DMA))
1238 #else
1239                 if (tcdRegs->DLAST_SGA == (uint32_t)&handle->tcdPool[nextTcd])
1240 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1241                 {
1242                     return kStatus_Success;
1243                 }
1244                 /*
1245                     Reaching this point means the previous transfer finished and the DONE bit is set,
1246                     so the TCD registers must be configured.
1247                 */
1248             }
1249             else if (tcdRegs->DLAST_SGA != 0UL)
1250             {
1251                 /* The current TCD block has been linked successfully. */
1252                 return kStatus_Success;
1253             }
1254             else
1255             {
1256                 /*
1257                     DLAST_SGA is 0, which means this is the first submitted transfer, so the
1258                     TCD registers must be configured.
1259                 */
1260             }
1261         }
1262         /* There is no live chain, so the TCD block needs to be installed into the TCD registers. */
1263         EDMA_InstallTCD(handle->base, handle->channel, &handle->tcdPool[currentTcd]);
1264         /* Enable channel request again. */
1265         if (0U != (handle->flags & EDMA_TRANSFER_ENABLED_MASK))
1266         {
1267             handle->base->SERQ = DMA_SERQ_SERQ(handle->channel);
1268         }
1269 
1270         return kStatus_Success;
1271     }
1272 }
1273 
1274 /*!
1275  * brief eDMA starts transfer.
1276  *
1277  * This function enables the channel request. Users can call this function either before or after
1278  * submitting the transfer request.
1279  *
1280  * param handle eDMA handle pointer.
1281  */
1282 void EDMA_StartTransfer(edma_handle_t *handle)
1283 {
1284     assert(handle != NULL);
1285     uint32_t tmpCSR = 0;
1286 
1287     if (handle->tcdPool == NULL)
1288     {
1289         handle->base->SERQ = DMA_SERQ_SERQ(handle->channel);
1290     }
1291     else /* Use the TCD queue. */
1292     {
1293         uint32_t primask;
1294         edma_tcd_t *tcdRegs = (edma_tcd_t *)(uint32_t)&handle->base->TCD[handle->channel];
1295 
1296         handle->flags |= EDMA_TRANSFER_ENABLED_MASK;
1297 
1298         /* Check if there was at least one descriptor submitted since reset (TCD in registers is valid) */
1299         if (tcdRegs->DLAST_SGA != 0U)
1300         {
1301             primask = DisableGlobalIRQ();
1302             /* Check if the channel request is actually disabled. */
1303             if ((handle->base->ERQ & ((uint32_t)1U << handle->channel)) == 0U)
1304             {
1305                 /* Check if transfer is paused. */
1306                 tmpCSR = tcdRegs->CSR;
1307                 if ((0U == (tmpCSR & DMA_CSR_DONE_MASK)) || (0U != (tmpCSR & DMA_CSR_ESG_MASK)))
1308                 {
1309                     /*
1310                         The channel request must be re-enabled as soon as possible, so this is done in a
1311                         critical section to avoid a task switch or interrupt service routine delaying it.
1312                     */
1313                     handle->base->SERQ = DMA_SERQ_SERQ(handle->channel);
1314                 }
1315             }
1316             EnableGlobalIRQ(primask);
1317         }
1318     }
1319 }
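
/*
 * Illustrative usage sketch (not part of the driver): a typical sequence around EDMA_StartTransfer()
 * for a single transfer. The handle, buffers, and channel number (g_edmaHandle, srcBuf, destBuf, 0U)
 * are application-side placeholders; the prepare/submit helpers are the ones declared in fsl_edma.h.
 *
 *     edma_transfer_config_t transferConfig;
 *
 *     EDMA_CreateHandle(&g_edmaHandle, DMA0, 0U);
 *     EDMA_PrepareTransfer(&transferConfig, srcBuf, sizeof(uint32_t), destBuf, sizeof(uint32_t),
 *                          sizeof(uint32_t), sizeof(srcBuf), kEDMA_MemoryToMemory);
 *     (void)EDMA_SubmitTransfer(&g_edmaHandle, &transferConfig);
 *     EDMA_StartTransfer(&g_edmaHandle);
 */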
1320 
1321 /*!
1322  * brief eDMA stops transfer.
1323  *
1324  * This function disables the channel request to pause the transfer. Users can call EDMA_StartTransfer()
1325  * again to resume the transfer.
1326  *
1327  * param handle eDMA handle pointer.
1328  */
1329 void EDMA_StopTransfer(edma_handle_t *handle)
1330 {
1331     assert(handle != NULL);
1332 
1333     handle->flags &= (~(uint8_t)EDMA_TRANSFER_ENABLED_MASK);
1334     handle->base->CERQ = DMA_CERQ_CERQ(handle->channel);
1335 }
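
/*
 * Illustrative pause/resume sketch (not part of the driver): EDMA_StopTransfer() only clears the
 * channel request and keeps the TCD state, so calling EDMA_StartTransfer() on the same handle later
 * resumes the transfer where it stopped. g_edmaHandle is an application-side placeholder and
 * DoSomethingElse() stands for arbitrary application work.
 *
 *     EDMA_StopTransfer(&g_edmaHandle);
 *     DoSomethingElse();
 *     EDMA_StartTransfer(&g_edmaHandle);
 */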
1336 
1337 /*!
1338  * brief eDMA aborts transfer.
1339  *
1340  * This function disables the channel request and clears the transfer status bits.
1341  * Users can submit another transfer after calling this API.
1342  *
1343  * param handle DMA handle pointer.
1344  */
1345 void EDMA_AbortTransfer(edma_handle_t *handle)
1346 {
1347     handle->base->CERQ = DMA_CERQ_CERQ(handle->channel);
1348     /*
1349         Clear CSR to release the channel. If the given channel has started a transfer,
1350         CSR is non-zero: DREQ is set when it is the last transfer, otherwise
1351         ESG is set.
1352     */
1353     handle->base->TCD[handle->channel].CSR = 0;
1354     /* Cancel all next TCD transfer. */
1355     handle->base->TCD[handle->channel].DLAST_SGA = 0;
1356     /* Clear the CITER and BITER fields so the TCD registers are in a correct state for the next call to
1357      * EDMA_SubmitTransfer. */
1358     handle->base->TCD[handle->channel].CITER_ELINKNO = 0;
1359     handle->base->TCD[handle->channel].BITER_ELINKNO = 0;
1360 
1361     /* Handle the tcd */
1362     if (handle->tcdPool != NULL)
1363     {
1364         handle->header  = 1;
1365         handle->tail    = 0;
1366         handle->tcdUsed = 0;
1367     }
1368 }
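
/*
 * Illustrative sketch (not part of the driver): aborting an in-flight transfer and then submitting a
 * new one on the same handle. g_edmaHandle, transferConfig, newSrc, and newDest are application-side
 * placeholders; the prepare/submit helpers are the ones declared in fsl_edma.h.
 *
 *     EDMA_AbortTransfer(&g_edmaHandle);
 *     EDMA_PrepareTransfer(&transferConfig, newSrc, sizeof(uint32_t), newDest, sizeof(uint32_t),
 *                          sizeof(uint32_t), sizeof(newSrc), kEDMA_MemoryToMemory);
 *     (void)EDMA_SubmitTransfer(&g_edmaHandle, &transferConfig);
 *     EDMA_StartTransfer(&g_edmaHandle);
 */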
1369 
1370 /*!
1371  * brief eDMA IRQ handler for the current major loop transfer completion.
1372  *
1373  * This function clears the channel major interrupt flag and calls
1374  * the callback function if it is not NULL.
1375  *
1376  * Note:
1377  * For the case using TCD queue, when the major iteration count is exhausted, additional operations are performed.
1378  * These include the final address adjustments and reloading of the BITER field into the CITER.
1379  * Assertion of an optional interrupt request also occurs at this time, as does a possible fetch of a new TCD from
1380  * memory using the scatter/gather address pointer included in the descriptor (if scatter/gather is enabled).
1381  *
1382  * For instance, when the interrupt for TCD[0] occurs, TCD[1] has already been loaded into the eDMA engine.
1383  * Because sga and sga_index are calculated from the DLAST_SGA field of the TCD, the sga_index
1384  * in this case should be 2 (DLAST_SGA of TCD[1] stores the address of TCD[2]). Thus, the updated "tcdUsed" should
1385  * be (tcdUsed - 2U), which indicates how many TCD slots in the memory pool can be reused (TCD[0] and TCD[1] have
1386  * already been loaded into the eDMA engine at this point).
1387  *
1388  * For the last two consecutive ISRs in a scatter/gather process, the second-to-last ISR loads the last TCD from
1389  * the memory pool into the eDMA engine when its major loop completes, and the last ISR does not load a new TCD.
1390  * Therefore, ensure that the header and tcdUsed updates are identical for both of them;
1391  * tcdUsed is 0 in both cases because there is no TCD left to load.
1392  *
1393  * See the "eDMA basic data flow" in the eDMA Functional description section of the Reference Manual for
1394  * further details.
1395  *
1396  * param handle eDMA handle pointer.
1397  */
1398 void EDMA_HandleIRQ(edma_handle_t *handle)
1399 {
1400     assert(handle != NULL);
1401 
1402     bool transfer_done;
1403 
1404     /* Clear EDMA interrupt flag */
1405     handle->base->CINT = handle->channel;
1406     /* Check if transfer is already finished. */
1407     transfer_done = ((handle->base->TCD[handle->channel].CSR & DMA_CSR_DONE_MASK) != 0U);
1408 
1409     if (handle->tcdPool == NULL)
1410     {
1411         if (handle->callback != NULL)
1412         {
1413             (handle->callback)(handle, handle->userData, transfer_done, 0);
1414         }
1415     }
1416     else /* Use the TCD queue. Please refer to the API descriptions in the eDMA header file for detailed information. */
1417     {
1418         uint32_t sga = (uint32_t)handle->base->TCD[handle->channel].DLAST_SGA;
1419         uint32_t sga_index;
1420         int32_t tcds_done;
1421         uint8_t new_header;
1422         bool esg = ((handle->base->TCD[handle->channel].CSR & DMA_CSR_ESG_MASK) != 0U);
1423 
1424         /* Get the offset of the next transfer TCD blocks to be loaded into the eDMA engine. */
1425 #if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1426         sga -= MEMORY_ConvertMemoryMapAddress((uint32_t)handle->tcdPool, kMEMORY_Local2DMA);
1427 #else
1428         sga -= (uint32_t)handle->tcdPool;
1429 #endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1430         /* Get the index of the next transfer TCD blocks to be loaded into the eDMA engine. */
1431         sga_index = sga / sizeof(edma_tcd_t);
1432         /* Adjust header positions. */
1433         if (transfer_done)
1434         {
1435             /* New header shall point to the next TCD to be loaded (current one is already finished) */
1436             new_header = (uint8_t)sga_index;
1437         }
1438         else
1439         {
1440             /* New header shall point to this descriptor currently loaded (not finished yet) */
1441             new_header = sga_index != 0U ? (uint8_t)sga_index - 1U : (uint8_t)handle->tcdSize - 1U;
1442         }
1443         /* Calculate the number of finished TCDs */
1444         if (new_header == (uint8_t)handle->header)
1445         {
1446             int8_t tmpTcdUsed = handle->tcdUsed;
1447             int8_t tmpTcdSize = handle->tcdSize;
1448 
1449             /* Check esg here for the case where the application submits only one request; once that request
1450              * completes:
1451              * new_header(1) == handle->header(1), tcdUsed(1) != tcdSize(>1)
1452              * Since only one request was submitted, scatter/gather cannot be enabled, so tcds_done should be 1.
1453              */
1454             if ((tmpTcdUsed == tmpTcdSize) || (!esg))
1455             {
1456                 tcds_done = handle->tcdUsed;
1457             }
1458             else
1459             {
1460                 /* No TCD in memory is going to be loaded, or an internal error occurred. */
1461                 tcds_done = 0;
1462             }
1463         }
1464         else
1465         {
1466             tcds_done = (int32_t)new_header - (int32_t)handle->header;
1467             if (tcds_done < 0)
1468             {
1469                 tcds_done += handle->tcdSize;
1470             }
1471             /*
1472              * Reaching this point means a TCD transfer is done and a new TCD has been loaded into the hardware,
1473              * so clear DONE here to allow a scatter/gather transfer request to be submitted in the callback
1474              * without the loaded TCD being overwritten.
1475              */
1476             if (transfer_done)
1477             {
1478                 handle->base->CDNE = handle->channel;
1479             }
1480         }
1481         /* Advance header which points to the TCD to be loaded into the eDMA engine from memory. */
1482         handle->header = (int8_t)new_header;
1483         /* Release TCD blocks. tcdUsed is the number of TCDs that can be used/loaded in the memory pool. */
1484         handle->tcdUsed -= (int8_t)tcds_done;
1485         /* Invoke callback function. */
1486         if (NULL != handle->callback)
1487         {
1488             (handle->callback)(handle, handle->userData, transfer_done, tcds_done);
1489         }
1490 
1491         /*
1492          * 1. Clearing the DONE bit here matters for the following case:
1493          *    a new TCD has already been loaded into the eDMA engine, so the DONE bit must be cleared in the
1494          *    IRQ handler to keep the TCD in the eDMA engine from being overwritten if the peripheral request
1495          *    does not arrive before the next transfer request.
1496          * 2. Do not clear the DONE bit for the following case:
1497          *    the transfer request submitted in the previous eDMA callback does not need scatter/gather, so
1498          *    keep the DONE bit set; the next transfer request submission re-installs the TCD, and the DONE
1499          *    bit is cleared together with the TCD re-installation.
1500          */
1501         if (transfer_done)
1502         {
1503             if ((handle->base->TCD[handle->channel].CSR & DMA_CSR_ESG_MASK) != 0U)
1504             {
1505                 handle->base->CDNE = handle->channel;
1506             }
1507         }
1508     }
1509 }
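
/*
 * Illustrative callback sketch (not part of the driver): EDMA_HandleIRQ() invokes the callback that
 * was registered with EDMA_SetCallback() and passes transferDone plus the number of consumed TCDs
 * (tcds), which an application can use to track how many queued descriptors completed. The names
 * APP_EdmaCallback, g_edmaHandle, and g_transferDone are application-side placeholders; see the
 * edma_callback typedef in fsl_edma.h for the exact callback signature.
 *
 *     static volatile bool g_transferDone = false;
 *
 *     static void APP_EdmaCallback(edma_handle_t *handle, void *userData, bool transferDone, uint32_t tcds)
 *     {
 *         if (transferDone)
 *         {
 *             g_transferDone = true;
 *         }
 *     }
 *
 *     EDMA_SetCallback(&g_edmaHandle, APP_EdmaCallback, NULL);
 */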
1510 
1511 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
1512     (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 4)
1513 /* 8 channels (Shared): kl28 */
1514 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL == 8U)
1515 
1516 #if defined(DMA0)
1517 void DMA0_04_DriverIRQHandler(void);
1518 void DMA0_04_DriverIRQHandler(void)
1519 {
1520     if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1521     {
1522         EDMA_HandleIRQ(s_EDMAHandle[0]);
1523     }
1524     if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1525     {
1526         EDMA_HandleIRQ(s_EDMAHandle[4]);
1527     }
1528     SDK_ISR_EXIT_BARRIER;
1529 }
1530 
1531 void DMA0_15_DriverIRQHandler(void);
1532 void DMA0_15_DriverIRQHandler(void)
1533 {
1534     if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1535     {
1536         EDMA_HandleIRQ(s_EDMAHandle[1]);
1537     }
1538     if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1539     {
1540         EDMA_HandleIRQ(s_EDMAHandle[5]);
1541     }
1542     SDK_ISR_EXIT_BARRIER;
1543 }
1544 
1545 void DMA0_26_DriverIRQHandler(void);
1546 void DMA0_26_DriverIRQHandler(void)
1547 {
1548     if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1549     {
1550         EDMA_HandleIRQ(s_EDMAHandle[2]);
1551     }
1552     if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1553     {
1554         EDMA_HandleIRQ(s_EDMAHandle[6]);
1555     }
1556     SDK_ISR_EXIT_BARRIER;
1557 }
1558 
1559 void DMA0_37_DriverIRQHandler(void);
1560 void DMA0_37_DriverIRQHandler(void)
1561 {
1562     if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1563     {
1564         EDMA_HandleIRQ(s_EDMAHandle[3]);
1565     }
1566     if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1567     {
1568         EDMA_HandleIRQ(s_EDMAHandle[7]);
1569     }
1570     SDK_ISR_EXIT_BARRIER;
1571 }
1572 #endif
1573 
1574 #if defined(DMA1)
1575 
1576 #if defined(DMA0)
1577 void DMA1_04_DriverIRQHandler(void);
1578 void DMA1_04_DriverIRQHandler(void)
1579 {
1580     if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1581     {
1582         EDMA_HandleIRQ(s_EDMAHandle[8]);
1583     }
1584     if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1585     {
1586         EDMA_HandleIRQ(s_EDMAHandle[12]);
1587     }
1588     SDK_ISR_EXIT_BARRIER;
1589 }
1590 
1591 void DMA1_15_DriverIRQHandler(void);
1592 void DMA1_15_DriverIRQHandler(void)
1593 {
1594     if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1595     {
1596         EDMA_HandleIRQ(s_EDMAHandle[9]);
1597     }
1598     if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1599     {
1600         EDMA_HandleIRQ(s_EDMAHandle[13]);
1601     }
1602     SDK_ISR_EXIT_BARRIER;
1603 }
1604 
1605 void DMA1_26_DriverIRQHandler(void);
1606 void DMA1_26_DriverIRQHandler(void)
1607 {
1608     if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1609     {
1610         EDMA_HandleIRQ(s_EDMAHandle[10]);
1611     }
1612     if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1613     {
1614         EDMA_HandleIRQ(s_EDMAHandle[14]);
1615     }
1616     SDK_ISR_EXIT_BARRIER;
1617 }
1618 
1619 void DMA1_37_DriverIRQHandler(void);
1620 void DMA1_37_DriverIRQHandler(void)
1621 {
1622     if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1623     {
1624         EDMA_HandleIRQ(s_EDMAHandle[11]);
1625     }
1626     if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1627     {
1628         EDMA_HandleIRQ(s_EDMAHandle[15]);
1629     }
1630     SDK_ISR_EXIT_BARRIER;
1631 }
1632 
1633 #else
1634 void DMA1_04_DriverIRQHandler(void);
1635 void DMA1_04_DriverIRQHandler(void)
1636 {
1637     if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1638     {
1639         EDMA_HandleIRQ(s_EDMAHandle[0]);
1640     }
1641     if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1642     {
1643         EDMA_HandleIRQ(s_EDMAHandle[4]);
1644     }
1645     SDK_ISR_EXIT_BARRIER;
1646 }
1647 
1648 void DMA1_15_DriverIRQHandler(void);
1649 void DMA1_15_DriverIRQHandler(void)
1650 {
1651     if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1652     {
1653         EDMA_HandleIRQ(s_EDMAHandle[1]);
1654     }
1655     if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1656     {
1657         EDMA_HandleIRQ(s_EDMAHandle[5]);
1658     }
1659     SDK_ISR_EXIT_BARRIER;
1660 }
1661 
1662 void DMA1_26_DriverIRQHandler(void);
1663 void DMA1_26_DriverIRQHandler(void)
1664 {
1665     if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1666     {
1667         EDMA_HandleIRQ(s_EDMAHandle[2]);
1668     }
1669     if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1670     {
1671         EDMA_HandleIRQ(s_EDMAHandle[6]);
1672     }
1673     SDK_ISR_EXIT_BARRIER;
1674 }
1675 
1676 void DMA1_37_DriverIRQHandler(void);
1677 void DMA1_37_DriverIRQHandler(void)
1678 {
1679     if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1680     {
1681         EDMA_HandleIRQ(s_EDMAHandle[3]);
1682     }
1683     if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1684     {
1685         EDMA_HandleIRQ(s_EDMAHandle[7]);
1686     }
1687     SDK_ISR_EXIT_BARRIER;
1688 }
1689 #endif
1690 #endif
1691 #endif /* 8 channels (Shared) */
1692 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
1693 
1694 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
1695     (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 8)
1696 /* 16 channels (Shared): K32H844P */
1697 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL == 16U)
1698 
1699 void DMA0_08_DriverIRQHandler(void);
1700 void DMA0_08_DriverIRQHandler(void)
1701 {
1702     if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1703     {
1704         EDMA_HandleIRQ(s_EDMAHandle[0]);
1705     }
1706     if ((EDMA_GetChannelStatusFlags(DMA0, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1707     {
1708         EDMA_HandleIRQ(s_EDMAHandle[8]);
1709     }
1710     SDK_ISR_EXIT_BARRIER;
1711 }
1712 
1713 void DMA0_19_DriverIRQHandler(void);
1714 void DMA0_19_DriverIRQHandler(void)
1715 {
1716     if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1717     {
1718         EDMA_HandleIRQ(s_EDMAHandle[1]);
1719     }
1720     if ((EDMA_GetChannelStatusFlags(DMA0, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1721     {
1722         EDMA_HandleIRQ(s_EDMAHandle[9]);
1723     }
1724     SDK_ISR_EXIT_BARRIER;
1725 }
1726 
1727 void DMA0_210_DriverIRQHandler(void);
1728 void DMA0_210_DriverIRQHandler(void)
1729 {
1730     if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1731     {
1732         EDMA_HandleIRQ(s_EDMAHandle[2]);
1733     }
1734     if ((EDMA_GetChannelStatusFlags(DMA0, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1735     {
1736         EDMA_HandleIRQ(s_EDMAHandle[10]);
1737     }
1738     SDK_ISR_EXIT_BARRIER;
1739 }
1740 
1741 void DMA0_311_DriverIRQHandler(void);
1742 void DMA0_311_DriverIRQHandler(void)
1743 {
1744     if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1745     {
1746         EDMA_HandleIRQ(s_EDMAHandle[3]);
1747     }
1748     if ((EDMA_GetChannelStatusFlags(DMA0, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1749     {
1750         EDMA_HandleIRQ(s_EDMAHandle[11]);
1751     }
1752     SDK_ISR_EXIT_BARRIER;
1753 }
1754 
1755 void DMA0_412_DriverIRQHandler(void);
1756 void DMA0_412_DriverIRQHandler(void)
1757 {
1758     if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1759     {
1760         EDMA_HandleIRQ(s_EDMAHandle[4]);
1761     }
1762     if ((EDMA_GetChannelStatusFlags(DMA0, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1763     {
1764         EDMA_HandleIRQ(s_EDMAHandle[12]);
1765     }
1766     SDK_ISR_EXIT_BARRIER;
1767 }
1768 
1769 void DMA0_513_DriverIRQHandler(void);
1770 void DMA0_513_DriverIRQHandler(void)
1771 {
1772     if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1773     {
1774         EDMA_HandleIRQ(s_EDMAHandle[5]);
1775     }
1776     if ((EDMA_GetChannelStatusFlags(DMA0, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1777     {
1778         EDMA_HandleIRQ(s_EDMAHandle[13]);
1779     }
1780     SDK_ISR_EXIT_BARRIER;
1781 }
1782 
1783 void DMA0_614_DriverIRQHandler(void);
1784 void DMA0_614_DriverIRQHandler(void)
1785 {
1786     if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1787     {
1788         EDMA_HandleIRQ(s_EDMAHandle[6]);
1789     }
1790     if ((EDMA_GetChannelStatusFlags(DMA0, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1791     {
1792         EDMA_HandleIRQ(s_EDMAHandle[14]);
1793     }
1794     SDK_ISR_EXIT_BARRIER;
1795 }
1796 
1797 void DMA0_715_DriverIRQHandler(void);
1798 void DMA0_715_DriverIRQHandler(void)
1799 {
1800     if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1801     {
1802         EDMA_HandleIRQ(s_EDMAHandle[7]);
1803     }
1804     if ((EDMA_GetChannelStatusFlags(DMA0, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1805     {
1806         EDMA_HandleIRQ(s_EDMAHandle[15]);
1807     }
1808     SDK_ISR_EXIT_BARRIER;
1809 }
1810 
1811 #if defined(DMA1)
1812 void DMA1_08_DriverIRQHandler(void);
1813 void DMA1_08_DriverIRQHandler(void)
1814 {
1815     if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1816     {
1817         EDMA_HandleIRQ(s_EDMAHandle[16]);
1818     }
1819     if ((EDMA_GetChannelStatusFlags(DMA1, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1820     {
1821         EDMA_HandleIRQ(s_EDMAHandle[24]);
1822     }
1823     SDK_ISR_EXIT_BARRIER;
1824 }
1825 
1826 void DMA1_19_DriverIRQHandler(void);
1827 void DMA1_19_DriverIRQHandler(void)
1828 {
1829     if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1830     {
1831         EDMA_HandleIRQ(s_EDMAHandle[17]);
1832     }
1833     if ((EDMA_GetChannelStatusFlags(DMA1, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1834     {
1835         EDMA_HandleIRQ(s_EDMAHandle[25]);
1836     }
1837     SDK_ISR_EXIT_BARRIER;
1838 }
1839 
1840 void DMA1_210_DriverIRQHandler(void);
1841 void DMA1_210_DriverIRQHandler(void)
1842 {
1843     if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1844     {
1845         EDMA_HandleIRQ(s_EDMAHandle[18]);
1846     }
1847     if ((EDMA_GetChannelStatusFlags(DMA1, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1848     {
1849         EDMA_HandleIRQ(s_EDMAHandle[26]);
1850     }
1851     SDK_ISR_EXIT_BARRIER;
1852 }
1853 
1854 void DMA1_311_DriverIRQHandler(void);
1855 void DMA1_311_DriverIRQHandler(void)
1856 {
1857     if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1858     {
1859         EDMA_HandleIRQ(s_EDMAHandle[19]);
1860     }
1861     if ((EDMA_GetChannelStatusFlags(DMA1, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1862     {
1863         EDMA_HandleIRQ(s_EDMAHandle[27]);
1864     }
1865     SDK_ISR_EXIT_BARRIER;
1866 }
1867 
1868 void DMA1_412_DriverIRQHandler(void);
1869 void DMA1_412_DriverIRQHandler(void)
1870 {
1871     if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1872     {
1873         EDMA_HandleIRQ(s_EDMAHandle[20]);
1874     }
1875     if ((EDMA_GetChannelStatusFlags(DMA1, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1876     {
1877         EDMA_HandleIRQ(s_EDMAHandle[28]);
1878     }
1879     SDK_ISR_EXIT_BARRIER;
1880 }
1881 
1882 void DMA1_513_DriverIRQHandler(void);
1883 void DMA1_513_DriverIRQHandler(void)
1884 {
1885     if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1886     {
1887         EDMA_HandleIRQ(s_EDMAHandle[21]);
1888     }
1889     if ((EDMA_GetChannelStatusFlags(DMA1, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1890     {
1891         EDMA_HandleIRQ(s_EDMAHandle[29]);
1892     }
1893     SDK_ISR_EXIT_BARRIER;
1894 }
1895 
1896 void DMA1_614_DriverIRQHandler(void);
1897 void DMA1_614_DriverIRQHandler(void)
1898 {
1899     if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1900     {
1901         EDMA_HandleIRQ(s_EDMAHandle[22]);
1902     }
1903     if ((EDMA_GetChannelStatusFlags(DMA1, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1904     {
1905         EDMA_HandleIRQ(s_EDMAHandle[30]);
1906     }
1907     SDK_ISR_EXIT_BARRIER;
1908 }
1909 
1910 void DMA1_715_DriverIRQHandler(void);
1911 void DMA1_715_DriverIRQHandler(void)
1912 {
1913     if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1914     {
1915         EDMA_HandleIRQ(s_EDMAHandle[23]);
1916     }
1917     if ((EDMA_GetChannelStatusFlags(DMA1, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1918     {
1919         EDMA_HandleIRQ(s_EDMAHandle[31]);
1920     }
1921     SDK_ISR_EXIT_BARRIER;
1922 }
1923 #endif
1924 #endif /* 16 channels (Shared) */
1925 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
1926 
1927 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
1928     (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 16)
1929 /* 32 channels (Shared): k80 */
1930 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && FSL_FEATURE_EDMA_MODULE_CHANNEL == 32U
1931 #if defined(DMA0)
1932 void DMA0_DMA16_DriverIRQHandler(void);
1933 void DMA0_DMA16_DriverIRQHandler(void)
1934 {
1935     if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1936     {
1937         EDMA_HandleIRQ(s_EDMAHandle[0]);
1938     }
1939     if ((EDMA_GetChannelStatusFlags(DMA0, 16U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1940     {
1941         EDMA_HandleIRQ(s_EDMAHandle[16]);
1942     }
1943     SDK_ISR_EXIT_BARRIER;
1944 }
1945 
1946 void DMA1_DMA17_DriverIRQHandler(void);
1947 void DMA1_DMA17_DriverIRQHandler(void)
1948 {
1949     if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1950     {
1951         EDMA_HandleIRQ(s_EDMAHandle[1]);
1952     }
1953     if ((EDMA_GetChannelStatusFlags(DMA0, 17U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1954     {
1955         EDMA_HandleIRQ(s_EDMAHandle[17]);
1956     }
1957     SDK_ISR_EXIT_BARRIER;
1958 }
1959 
1960 void DMA2_DMA18_DriverIRQHandler(void);
1961 void DMA2_DMA18_DriverIRQHandler(void)
1962 {
1963     if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1964     {
1965         EDMA_HandleIRQ(s_EDMAHandle[2]);
1966     }
1967     if ((EDMA_GetChannelStatusFlags(DMA0, 18U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1968     {
1969         EDMA_HandleIRQ(s_EDMAHandle[18]);
1970     }
1971     SDK_ISR_EXIT_BARRIER;
1972 }
1973 
1974 void DMA3_DMA19_DriverIRQHandler(void);
1975 void DMA3_DMA19_DriverIRQHandler(void)
1976 {
1977     if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1978     {
1979         EDMA_HandleIRQ(s_EDMAHandle[3]);
1980     }
1981     if ((EDMA_GetChannelStatusFlags(DMA0, 19U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1982     {
1983         EDMA_HandleIRQ(s_EDMAHandle[19]);
1984     }
1985     SDK_ISR_EXIT_BARRIER;
1986 }
1987 
1988 void DMA4_DMA20_DriverIRQHandler(void);
1989 void DMA4_DMA20_DriverIRQHandler(void)
1990 {
1991     if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1992     {
1993         EDMA_HandleIRQ(s_EDMAHandle[4]);
1994     }
1995     if ((EDMA_GetChannelStatusFlags(DMA0, 20U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1996     {
1997         EDMA_HandleIRQ(s_EDMAHandle[20]);
1998     }
1999     SDK_ISR_EXIT_BARRIER;
2000 }
2001 
2002 void DMA5_DMA21_DriverIRQHandler(void);
2003 void DMA5_DMA21_DriverIRQHandler(void)
2004 {
2005     if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2006     {
2007         EDMA_HandleIRQ(s_EDMAHandle[5]);
2008     }
2009     if ((EDMA_GetChannelStatusFlags(DMA0, 21U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2010     {
2011         EDMA_HandleIRQ(s_EDMAHandle[21]);
2012     }
2013     SDK_ISR_EXIT_BARRIER;
2014 }
2015 
2016 void DMA6_DMA22_DriverIRQHandler(void);
2017 void DMA6_DMA22_DriverIRQHandler(void)
2018 {
2019     if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2020     {
2021         EDMA_HandleIRQ(s_EDMAHandle[6]);
2022     }
2023     if ((EDMA_GetChannelStatusFlags(DMA0, 22U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2024     {
2025         EDMA_HandleIRQ(s_EDMAHandle[22]);
2026     }
2027     SDK_ISR_EXIT_BARRIER;
2028 }
2029 
2030 void DMA7_DMA23_DriverIRQHandler(void);
2031 void DMA7_DMA23_DriverIRQHandler(void)
2032 {
2033     if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2034     {
2035         EDMA_HandleIRQ(s_EDMAHandle[7]);
2036     }
2037     if ((EDMA_GetChannelStatusFlags(DMA0, 23U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2038     {
2039         EDMA_HandleIRQ(s_EDMAHandle[23]);
2040     }
2041     SDK_ISR_EXIT_BARRIER;
2042 }
2043 
2044 void DMA8_DMA24_DriverIRQHandler(void);
2045 void DMA8_DMA24_DriverIRQHandler(void)
2046 {
2047     if ((EDMA_GetChannelStatusFlags(DMA0, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2048     {
2049         EDMA_HandleIRQ(s_EDMAHandle[8]);
2050     }
2051     if ((EDMA_GetChannelStatusFlags(DMA0, 24U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2052     {
2053         EDMA_HandleIRQ(s_EDMAHandle[24]);
2054     }
2055     SDK_ISR_EXIT_BARRIER;
2056 }
2057 
2058 void DMA9_DMA25_DriverIRQHandler(void);
2059 void DMA9_DMA25_DriverIRQHandler(void)
2060 {
2061     if ((EDMA_GetChannelStatusFlags(DMA0, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2062     {
2063         EDMA_HandleIRQ(s_EDMAHandle[9]);
2064     }
2065     if ((EDMA_GetChannelStatusFlags(DMA0, 25U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2066     {
2067         EDMA_HandleIRQ(s_EDMAHandle[25]);
2068     }
2069     SDK_ISR_EXIT_BARRIER;
2070 }
2071 
2072 void DMA10_DMA26_DriverIRQHandler(void);
2073 void DMA10_DMA26_DriverIRQHandler(void)
2074 {
2075     if ((EDMA_GetChannelStatusFlags(DMA0, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2076     {
2077         EDMA_HandleIRQ(s_EDMAHandle[10]);
2078     }
2079     if ((EDMA_GetChannelStatusFlags(DMA0, 26U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2080     {
2081         EDMA_HandleIRQ(s_EDMAHandle[26]);
2082     }
2083     SDK_ISR_EXIT_BARRIER;
2084 }
2085 
2086 void DMA11_DMA27_DriverIRQHandler(void);
2087 void DMA11_DMA27_DriverIRQHandler(void)
2088 {
2089     if ((EDMA_GetChannelStatusFlags(DMA0, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2090     {
2091         EDMA_HandleIRQ(s_EDMAHandle[11]);
2092     }
2093     if ((EDMA_GetChannelStatusFlags(DMA0, 27U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2094     {
2095         EDMA_HandleIRQ(s_EDMAHandle[27]);
2096     }
2097     SDK_ISR_EXIT_BARRIER;
2098 }
2099 
2100 void DMA12_DMA28_DriverIRQHandler(void);
2101 void DMA12_DMA28_DriverIRQHandler(void)
2102 {
2103     if ((EDMA_GetChannelStatusFlags(DMA0, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2104     {
2105         EDMA_HandleIRQ(s_EDMAHandle[12]);
2106     }
2107     if ((EDMA_GetChannelStatusFlags(DMA0, 28U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2108     {
2109         EDMA_HandleIRQ(s_EDMAHandle[28]);
2110     }
2111     SDK_ISR_EXIT_BARRIER;
2112 }
2113 
2114 void DMA13_DMA29_DriverIRQHandler(void);
2115 void DMA13_DMA29_DriverIRQHandler(void)
2116 {
2117     if ((EDMA_GetChannelStatusFlags(DMA0, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2118     {
2119         EDMA_HandleIRQ(s_EDMAHandle[13]);
2120     }
2121     if ((EDMA_GetChannelStatusFlags(DMA0, 29U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2122     {
2123         EDMA_HandleIRQ(s_EDMAHandle[29]);
2124     }
2125     SDK_ISR_EXIT_BARRIER;
2126 }
2127 
2128 void DMA14_DMA30_DriverIRQHandler(void);
2129 void DMA14_DMA30_DriverIRQHandler(void)
2130 {
2131     if ((EDMA_GetChannelStatusFlags(DMA0, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2132     {
2133         EDMA_HandleIRQ(s_EDMAHandle[14]);
2134     }
2135     if ((EDMA_GetChannelStatusFlags(DMA0, 30U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2136     {
2137         EDMA_HandleIRQ(s_EDMAHandle[30]);
2138     }
2139     SDK_ISR_EXIT_BARRIER;
2140 }
2141 
2142 void DMA15_DMA31_DriverIRQHandler(void);
2143 void DMA15_DMA31_DriverIRQHandler(void)
2144 {
2145     if ((EDMA_GetChannelStatusFlags(DMA0, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2146     {
2147         EDMA_HandleIRQ(s_EDMAHandle[15]);
2148     }
2149     if ((EDMA_GetChannelStatusFlags(DMA0, 31U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2150     {
2151         EDMA_HandleIRQ(s_EDMAHandle[31]);
2152     }
2153     SDK_ISR_EXIT_BARRIER;
2154 }
2155 
2156 #else
2157 void DMA0_DMA16_DriverIRQHandler(void);
2158 void DMA0_DMA16_DriverIRQHandler(void)
2159 {
2160     if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2161     {
2162         EDMA_HandleIRQ(s_EDMAHandle[0]);
2163     }
2164     if ((EDMA_GetChannelStatusFlags(DMA1, 16U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2165     {
2166         EDMA_HandleIRQ(s_EDMAHandle[16]);
2167     }
2168     SDK_ISR_EXIT_BARRIER;
2169 }
2170 
2171 void DMA1_DMA17_DriverIRQHandler(void);
2172 void DMA1_DMA17_DriverIRQHandler(void)
2173 {
2174     if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2175     {
2176         EDMA_HandleIRQ(s_EDMAHandle[1]);
2177     }
2178     if ((EDMA_GetChannelStatusFlags(DMA1, 17U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2179     {
2180         EDMA_HandleIRQ(s_EDMAHandle[17]);
2181     }
2182     SDK_ISR_EXIT_BARRIER;
2183 }
2184 
2185 void DMA2_DMA18_DriverIRQHandler(void);
2186 void DMA2_DMA18_DriverIRQHandler(void)
2187 {
2188     if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2189     {
2190         EDMA_HandleIRQ(s_EDMAHandle[2]);
2191     }
2192     if ((EDMA_GetChannelStatusFlags(DMA1, 18U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2193     {
2194         EDMA_HandleIRQ(s_EDMAHandle[18]);
2195     }
2196     SDK_ISR_EXIT_BARRIER;
2197 }
2198 
2199 void DMA3_DMA19_DriverIRQHandler(void);
2200 void DMA3_DMA19_DriverIRQHandler(void)
2201 {
2202     if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2203     {
2204         EDMA_HandleIRQ(s_EDMAHandle[3]);
2205     }
2206     if ((EDMA_GetChannelStatusFlags(DMA1, 19U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2207     {
2208         EDMA_HandleIRQ(s_EDMAHandle[19]);
2209     }
2210     SDK_ISR_EXIT_BARRIER;
2211 }
2212 
2213 void DMA4_DMA20_DriverIRQHandler(void);
2214 void DMA4_DMA20_DriverIRQHandler(void)
2215 {
2216     if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2217     {
2218         EDMA_HandleIRQ(s_EDMAHandle[4]);
2219     }
2220     if ((EDMA_GetChannelStatusFlags(DMA1, 20U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2221     {
2222         EDMA_HandleIRQ(s_EDMAHandle[20]);
2223     }
2224     SDK_ISR_EXIT_BARRIER;
2225 }
2226 
2227 void DMA5_DMA21_DriverIRQHandler(void);
2228 void DMA5_DMA21_DriverIRQHandler(void)
2229 {
2230     if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2231     {
2232         EDMA_HandleIRQ(s_EDMAHandle[5]);
2233     }
2234     if ((EDMA_GetChannelStatusFlags(DMA1, 21U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2235     {
2236         EDMA_HandleIRQ(s_EDMAHandle[21]);
2237     }
2238     SDK_ISR_EXIT_BARRIER;
2239 }
2240 
2241 void DMA6_DMA22_DriverIRQHandler(void);
2242 void DMA6_DMA22_DriverIRQHandler(void)
2243 {
2244     if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2245     {
2246         EDMA_HandleIRQ(s_EDMAHandle[6]);
2247     }
2248     if ((EDMA_GetChannelStatusFlags(DMA1, 22U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2249     {
2250         EDMA_HandleIRQ(s_EDMAHandle[22]);
2251     }
2252     SDK_ISR_EXIT_BARRIER;
2253 }
2254 
2255 void DMA7_DMA23_DriverIRQHandler(void);
2256 void DMA7_DMA23_DriverIRQHandler(void)
2257 {
2258     if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2259     {
2260         EDMA_HandleIRQ(s_EDMAHandle[7]);
2261     }
2262     if ((EDMA_GetChannelStatusFlags(DMA1, 23U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2263     {
2264         EDMA_HandleIRQ(s_EDMAHandle[23]);
2265     }
2266     SDK_ISR_EXIT_BARRIER;
2267 }
2268 
2269 void DMA8_DMA24_DriverIRQHandler(void);
2270 void DMA8_DMA24_DriverIRQHandler(void)
2271 {
2272     if ((EDMA_GetChannelStatusFlags(DMA1, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2273     {
2274         EDMA_HandleIRQ(s_EDMAHandle[8]);
2275     }
2276     if ((EDMA_GetChannelStatusFlags(DMA1, 24U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2277     {
2278         EDMA_HandleIRQ(s_EDMAHandle[24]);
2279     }
2280     SDK_ISR_EXIT_BARRIER;
2281 }
2282 
2283 void DMA9_DMA25_DriverIRQHandler(void);
2284 void DMA9_DMA25_DriverIRQHandler(void)
2285 {
2286     if ((EDMA_GetChannelStatusFlags(DMA1, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2287     {
2288         EDMA_HandleIRQ(s_EDMAHandle[9]);
2289     }
2290     if ((EDMA_GetChannelStatusFlags(DMA1, 25U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2291     {
2292         EDMA_HandleIRQ(s_EDMAHandle[25]);
2293     }
2294     SDK_ISR_EXIT_BARRIER;
2295 }
2296 
2297 void DMA10_DMA26_DriverIRQHandler(void);
2298 void DMA10_DMA26_DriverIRQHandler(void)
2299 {
2300     if ((EDMA_GetChannelStatusFlags(DMA1, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2301     {
2302         EDMA_HandleIRQ(s_EDMAHandle[10]);
2303     }
2304     if ((EDMA_GetChannelStatusFlags(DMA1, 26U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2305     {
2306         EDMA_HandleIRQ(s_EDMAHandle[26]);
2307     }
2308     SDK_ISR_EXIT_BARRIER;
2309 }
2310 
2311 void DMA11_DMA27_DriverIRQHandler(void);
2312 void DMA11_DMA27_DriverIRQHandler(void)
2313 {
2314     if ((EDMA_GetChannelStatusFlags(DMA1, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2315     {
2316         EDMA_HandleIRQ(s_EDMAHandle[11]);
2317     }
2318     if ((EDMA_GetChannelStatusFlags(DMA1, 27U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2319     {
2320         EDMA_HandleIRQ(s_EDMAHandle[27]);
2321     }
2322     SDK_ISR_EXIT_BARRIER;
2323 }
2324 
2325 void DMA12_DMA28_DriverIRQHandler(void);
2326 void DMA12_DMA28_DriverIRQHandler(void)
2327 {
2328     if ((EDMA_GetChannelStatusFlags(DMA1, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2329     {
2330         EDMA_HandleIRQ(s_EDMAHandle[12]);
2331     }
2332     if ((EDMA_GetChannelStatusFlags(DMA1, 28U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2333     {
2334         EDMA_HandleIRQ(s_EDMAHandle[28]);
2335     }
2336     SDK_ISR_EXIT_BARRIER;
2337 }
2338 
2339 void DMA13_DMA29_DriverIRQHandler(void);
2340 void DMA13_DMA29_DriverIRQHandler(void)
2341 {
2342     if ((EDMA_GetChannelStatusFlags(DMA1, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2343     {
2344         EDMA_HandleIRQ(s_EDMAHandle[13]);
2345     }
2346     if ((EDMA_GetChannelStatusFlags(DMA1, 29U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2347     {
2348         EDMA_HandleIRQ(s_EDMAHandle[29]);
2349     }
2350     SDK_ISR_EXIT_BARRIER;
2351 }
2352 
2353 void DMA14_DMA30_DriverIRQHandler(void);
2354 void DMA14_DMA30_DriverIRQHandler(void)
2355 {
2356     if ((EDMA_GetChannelStatusFlags(DMA1, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2357     {
2358         EDMA_HandleIRQ(s_EDMAHandle[14]);
2359     }
2360     if ((EDMA_GetChannelStatusFlags(DMA1, 30U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2361     {
2362         EDMA_HandleIRQ(s_EDMAHandle[30]);
2363     }
2364     SDK_ISR_EXIT_BARRIER;
2365 }
2366 
2367 void DMA15_DMA31_DriverIRQHandler(void);
2368 void DMA15_DMA31_DriverIRQHandler(void)
2369 {
2370     if ((EDMA_GetChannelStatusFlags(DMA1, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2371     {
2372         EDMA_HandleIRQ(s_EDMAHandle[15]);
2373     }
2374     if ((EDMA_GetChannelStatusFlags(DMA1, 31U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2375     {
2376         EDMA_HandleIRQ(s_EDMAHandle[31]);
2377     }
2378     SDK_ISR_EXIT_BARRIER;
2379 }
2380 
2381 #endif
2382 #endif /* 32 channels (Shared) */
2383 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
2384 
2385 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
2386     (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 4)
2387 /* 32 channels (Shared): MCIMX7U5_M4 */
2388 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL == 32U)
2389 
2390 void DMA0_0_4_DriverIRQHandler(void);
2391 void DMA0_0_4_DriverIRQHandler(void)
2392 {
2393     if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2394     {
2395         EDMA_HandleIRQ(s_EDMAHandle[0]);
2396     }
2397     if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2398     {
2399         EDMA_HandleIRQ(s_EDMAHandle[4]);
2400     }
2401     SDK_ISR_EXIT_BARRIER;
2402 }
2403 
2404 void DMA0_1_5_DriverIRQHandler(void);
2405 void DMA0_1_5_DriverIRQHandler(void)
2406 {
2407     if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2408     {
2409         EDMA_HandleIRQ(s_EDMAHandle[1]);
2410     }
2411     if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2412     {
2413         EDMA_HandleIRQ(s_EDMAHandle[5]);
2414     }
2415     SDK_ISR_EXIT_BARRIER;
2416 }
2417 
2418 void DMA0_2_6_DriverIRQHandler(void);
2419 void DMA0_2_6_DriverIRQHandler(void)
2420 {
2421     if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2422     {
2423         EDMA_HandleIRQ(s_EDMAHandle[2]);
2424     }
2425     if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2426     {
2427         EDMA_HandleIRQ(s_EDMAHandle[6]);
2428     }
2429     SDK_ISR_EXIT_BARRIER;
2430 }
2431 
2432 void DMA0_3_7_DriverIRQHandler(void);
2433 void DMA0_3_7_DriverIRQHandler(void)
2434 {
2435     if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2436     {
2437         EDMA_HandleIRQ(s_EDMAHandle[3]);
2438     }
2439     if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2440     {
2441         EDMA_HandleIRQ(s_EDMAHandle[7]);
2442     }
2443     SDK_ISR_EXIT_BARRIER;
2444 }
2445 
2446 void DMA0_8_12_DriverIRQHandler(void);
2447 void DMA0_8_12_DriverIRQHandler(void)
2448 {
2449     if ((EDMA_GetChannelStatusFlags(DMA0, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2450     {
2451         EDMA_HandleIRQ(s_EDMAHandle[8]);
2452     }
2453     if ((EDMA_GetChannelStatusFlags(DMA0, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2454     {
2455         EDMA_HandleIRQ(s_EDMAHandle[12]);
2456     }
2457     SDK_ISR_EXIT_BARRIER;
2458 }
2459 
2460 void DMA0_9_13_DriverIRQHandler(void);
2461 void DMA0_9_13_DriverIRQHandler(void)
2462 {
2463     if ((EDMA_GetChannelStatusFlags(DMA0, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2464     {
2465         EDMA_HandleIRQ(s_EDMAHandle[9]);
2466     }
2467     if ((EDMA_GetChannelStatusFlags(DMA0, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2468     {
2469         EDMA_HandleIRQ(s_EDMAHandle[13]);
2470     }
2471     SDK_ISR_EXIT_BARRIER;
2472 }
2473 
2474 void DMA0_10_14_DriverIRQHandler(void);
2475 void DMA0_10_14_DriverIRQHandler(void)
2476 {
2477     if ((EDMA_GetChannelStatusFlags(DMA0, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2478     {
2479         EDMA_HandleIRQ(s_EDMAHandle[10]);
2480     }
2481     if ((EDMA_GetChannelStatusFlags(DMA0, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2482     {
2483         EDMA_HandleIRQ(s_EDMAHandle[14]);
2484     }
2485     SDK_ISR_EXIT_BARRIER;
2486 }
2487 
2488 void DMA0_11_15_DriverIRQHandler(void);
2489 void DMA0_11_15_DriverIRQHandler(void)
2490 {
2491     if ((EDMA_GetChannelStatusFlags(DMA0, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2492     {
2493         EDMA_HandleIRQ(s_EDMAHandle[11]);
2494     }
2495     if ((EDMA_GetChannelStatusFlags(DMA0, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2496     {
2497         EDMA_HandleIRQ(s_EDMAHandle[15]);
2498     }
2499     SDK_ISR_EXIT_BARRIER;
2500 }
2501 
2502 void DMA0_16_20_DriverIRQHandler(void);
2503 void DMA0_16_20_DriverIRQHandler(void)
2504 {
2505     if ((EDMA_GetChannelStatusFlags(DMA0, 16U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2506     {
2507         EDMA_HandleIRQ(s_EDMAHandle[16]);
2508     }
2509     if ((EDMA_GetChannelStatusFlags(DMA0, 20U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2510     {
2511         EDMA_HandleIRQ(s_EDMAHandle[20]);
2512     }
2513     SDK_ISR_EXIT_BARRIER;
2514 }
2515 
2516 void DMA0_17_21_DriverIRQHandler(void);
2517 void DMA0_17_21_DriverIRQHandler(void)
2518 {
2519     if ((EDMA_GetChannelStatusFlags(DMA0, 17U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2520     {
2521         EDMA_HandleIRQ(s_EDMAHandle[17]);
2522     }
2523     if ((EDMA_GetChannelStatusFlags(DMA0, 21U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2524     {
2525         EDMA_HandleIRQ(s_EDMAHandle[21]);
2526     }
2527     SDK_ISR_EXIT_BARRIER;
2528 }
2529 
2530 void DMA0_18_22_DriverIRQHandler(void);
2531 void DMA0_18_22_DriverIRQHandler(void)
2532 {
2533     if ((EDMA_GetChannelStatusFlags(DMA0, 18U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2534     {
2535         EDMA_HandleIRQ(s_EDMAHandle[18]);
2536     }
2537     if ((EDMA_GetChannelStatusFlags(DMA0, 22U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2538     {
2539         EDMA_HandleIRQ(s_EDMAHandle[22]);
2540     }
2541     SDK_ISR_EXIT_BARRIER;
2542 }
2543 
2544 void DMA0_19_23_DriverIRQHandler(void);
2545 void DMA0_19_23_DriverIRQHandler(void)
2546 {
2547     if ((EDMA_GetChannelStatusFlags(DMA0, 19U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2548     {
2549         EDMA_HandleIRQ(s_EDMAHandle[19]);
2550     }
2551     if ((EDMA_GetChannelStatusFlags(DMA0, 23U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2552     {
2553         EDMA_HandleIRQ(s_EDMAHandle[23]);
2554     }
2555     SDK_ISR_EXIT_BARRIER;
2556 }
2557 
2558 void DMA0_24_28_DriverIRQHandler(void);
2559 void DMA0_24_28_DriverIRQHandler(void)
2560 {
2561     if ((EDMA_GetChannelStatusFlags(DMA0, 24U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2562     {
2563         EDMA_HandleIRQ(s_EDMAHandle[24]);
2564     }
2565     if ((EDMA_GetChannelStatusFlags(DMA0, 28U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2566     {
2567         EDMA_HandleIRQ(s_EDMAHandle[28]);
2568     }
2569     SDK_ISR_EXIT_BARRIER;
2570 }
2571 
2572 void DMA0_25_29_DriverIRQHandler(void);
2573 void DMA0_25_29_DriverIRQHandler(void)
2574 {
2575     if ((EDMA_GetChannelStatusFlags(DMA0, 25U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2576     {
2577         EDMA_HandleIRQ(s_EDMAHandle[25]);
2578     }
2579     if ((EDMA_GetChannelStatusFlags(DMA0, 29U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2580     {
2581         EDMA_HandleIRQ(s_EDMAHandle[29]);
2582     }
2583     SDK_ISR_EXIT_BARRIER;
2584 }
2585 
2586 void DMA0_26_30_DriverIRQHandler(void);
2587 void DMA0_26_30_DriverIRQHandler(void)
2588 {
2589     if ((EDMA_GetChannelStatusFlags(DMA0, 26U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2590     {
2591         EDMA_HandleIRQ(s_EDMAHandle[26]);
2592     }
2593     if ((EDMA_GetChannelStatusFlags(DMA0, 30U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2594     {
2595         EDMA_HandleIRQ(s_EDMAHandle[30]);
2596     }
2597     SDK_ISR_EXIT_BARRIER;
2598 }
2599 
2600 void DMA0_27_31_DriverIRQHandler(void);
2601 void DMA0_27_31_DriverIRQHandler(void)
2602 {
2603     if ((EDMA_GetChannelStatusFlags(DMA0, 27U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2604     {
2605         EDMA_HandleIRQ(s_EDMAHandle[27]);
2606     }
2607     if ((EDMA_GetChannelStatusFlags(DMA0, 31U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2608     {
2609         EDMA_HandleIRQ(s_EDMAHandle[31]);
2610     }
2611     SDK_ISR_EXIT_BARRIER;
2612 }
2613 #endif /* 32 channels (Shared): MCIMX7U5 */
2614 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
2615 
2616 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
2617     (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 0)
2618 /* 4 channels (No Shared): kv10  */
2619 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 0)
2620 
2621 void DMA0_DriverIRQHandler(void);
2622 void DMA0_DriverIRQHandler(void)
2623 {
2624     EDMA_HandleIRQ(s_EDMAHandle[0]);
2625     SDK_ISR_EXIT_BARRIER;
2626 }
2627 
2628 void DMA1_DriverIRQHandler(void);
2629 void DMA1_DriverIRQHandler(void)
2630 {
2631     EDMA_HandleIRQ(s_EDMAHandle[1]);
2632     SDK_ISR_EXIT_BARRIER;
2633 }
2634 
2635 void DMA2_DriverIRQHandler(void);
2636 void DMA2_DriverIRQHandler(void)
2637 {
2638     EDMA_HandleIRQ(s_EDMAHandle[2]);
2639     SDK_ISR_EXIT_BARRIER;
2640 }
2641 
2642 void DMA3_DriverIRQHandler(void);
2643 void DMA3_DriverIRQHandler(void)
2644 {
2645     EDMA_HandleIRQ(s_EDMAHandle[3]);
2646     SDK_ISR_EXIT_BARRIER;
2647 }
2648 
2649 /* 8 channels (No Shared) */
2650 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 4U)
2651 
2652 void DMA4_DriverIRQHandler(void);
2653 void DMA4_DriverIRQHandler(void)
2654 {
2655     EDMA_HandleIRQ(s_EDMAHandle[4]);
2656     SDK_ISR_EXIT_BARRIER;
2657 }
2658 
2659 void DMA5_DriverIRQHandler(void);
2660 void DMA5_DriverIRQHandler(void)
2661 {
2662     EDMA_HandleIRQ(s_EDMAHandle[5]);
2663     SDK_ISR_EXIT_BARRIER;
2664 }
2665 
2666 void DMA6_DriverIRQHandler(void);
2667 void DMA6_DriverIRQHandler(void)
2668 {
2669     EDMA_HandleIRQ(s_EDMAHandle[6]);
2670     SDK_ISR_EXIT_BARRIER;
2671 }
2672 
2673 void DMA7_DriverIRQHandler(void);
2674 void DMA7_DriverIRQHandler(void)
2675 {
2676     EDMA_HandleIRQ(s_EDMAHandle[7]);
2677     SDK_ISR_EXIT_BARRIER;
2678 }
2679 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL == 8 */
2680 
2681 /* 16 channels (No Shared) */
2682 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 8U)
2683 
2684 void DMA8_DriverIRQHandler(void);
2685 void DMA8_DriverIRQHandler(void)
2686 {
2687     EDMA_HandleIRQ(s_EDMAHandle[8]);
2688     SDK_ISR_EXIT_BARRIER;
2689 }
2690 
2691 void DMA9_DriverIRQHandler(void);
2692 void DMA9_DriverIRQHandler(void)
2693 {
2694     EDMA_HandleIRQ(s_EDMAHandle[9]);
2695     SDK_ISR_EXIT_BARRIER;
2696 }
2697 
2698 void DMA10_DriverIRQHandler(void);
2699 void DMA10_DriverIRQHandler(void)
2700 {
2701     EDMA_HandleIRQ(s_EDMAHandle[10]);
2702     SDK_ISR_EXIT_BARRIER;
2703 }
2704 
2705 void DMA11_DriverIRQHandler(void);
2706 void DMA11_DriverIRQHandler(void)
2707 {
2708     EDMA_HandleIRQ(s_EDMAHandle[11]);
2709     SDK_ISR_EXIT_BARRIER;
2710 }
2711 
2712 void DMA12_DriverIRQHandler(void);
2713 void DMA12_DriverIRQHandler(void)
2714 {
2715     EDMA_HandleIRQ(s_EDMAHandle[12]);
2716     SDK_ISR_EXIT_BARRIER;
2717 }
2718 
2719 void DMA13_DriverIRQHandler(void);
2720 void DMA13_DriverIRQHandler(void)
2721 {
2722     EDMA_HandleIRQ(s_EDMAHandle[13]);
2723     SDK_ISR_EXIT_BARRIER;
2724 }
2725 
2726 void DMA14_DriverIRQHandler(void);
2727 void DMA14_DriverIRQHandler(void)
2728 {
2729     EDMA_HandleIRQ(s_EDMAHandle[14]);
2730     SDK_ISR_EXIT_BARRIER;
2731 }
2732 
2733 void DMA15_DriverIRQHandler(void);
2734 void DMA15_DriverIRQHandler(void)
2735 {
2736     EDMA_HandleIRQ(s_EDMAHandle[15]);
2737     SDK_ISR_EXIT_BARRIER;
2738 }
2739 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL == 16 */
2740 
2741 /* 32 channels (No Shared) */
2742 #if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 16U)
2743 
2744 void DMA16_DriverIRQHandler(void);
2745 void DMA16_DriverIRQHandler(void)
2746 {
2747     EDMA_HandleIRQ(s_EDMAHandle[16]);
2748     SDK_ISR_EXIT_BARRIER;
2749 }
2750 
2751 void DMA17_DriverIRQHandler(void);
2752 void DMA17_DriverIRQHandler(void)
2753 {
2754     EDMA_HandleIRQ(s_EDMAHandle[17]);
2755     SDK_ISR_EXIT_BARRIER;
2756 }
2757 
2758 void DMA18_DriverIRQHandler(void);
2759 void DMA18_DriverIRQHandler(void)
2760 {
2761     EDMA_HandleIRQ(s_EDMAHandle[18]);
2762     SDK_ISR_EXIT_BARRIER;
2763 }
2764 
2765 void DMA19_DriverIRQHandler(void);
2766 void DMA19_DriverIRQHandler(void)
2767 {
2768     EDMA_HandleIRQ(s_EDMAHandle[19]);
2769     SDK_ISR_EXIT_BARRIER;
2770 }
2771 
2772 void DMA20_DriverIRQHandler(void);
2773 void DMA20_DriverIRQHandler(void)
2774 {
2775     EDMA_HandleIRQ(s_EDMAHandle[20]);
2776     SDK_ISR_EXIT_BARRIER;
2777 }
2778 
2779 void DMA21_DriverIRQHandler(void);
2780 void DMA21_DriverIRQHandler(void)
2781 {
2782     EDMA_HandleIRQ(s_EDMAHandle[21]);
2783     SDK_ISR_EXIT_BARRIER;
2784 }
2785 
2786 void DMA22_DriverIRQHandler(void);
2787 void DMA22_DriverIRQHandler(void)
2788 {
2789     EDMA_HandleIRQ(s_EDMAHandle[22]);
2790     SDK_ISR_EXIT_BARRIER;
2791 }
2792 
2793 void DMA23_DriverIRQHandler(void);
2794 void DMA23_DriverIRQHandler(void)
2795 {
2796     EDMA_HandleIRQ(s_EDMAHandle[23]);
2797     SDK_ISR_EXIT_BARRIER;
2798 }
2799 
2800 void DMA24_DriverIRQHandler(void);
2801 void DMA24_DriverIRQHandler(void)
2802 {
2803     EDMA_HandleIRQ(s_EDMAHandle[24]);
2804     SDK_ISR_EXIT_BARRIER;
2805 }
2806 
2807 void DMA25_DriverIRQHandler(void);
2808 void DMA25_DriverIRQHandler(void)
2809 {
2810     EDMA_HandleIRQ(s_EDMAHandle[25]);
2811     SDK_ISR_EXIT_BARRIER;
2812 }
2813 
2814 void DMA26_DriverIRQHandler(void);
2815 void DMA26_DriverIRQHandler(void)
2816 {
2817     EDMA_HandleIRQ(s_EDMAHandle[26]);
2818     SDK_ISR_EXIT_BARRIER;
2819 }
2820 
2821 void DMA27_DriverIRQHandler(void);
2822 void DMA27_DriverIRQHandler(void)
2823 {
2824     EDMA_HandleIRQ(s_EDMAHandle[27]);
2825     SDK_ISR_EXIT_BARRIER;
2826 }
2827 
2828 void DMA28_DriverIRQHandler(void);
2829 void DMA28_DriverIRQHandler(void)
2830 {
2831     EDMA_HandleIRQ(s_EDMAHandle[28]);
2832     SDK_ISR_EXIT_BARRIER;
2833 }
2834 
2835 void DMA29_DriverIRQHandler(void);
2836 void DMA29_DriverIRQHandler(void)
2837 {
2838     EDMA_HandleIRQ(s_EDMAHandle[29]);
2839     SDK_ISR_EXIT_BARRIER;
2840 }
2841 
2842 void DMA30_DriverIRQHandler(void);
2843 void DMA30_DriverIRQHandler(void)
2844 {
2845     EDMA_HandleIRQ(s_EDMAHandle[30]);
2846     SDK_ISR_EXIT_BARRIER;
2847 }
2848 
2849 void DMA31_DriverIRQHandler(void);
2850 void DMA31_DriverIRQHandler(void)
2851 {
2852     EDMA_HandleIRQ(s_EDMAHandle[31]);
2853     SDK_ISR_EXIT_BARRIER;
2854 }
2855 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL == 32 */
2856 
2857 #endif /* 4/8/16/32 channels (No Shared)  */
2858 #endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
2859