/**
 * @file xmc_dma.c
 * @date 2019-05-07
 *
 * @cond
 *********************************************************************************************************************
 * XMClib v2.1.24 - XMC Peripheral Driver Library
 *
 * Copyright (c) 2015-2019, Infineon Technologies AG
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
 * following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list of conditions and the following
 * disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
 * disclaimer in the documentation and/or other materials provided with the distribution.
 *
 * Neither the name of the copyright holders nor the names of its contributors may be used to endorse or promote
 * products derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * To improve the quality of the software, users are encouraged to share modifications, enhancements or bug fixes with
 * Infineon Technologies AG (dave@infineon.com).
 *********************************************************************************************************************
 *
 * Change History
 * --------------
 *
 * 2015-02-20:
 * - Initial <br>
 *
 * 2015-05-20:
 * - Added the declarations for the following APIs: <br>
 *   XMC_DMA_DisableRequestLine, XMC_DMA_ClearRequestLine, <br>
 *   XMC_DMA_CH_ClearSourcePeripheralRequest, <br>
 *   XMC_DMA_CH_ClearDestinationPeripheralRequest <br>
 * - Removed PRIOARRAY <br>
 * - Documentation updates <br>
 *
 * 2015-06-20:
 * - Removed GetDriverVersion API <br>
 * - Updated XMC_DMA_CH_Init() to support scatter/gather functionality (only
 *   on advanced DMA channels) <br>
 * - Updated XMC_DMA_CH_Disable() <br>
 *
 * 2016-03-09:
 * - Optimized write-only registers
 *
 * 2016-04-08:
 * - Updated XMC_DMA_CH_EnableEvent and XMC_DMA_CH_DisableEvent.
 *   Write optimization of MASKCHEV
 * - Fixed XMC_DMA_IRQHandler, clear channel event status before processing the event handler.
 *   This corrects event losses if the DMA triggered in the event handler finishes before returning from the handler.
 *
 * 2018-02-08:
 * - Fixed XMC_DMA_CH_RequestLastMultiblockTransfer() clearing CTLL_LLP_SRC_EN and CTLL_LLP_DST_EN
 *
 * 2019-05-07:
 * - Fixed compilation warnings
 *
 * @endcond
 */

/*******************************************************************************
 * HEADER FILES
 *******************************************************************************/

#include "xmc_dma.h"

#if defined (GPDMA0)

#include "xmc_scu.h"

/*******************************************************************************
 * MACROS
 *******************************************************************************/

#define DLR_SRSEL_RS_MSK           (0xfUL)
#define DLR_SRSEL_RS_BITSIZE       (4UL)
#define DMA_EVENT_MAX              (5UL)
#define GPDMA_CH_CFGH_DEST_PER_Pos GPDMA0_CH_CFGH_DEST_PER_Pos
#define GPDMA_CH_CFGH_SRC_PER_Pos  GPDMA0_CH_CFGH_SRC_PER_Pos
#define GPDMA0_CH_CFGH_PER_Msk     (0x7U)
#define GPDMA1_CH_CFGH_PER_Msk     (0x3U)
#define GPDMA_CH_CFGH_PER_BITSIZE  (4U)
#define GPDMA_CH_CTLL_INT_EN_Msk   GPDMA0_CH_CTLL_INT_EN_Msk

/*******************************************************************************
 * LOCAL DATA
 *******************************************************************************/

#if defined (GPDMA0)
XMC_DMA_CH_EVENT_HANDLER_t dma0_event_handlers[XMC_DMA0_NUM_CHANNELS];
#endif

#if defined (GPDMA1)
XMC_DMA_CH_EVENT_HANDLER_t dma1_event_handlers[XMC_DMA1_NUM_CHANNELS];
#endif

/*******************************************************************************
 * API IMPLEMENTATION
 *******************************************************************************/

/* Initialize GPDMA */
void XMC_DMA_Init(XMC_DMA_t *const dma)
{
  XMC_DMA_Enable(dma);
}

/* Enable GPDMA module */
void XMC_DMA_Enable(XMC_DMA_t *const dma)
{
#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#endif
#if defined(CLOCK_GATING_SUPPORTED)
    XMC_SCU_CLOCK_UngatePeripheralClock(XMC_SCU_PERIPHERAL_CLOCK_GPDMA0);
#endif
    XMC_SCU_RESET_DeassertPeripheralReset(XMC_SCU_PERIPHERAL_RESET_GPDMA0);
#if defined(GPDMA1)
  }
  else
  {
#if defined(CLOCK_GATING_SUPPORTED)
    XMC_SCU_CLOCK_UngatePeripheralClock(XMC_SCU_PERIPHERAL_CLOCK_GPDMA1);
#endif
    XMC_SCU_RESET_DeassertPeripheralReset(XMC_SCU_PERIPHERAL_RESET_GPDMA1);
  }
#endif

  dma->DMACFGREG = 0x1U;
}

/* Disable GPDMA module */
void XMC_DMA_Disable(XMC_DMA_t *const dma)
{
  dma->DMACFGREG = 0x0U;

#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#endif
    XMC_SCU_RESET_AssertPeripheralReset(XMC_SCU_PERIPHERAL_RESET_GPDMA0);
#if defined(CLOCK_GATING_SUPPORTED)
    XMC_SCU_CLOCK_GatePeripheralClock(XMC_SCU_PERIPHERAL_CLOCK_GPDMA0);
#endif
#if defined(GPDMA1)
  }
  else
  {
    XMC_SCU_RESET_AssertPeripheralReset(XMC_SCU_PERIPHERAL_RESET_GPDMA1);
#if defined(CLOCK_GATING_SUPPORTED)
    XMC_SCU_CLOCK_GatePeripheralClock(XMC_SCU_PERIPHERAL_CLOCK_GPDMA1);
#endif
  }
#endif
}

/* Check if the GPDMA peripheral is enabled */
bool XMC_DMA_IsEnabled(const XMC_DMA_t *const dma)
{
  bool status;

#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#endif
    status = !XMC_SCU_RESET_IsPeripheralResetAsserted(XMC_SCU_PERIPHERAL_RESET_GPDMA0);
#if defined(CLOCK_GATING_SUPPORTED)
    status = status && !XMC_SCU_CLOCK_IsPeripheralClockGated(XMC_SCU_PERIPHERAL_CLOCK_GPDMA0);
#endif
#if defined(GPDMA1)
  }
  else
  {
    status = !XMC_SCU_RESET_IsPeripheralResetAsserted(XMC_SCU_PERIPHERAL_RESET_GPDMA1);
#if defined(CLOCK_GATING_SUPPORTED)
    status = status && !XMC_SCU_CLOCK_IsPeripheralClockGated(XMC_SCU_PERIPHERAL_CLOCK_GPDMA1);
#endif
  }
#endif

  /* DMA reset is not asserted and peripheral clock is not gated */
  if (status == true)
  {
    status = status && (dma->DMACFGREG != 0U);
  }

  return status;
}

/* Enable request line */
void XMC_DMA_EnableRequestLine(XMC_DMA_t *const dma, uint8_t line, uint8_t peripheral)
{
#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#else
  XMC_UNUSED_ARG(dma);
#endif
    DLR->SRSEL0 = ((DLR->SRSEL0 & (uint32_t)~(DLR_SRSEL_RS_MSK << (line * DLR_SRSEL_RS_BITSIZE))) |
                   ((uint32_t)peripheral << (line * DLR_SRSEL_RS_BITSIZE)));
    DLR->LNEN |= (0x1UL << (line & GPDMA0_CH_CFGH_PER_Msk));
#if defined(GPDMA1)
  }
  else
  {
    DLR->SRSEL1 = ((DLR->SRSEL1 & (uint32_t)~(DLR_SRSEL_RS_MSK << (line * DLR_SRSEL_RS_BITSIZE))) |
                   ((uint32_t)peripheral << (line * DLR_SRSEL_RS_BITSIZE)));
    DLR->LNEN |= (0x100UL << line);
  }
#endif
}
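
/* Each request line occupies a 4-bit service-request field in DLR->SRSEL0
 * (GPDMA0) or DLR->SRSEL1 (GPDMA1). For example (illustrative values),
 * XMC_DMA_EnableRequestLine(XMC_DMA0, 2U, 5U) programs service request
 * source 5 into SRSEL0 bits [11:8] and sets DLR->LNEN bit 2; GPDMA1 lines
 * use the upper LNEN byte (bits 8..11) instead. */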

/* Disable request line */
void XMC_DMA_DisableRequestLine(XMC_DMA_t *const dma, uint8_t line)
{
#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#else
  XMC_UNUSED_ARG(dma);
#endif
    DLR->LNEN &= ~(0x1UL << line);
#if defined(GPDMA1)
  }
  else
  {
    DLR->LNEN &= ~(0x100UL << line);
  }
#endif
}

/* Clear request line */
void XMC_DMA_ClearRequestLine(XMC_DMA_t *const dma, uint8_t line)
{
#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#else
  XMC_UNUSED_ARG(dma);
#endif
    /* A pending request is cleared by toggling the line enable */
    DLR->LNEN &= ~(0x1UL << line);
    DLR->LNEN |= 0x1UL << line;
#if defined(GPDMA1)
  }
  else
  {
    DLR->LNEN &= ~(0x100UL << line);
    DLR->LNEN |= 0x100UL << line;
  }
#endif
}

/* Get DMA DLR overrun status */
bool XMC_DMA_GetOverrunStatus(XMC_DMA_t *const dma, uint8_t line)
{
  bool status;

#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#else
  XMC_UNUSED_ARG(dma);
#endif
    status = (bool)(DLR->OVRSTAT & (0x1UL << line));
#if defined(GPDMA1)
  }
  else
  {
    status = (bool)(DLR->OVRSTAT & (0x100UL << line));
  }
#endif

  return status;
}

/* Clear DMA DLR overrun status */
void XMC_DMA_ClearOverrunStatus(XMC_DMA_t *const dma, const uint8_t line)
{
#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#else
  XMC_UNUSED_ARG(dma);
#endif
    DLR->OVRCLR = (uint32_t)(0x1UL << line);
#if defined(GPDMA1)
  }
  else
  {
    DLR->OVRCLR = (uint32_t)(0x100UL << line);
  }
#endif
}

/* Disable DMA channel */
void XMC_DMA_CH_Disable(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CHENREG = (uint32_t)(0x100UL << channel);
  while ((dma->CHENREG & (uint32_t)(0x1UL << channel)) != 0U)
  {
    /* wait until the channel is disabled */
  }
}
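
/* Note on the write-only enable registers: CHENREG pairs each channel bit
 * with a write-enable bit eight positions above it, so writing
 * (0x101UL << channel) enables a channel and (0x100UL << channel) disables
 * it without a read-modify-write touching the other channels. The wait loop
 * above covers the case where the channel only goes inactive once an
 * ongoing burst has drained. */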

/* Check if a DMA channel is enabled */
bool XMC_DMA_CH_IsEnabled(XMC_DMA_t *const dma, const uint8_t channel)
{
  return (bool)(dma->CHENREG & ((uint32_t)1U << channel));
}

/* Initialize DMA channel */
XMC_DMA_CH_STATUS_t XMC_DMA_CH_Init(XMC_DMA_t *const dma, const uint8_t channel, const XMC_DMA_CH_CONFIG_t *const config)
{
  XMC_DMA_CH_STATUS_t status;
  uint8_t line;
  uint8_t peripheral;

  if (XMC_DMA_IsEnabled(dma) == true)
  {
    if (XMC_DMA_CH_IsEnabled(dma, channel) == false)
    {
      dma->CH[channel].SAR = config->src_addr;
      dma->CH[channel].DAR = config->dst_addr;
      dma->CH[channel].LLP = (uint32_t)config->linked_list_pointer;
      dma->CH[channel].CTLH = (uint32_t)config->block_size;
      dma->CH[channel].CTLL = config->control;

      dma->CH[channel].CFGL = (uint32_t)((uint32_t)config->priority |
                                         (uint32_t)GPDMA0_CH_CFGL_HS_SEL_SRC_Msk |
                                         (uint32_t)GPDMA0_CH_CFGL_HS_SEL_DST_Msk);

      if ((dma == XMC_DMA0) && (channel < (uint8_t)2))
      {
        /* Configure scatter and gather (only available on the advanced channels 0 and 1) */
        dma->CH[channel].SGR = config->src_gather_control;
        dma->CH[channel].DSR = config->dst_scatter_control;
      }

      if (config->dst_handshaking == XMC_DMA_CH_DST_HANDSHAKING_HARDWARE)
      {
        /* Hardware handshaking interface configuration */
        if ((config->transfer_flow == (uint32_t)XMC_DMA_CH_TRANSFER_FLOW_M2P_DMA) ||
            (config->transfer_flow == (uint32_t)XMC_DMA_CH_TRANSFER_FLOW_P2P_DMA))
        {
#if defined(GPDMA1)
          if (dma == XMC_DMA0)
          {
#endif
            line = config->dst_peripheral_request & GPDMA0_CH_CFGH_PER_Msk;
#if defined(GPDMA1)
          }
          else
          {
            line = config->dst_peripheral_request & GPDMA1_CH_CFGH_PER_Msk;
          }
#endif
          peripheral = config->dst_peripheral_request >> GPDMA_CH_CFGH_PER_BITSIZE;

          dma->CH[channel].CFGH |= (uint32_t)((uint32_t)line << GPDMA0_CH_CFGH_DEST_PER_Pos);
          XMC_DMA_EnableRequestLine(dma, line, peripheral);
          dma->CH[channel].CFGL &= (uint32_t)~GPDMA0_CH_CFGL_HS_SEL_DST_Msk;
        }
      }

      if (config->src_handshaking == XMC_DMA_CH_SRC_HANDSHAKING_HARDWARE)
      {
        if ((config->transfer_flow == (uint32_t)XMC_DMA_CH_TRANSFER_FLOW_P2M_DMA) ||
            (config->transfer_flow == (uint32_t)XMC_DMA_CH_TRANSFER_FLOW_P2P_DMA))
        {
#if defined(GPDMA1)
          if (dma == XMC_DMA0)
          {
#endif
            line = config->src_peripheral_request & GPDMA0_CH_CFGH_PER_Msk;
#if defined(GPDMA1)
          }
          else
          {
            line = config->src_peripheral_request & GPDMA1_CH_CFGH_PER_Msk;
          }
#endif
          peripheral = config->src_peripheral_request >> GPDMA_CH_CFGH_PER_BITSIZE;

          dma->CH[channel].CFGH |= (uint32_t)((uint32_t)line << GPDMA0_CH_CFGH_SRC_PER_Pos);
          XMC_DMA_EnableRequestLine(dma, line, peripheral);
          dma->CH[channel].CFGL &= (uint32_t)~GPDMA0_CH_CFGL_HS_SEL_SRC_Msk;
        }
      }

      XMC_DMA_CH_ClearEventStatus(dma, channel, (uint32_t)((uint32_t)XMC_DMA_CH_EVENT_TRANSFER_COMPLETE |
                                                           (uint32_t)XMC_DMA_CH_EVENT_BLOCK_TRANSFER_COMPLETE |
                                                           (uint32_t)XMC_DMA_CH_EVENT_SRC_TRANSACTION_COMPLETE |
                                                           (uint32_t)XMC_DMA_CH_EVENT_DST_TRANSACTION_COMPLETE |
                                                           (uint32_t)XMC_DMA_CH_EVENT_ERROR));

      switch (config->transfer_type)
      {
        case XMC_DMA_CH_TRANSFER_TYPE_SINGLE_BLOCK:
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_CONTIGUOUS_DSTADR_RELOAD:
          dma->CH[channel].CFGL |= (uint32_t)GPDMA0_CH_CFGL_RELOAD_DST_Msk;
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_RELOAD_DSTADR_CONTIGUOUS:
          dma->CH[channel].CFGL |= (uint32_t)GPDMA0_CH_CFGL_RELOAD_SRC_Msk;
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_RELOAD_DSTADR_RELOAD:
          dma->CH[channel].CFGL |= (uint32_t)((uint32_t)GPDMA0_CH_CFGL_RELOAD_DST_Msk |
                                              (uint32_t)GPDMA0_CH_CFGL_RELOAD_SRC_Msk);
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_CONTIGUOUS_DSTADR_LINKED:
          dma->CH[channel].CTLL |= (uint32_t)GPDMA0_CH_CTLL_LLP_DST_EN_Msk;
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_RELOAD_DSTADR_LINKED:
          dma->CH[channel].CFGL |= (uint32_t)GPDMA0_CH_CFGL_RELOAD_SRC_Msk;
          dma->CH[channel].CTLL |= (uint32_t)GPDMA0_CH_CTLL_LLP_DST_EN_Msk;
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_LINKED_DSTADR_CONTIGUOUS:
          dma->CH[channel].CTLL |= (uint32_t)GPDMA0_CH_CTLL_LLP_SRC_EN_Msk;
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_LINKED_DSTADR_RELOAD:
          dma->CH[channel].CFGL |= (uint32_t)GPDMA0_CH_CFGL_RELOAD_DST_Msk;
          dma->CH[channel].CTLL |= (uint32_t)GPDMA0_CH_CTLL_LLP_SRC_EN_Msk;
          break;

        case XMC_DMA_CH_TRANSFER_TYPE_MULTI_BLOCK_SRCADR_LINKED_DSTADR_LINKED:
          dma->CH[channel].CTLL |= (uint32_t)((uint32_t)GPDMA0_CH_CTLL_LLP_SRC_EN_Msk |
                                              (uint32_t)GPDMA0_CH_CTLL_LLP_DST_EN_Msk);
          break;

        default:
          break;
      }

      status = XMC_DMA_CH_STATUS_OK;
    }
    else
    {
      status = XMC_DMA_CH_STATUS_BUSY;
    }
  }
  else
  {
    status = XMC_DMA_CH_STATUS_ERROR;
  }

  return status;
}
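
/* Usage sketch (illustrative, not part of the driver): a minimal
 * memory-to-memory transfer on GPDMA0 channel 0, polled to completion.
 * Leaving the control word zero-initialized selects 8-bit transfer width,
 * incrementing addresses and memory-to-memory flow per the GPDMA CTL reset
 * encoding; real applications typically also set the transfer-width and
 * burst-length members of XMC_DMA_CH_CONFIG_t.
 *
 *   uint8_t src[32];
 *   uint8_t dst[32];
 *   XMC_DMA_CH_CONFIG_t cfg = {0};
 *
 *   cfg.src_addr = (uint32_t)src;
 *   cfg.dst_addr = (uint32_t)dst;
 *   cfg.block_size = sizeof(src);
 *   cfg.transfer_type = XMC_DMA_CH_TRANSFER_TYPE_SINGLE_BLOCK;
 *   cfg.priority = XMC_DMA_CH_PRIORITY_0;
 *
 *   XMC_DMA_Init(XMC_DMA0);
 *   if (XMC_DMA_CH_Init(XMC_DMA0, 0U, &cfg) == XMC_DMA_CH_STATUS_OK)
 *   {
 *     XMC_DMA_CH_Enable(XMC_DMA0, 0U);
 *     while (XMC_DMA_CH_IsEnabled(XMC_DMA0, 0U))
 *     {
 *       ;  // the channel disables itself once the block completes
 *     }
 *   }
 */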

/* Suspend DMA channel transfer */
void XMC_DMA_CH_Suspend(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CFGL |= (uint32_t)GPDMA0_CH_CFGL_CH_SUSP_Msk;
}

/* Resume DMA channel transfer */
void XMC_DMA_CH_Resume(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CFGL &= (uint32_t)~GPDMA0_CH_CFGL_CH_SUSP_Msk;
}

/* Check if a DMA channel is suspended */
bool XMC_DMA_CH_IsSuspended(XMC_DMA_t *const dma, const uint8_t channel)
{
  return (bool)(dma->CH[channel].CFGL & (uint32_t)GPDMA0_CH_CFGL_CH_SUSP_Msk);
}

/* Enable GPDMA event */
void XMC_DMA_CH_EnableEvent(XMC_DMA_t *const dma, const uint8_t channel, const uint32_t event)
{
  uint32_t event_idx;

  for (event_idx = 0UL; event_idx < DMA_EVENT_MAX; ++event_idx)
  {
    if (event & ((uint32_t)0x1UL << event_idx))
    {
      dma->MASKCHEV[event_idx * 2UL] = ((uint32_t)0x101UL << channel);
    }
  }
}
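
/* Note: the TFR/BLOCK/SRCTRAN/DSTTRAN/ERR event registers sit on 64-bit
 * boundaries in the GPDMA register map, so only every second word of the
 * 32-bit MASKCHEV/CLEARCHEV/STATUSCHEV register views is used, hence the
 * index stride of 2. As with CHENREG, writing (0x101UL << channel) sets a
 * mask bit together with its write-enable bit and (0x100UL << channel)
 * clears it, avoiding a read-modify-write. */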

/* Disable GPDMA event */
void XMC_DMA_CH_DisableEvent(XMC_DMA_t *const dma, const uint8_t channel, const uint32_t event)
{
  uint32_t event_idx;

  for (event_idx = 0UL; event_idx < DMA_EVENT_MAX; ++event_idx)
  {
    if (event & ((uint32_t)0x1UL << event_idx))
    {
      dma->MASKCHEV[event_idx * 2UL] = ((uint32_t)0x100UL << channel);
    }
  }
}

/* Clear GPDMA event */
void XMC_DMA_CH_ClearEventStatus(XMC_DMA_t *const dma, const uint8_t channel, const uint32_t event)
{
  uint32_t event_idx;

  for (event_idx = 0UL; event_idx < DMA_EVENT_MAX; ++event_idx)
  {
    if (event & (uint32_t)((uint32_t)0x1UL << event_idx))
    {
      dma->CLEARCHEV[event_idx * 2UL] = ((uint32_t)0x1UL << channel);
    }
  }
}

/* Get GPDMA event status */
uint32_t XMC_DMA_CH_GetEventStatus(XMC_DMA_t *const dma, const uint8_t channel)
{
  uint32_t event_idx;
  uint32_t status = 0UL;

  for (event_idx = 0UL; event_idx < DMA_EVENT_MAX; ++event_idx)
  {
    status |= (uint32_t)((dma->STATUSCHEV[event_idx * 2UL] & (uint32_t)((uint32_t)0x1UL << (uint32_t)channel)) ?
                         ((uint32_t)0x1UL << event_idx) : (uint32_t)0UL);
  }

  return status;
}

/* Enable source gather */
void XMC_DMA_CH_EnableSourceGather(XMC_DMA_t *const dma, const uint8_t channel, uint32_t interval, uint16_t count)
{
  dma->CH[channel].CTLL |= (uint32_t)GPDMA0_CH_CTLL_SRC_GATHER_EN_Msk;
  dma->CH[channel].SGR = ((uint32_t)interval << GPDMA0_CH_SGR_SGI_Pos) | ((uint32_t)count << GPDMA0_CH_SGR_SGC_Pos);
}
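
/* Example (illustrative values): XMC_DMA_CH_EnableSourceGather(XMC_DMA0, 0U,
 * 4U, 2U) makes the channel fetch two source-width items and then, per the
 * GPDMA gather semantics, step the source address by an additional interval
 * of four source-width units before fetching the next two. */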

/* Disable source gather */
void XMC_DMA_CH_DisableSourceGather(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CTLL &= (uint32_t)~GPDMA0_CH_CTLL_SRC_GATHER_EN_Msk;
}

/* Enable destination scatter */
void XMC_DMA_CH_EnableDestinationScatter(XMC_DMA_t *const dma, const uint8_t channel, uint32_t interval, uint16_t count)
{
  dma->CH[channel].CTLL |= (uint32_t)GPDMA0_CH_CTLL_DST_SCATTER_EN_Msk;
  dma->CH[channel].DSR = ((uint32_t)interval << GPDMA0_CH_DSR_DSI_Pos) | ((uint32_t)count << GPDMA0_CH_DSR_DSC_Pos);
}

/* Disable destination scatter */
void XMC_DMA_CH_DisableDestinationScatter(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CTLL &= (uint32_t)~GPDMA0_CH_CTLL_DST_SCATTER_EN_Msk;
}

/* Trigger source request */
void XMC_DMA_CH_TriggerSourceRequest(XMC_DMA_t *const dma, const uint8_t channel, const XMC_DMA_CH_TRANSACTION_TYPE_t type, bool last)
{
  if (type == XMC_DMA_CH_TRANSACTION_TYPE_SINGLE)
  {
    dma->SGLREQSRCREG = (uint32_t)0x101UL << channel;
  }

  if (last == true)
  {
    dma->LSTSRCREG = (uint32_t)0x101UL << channel;
  }

  dma->REQSRCREG = (uint32_t)0x101UL << channel;
}

/* Trigger destination request */
void XMC_DMA_CH_TriggerDestinationRequest(XMC_DMA_t *const dma, const uint8_t channel, const XMC_DMA_CH_TRANSACTION_TYPE_t type, bool last)
{
  if (type == XMC_DMA_CH_TRANSACTION_TYPE_SINGLE)
  {
    dma->SGLREQDSTREG = (uint32_t)0x101UL << channel;
  }

  if (last == true)
  {
    dma->LSTDSTREG = (uint32_t)0x101UL << channel;
  }

  dma->REQDSTREG = (uint32_t)0x101UL << channel;
}
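
/* Software handshaking sketch (illustrative): when XMC_DMA_CH_Init() leaves
 * a side on software handshaking (its default when no hardware handshaking
 * is configured), the application paces that side itself, e.g. requesting a
 * single destination transaction and marking it as the last of the block:
 *
 *   XMC_DMA_CH_TriggerDestinationRequest(XMC_DMA0, 0U,
 *                                        XMC_DMA_CH_TRANSACTION_TYPE_SINGLE,
 *                                        true);
 */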

/* Enable source address reload */
void XMC_DMA_CH_EnableSourceAddressReload(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CFGL |= (uint32_t)GPDMA0_CH_CFGL_RELOAD_SRC_Msk;
}

/* Disable source address reload */
void XMC_DMA_CH_DisableSourceAddressReload(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CFGL &= (uint32_t)~GPDMA0_CH_CFGL_RELOAD_SRC_Msk;
}

/* Enable destination address reload */
void XMC_DMA_CH_EnableDestinationAddressReload(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CFGL |= (uint32_t)GPDMA0_CH_CFGL_RELOAD_DST_Msk;
}

/* Disable destination address reload */
void XMC_DMA_CH_DisableDestinationAddressReload(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CFGL &= (uint32_t)~GPDMA0_CH_CFGL_RELOAD_DST_Msk;
}

/* Request last multi-block transfer */
void XMC_DMA_CH_RequestLastMultiblockTransfer(XMC_DMA_t *const dma, const uint8_t channel)
{
  dma->CH[channel].CFGL &= (uint32_t)~(GPDMA0_CH_CFGL_RELOAD_SRC_Msk | GPDMA0_CH_CFGL_RELOAD_DST_Msk);
  dma->CH[channel].CTLL &= (uint32_t)~(GPDMA0_CH_CTLL_LLP_SRC_EN_Msk | GPDMA0_CH_CTLL_LLP_DST_EN_Msk);
}

/* Set event handler */
void XMC_DMA_CH_SetEventHandler(XMC_DMA_t *const dma, const uint8_t channel, XMC_DMA_CH_EVENT_HANDLER_t event_handler)
{
#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#else
  XMC_UNUSED_ARG(dma);
#endif
    dma0_event_handlers[channel] = event_handler;
#if defined(GPDMA1)
  }
  else
  {
    dma1_event_handlers[channel] = event_handler;
  }
#endif
}
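
/* Usage sketch (illustrative): register a callback before enabling channel
 * events; the callback must match the XMC_DMA_CH_EVENT_HANDLER_t signature.
 *
 *   static void my_channel0_handler(XMC_DMA_CH_EVENT_t event)
 *   {
 *     if (event == XMC_DMA_CH_EVENT_TRANSFER_COMPLETE)
 *     {
 *       // transfer done
 *     }
 *   }
 *
 *   XMC_DMA_CH_SetEventHandler(XMC_DMA0, 0U, my_channel0_handler);
 *   XMC_DMA_CH_EnableEvent(XMC_DMA0, 0U, (uint32_t)XMC_DMA_CH_EVENT_TRANSFER_COMPLETE);
 */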

/* Clear source peripheral request */
void XMC_DMA_CH_ClearSourcePeripheralRequest(XMC_DMA_t *const dma, uint8_t channel)
{
  uint32_t line;

  line = (dma->CH[channel].CFGH & GPDMA0_CH_CFGH_SRC_PER_Msk) >> GPDMA0_CH_CFGH_SRC_PER_Pos;
  XMC_DMA_ClearRequestLine(dma, (uint8_t)line);
}

/* Clear destination peripheral request */
void XMC_DMA_CH_ClearDestinationPeripheralRequest(XMC_DMA_t *const dma, uint8_t channel)
{
  uint32_t line;

  line = (dma->CH[channel].CFGH & GPDMA0_CH_CFGH_DEST_PER_Msk) >> GPDMA0_CH_CFGH_DEST_PER_Pos;
  XMC_DMA_ClearRequestLine(dma, (uint8_t)line);
}

/* Default DMA IRQ handler */
void XMC_DMA_IRQHandler(XMC_DMA_t *const dma)
{
  uint32_t event;
  int32_t channel;
  uint32_t mask;
  XMC_DMA_CH_EVENT_HANDLER_t *dma_event_handlers;
  XMC_DMA_CH_EVENT_HANDLER_t event_handler;

#if defined(GPDMA1)
  if (dma == XMC_DMA0)
  {
#endif
    dma_event_handlers = dma0_event_handlers;
#if defined(GPDMA1)
  }
  else
  {
    dma_event_handlers = dma1_event_handlers;
  }
#endif

  event = XMC_DMA_GetEventStatus(dma);
  channel = 0;

  if ((event & (uint32_t)XMC_DMA_CH_EVENT_ERROR) != (uint32_t)0UL)
  {
    event = XMC_DMA_GetChannelsErrorStatus(dma);
    while (event != 0)
    {
      mask = (uint32_t)1U << channel;
      if ((event & mask) != 0)
      {
        /* Clear the channel event status before invoking the handler so that
           a transfer retriggered inside the handler cannot lose its event */
        XMC_DMA_CH_ClearEventStatus(dma, (uint8_t)channel, (uint32_t)XMC_DMA_CH_EVENT_ERROR);

        /* Call user callback to handle event */
        event_handler = dma_event_handlers[channel];
        if (event_handler != NULL)
        {
          event_handler(XMC_DMA_CH_EVENT_ERROR);
        }

        break;
      }
      ++channel;
    }
  }
  else if ((event & (uint32_t)XMC_DMA_CH_EVENT_TRANSFER_COMPLETE) != (uint32_t)0UL)
  {
    event = XMC_DMA_GetChannelsTransferCompleteStatus(dma);
    while (event != 0)
    {
      mask = (uint32_t)1U << channel;
      if (event & mask)
      {
        XMC_DMA_CH_ClearEventStatus(dma, (uint8_t)channel, (uint32_t)((uint32_t)XMC_DMA_CH_EVENT_TRANSFER_COMPLETE |
                                                                      (uint32_t)XMC_DMA_CH_EVENT_BLOCK_TRANSFER_COMPLETE |
                                                                      (uint32_t)XMC_DMA_CH_EVENT_SRC_TRANSACTION_COMPLETE |
                                                                      (uint32_t)XMC_DMA_CH_EVENT_DST_TRANSACTION_COMPLETE));

        /* Call user callback to handle event */
        event_handler = dma_event_handlers[channel];
        if (event_handler != NULL)
        {
          event_handler(XMC_DMA_CH_EVENT_TRANSFER_COMPLETE);
        }

        break;
      }
      ++channel;
    }
  }
  else if ((event & (uint32_t)XMC_DMA_CH_EVENT_BLOCK_TRANSFER_COMPLETE) != (uint32_t)0UL)
  {
    event = XMC_DMA_GetChannelsBlockCompleteStatus(dma);
    while (event != 0)
    {
      mask = (uint32_t)1U << channel;
      if (event & mask)
      {
        XMC_DMA_CH_ClearEventStatus(dma, (uint8_t)channel, (uint32_t)((uint32_t)XMC_DMA_CH_EVENT_BLOCK_TRANSFER_COMPLETE |
                                                                      (uint32_t)XMC_DMA_CH_EVENT_SRC_TRANSACTION_COMPLETE |
                                                                      (uint32_t)XMC_DMA_CH_EVENT_DST_TRANSACTION_COMPLETE));

        /* Call user callback to handle event */
        event_handler = dma_event_handlers[channel];
        if (event_handler != NULL)
        {
          event_handler(XMC_DMA_CH_EVENT_BLOCK_TRANSFER_COMPLETE);
        }

        break;
      }
      ++channel;
    }
  }
  else if ((event & (uint32_t)XMC_DMA_CH_EVENT_SRC_TRANSACTION_COMPLETE) != (uint32_t)0UL)
  {
    event = XMC_DMA_GetChannelsSourceTransactionCompleteStatus(dma);
    while (event != 0)
    {
      mask = (uint32_t)1U << channel;
      if (event & mask)
      {
        XMC_DMA_CH_ClearEventStatus(dma, (uint8_t)channel, (uint32_t)XMC_DMA_CH_EVENT_SRC_TRANSACTION_COMPLETE);

        /* Call user callback to handle event */
        event_handler = dma_event_handlers[channel];
        if (event_handler != NULL)
        {
          event_handler(XMC_DMA_CH_EVENT_SRC_TRANSACTION_COMPLETE);
        }

        break;
      }
      ++channel;
    }
  }
  else if ((event & (uint32_t)XMC_DMA_CH_EVENT_DST_TRANSACTION_COMPLETE) != (uint32_t)0UL)
  {
    event = XMC_DMA_GetChannelsDestinationTransactionCompleteStatus(dma);
    while (event != 0)
    {
      mask = (uint32_t)1U << channel;
      if (event & mask)
      {
        XMC_DMA_CH_ClearEventStatus(dma, (uint8_t)channel, (uint32_t)XMC_DMA_CH_EVENT_DST_TRANSACTION_COMPLETE);

        /* Call user callback to handle event */
        event_handler = dma_event_handlers[channel];
        if (event_handler != NULL)
        {
          event_handler(XMC_DMA_CH_EVENT_DST_TRANSACTION_COMPLETE);
        }

        break;
      }
      ++channel;
    }
  }
  else
  {
    /* No active event found; nothing to do */
  }
}
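
/* To use this default handler, route the device's GPDMA service request to
 * it from the application, e.g. (CMSIS names on XMC4000-series devices;
 * check the device header for your part):
 *
 *   void GPDMA0_0_IRQHandler(void)
 *   {
 *     XMC_DMA_IRQHandler(XMC_DMA0);
 *   }
 *
 *   NVIC_EnableIRQ(GPDMA0_0_IRQn);  // during initialization
 */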

#endif /* GPDMA0 */