/***************************************************************************//**
 * @file
 * @brief Direct memory access (LDMA) module peripheral API
 *******************************************************************************
 * # License
 * <b>Copyright 2018 Silicon Laboratories Inc. www.silabs.com</b>
 *******************************************************************************
 *
 * SPDX-License-Identifier: Zlib
 *
 * The licensor of this software is Silicon Laboratories Inc.
 *
 * This software is provided 'as-is', without any express or implied
 * warranty. In no event will the authors be held liable for any damages
 * arising from the use of this software.
 *
 * Permission is granted to anyone to use this software for any purpose,
 * including commercial applications, and to alter it and redistribute it
 * freely, subject to the following restrictions:
 *
 * 1. The origin of this software must not be misrepresented; you must not
 *    claim that you wrote the original software. If you use this software
 *    in a product, an acknowledgment in the product documentation would be
 *    appreciated but is not required.
 * 2. Altered source versions must be plainly marked as such, and must not be
 *    misrepresented as being the original software.
 * 3. This notice may not be removed or altered from any source distribution.
 *
 ******************************************************************************/

#include "em_ldma.h"

#if defined(LDMA_PRESENT) && (LDMA_COUNT == 1)

#include <stddef.h>
#include "sl_assert.h"
#include "em_bus.h"
#include "em_cmu.h"
#include "em_core.h"

/***************************************************************************//**
 * @addtogroup ldma
 * @{
 ******************************************************************************/

#if defined(LDMA_IRQ_HANDLER_TEMPLATE)
/***************************************************************************//**
 * @brief
 *   A template for an LDMA IRQ handler.
 ******************************************************************************/
void LDMA_IRQHandler(void)
{
  uint32_t ch;
  /* Get all pending and enabled interrupts. */
  uint32_t pending = LDMA_IntGetEnabled();

  /* Loop on an LDMA error to enable debugging. */
  while (pending & LDMA_IF_ERROR) {
  }

  /* Iterate over all LDMA channels. */
  for (ch = 0; ch < DMA_CHAN_COUNT; ch++) {
    uint32_t mask = 0x1 << ch;
    if (pending & mask) {
      /* Clear the interrupt flag. */
      LDMA->IFC = mask;

      /* Perform more actions here, execute callbacks, and so on. */
    }
  }
}
#endif
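
/* Illustrative sketch (not part of this driver): one way an application could
 * dispatch per-channel work from the handler above. The callback type, table,
 * and registration scheme are hypothetical and not an emlib API.
 *
 *   typedef void (*app_ldma_callback_t)(uint32_t channel);
 *   static app_ldma_callback_t app_ldma_callbacks[DMA_CHAN_COUNT];
 *
 *   // Inside the per-channel branch of the IRQ handler, after clearing IF:
 *   //   if (app_ldma_callbacks[ch] != NULL) {
 *   //     app_ldma_callbacks[ch](ch);
 *   //   }
 */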

/***************************************************************************//**
 * @brief
 *   De-initialize the LDMA controller.
 *
 *   LDMA interrupts are disabled and the LDMA clock is stopped.
 ******************************************************************************/
void LDMA_DeInit(void)
{
  NVIC_DisableIRQ(LDMA_IRQn);
  LDMA->IEN = 0;
#if defined(_LDMA_CHDIS_MASK)
  LDMA->CHDIS = _LDMA_CHEN_MASK;
#else
  LDMA->CHEN = 0;
#endif
#if defined(LDMA_EN_EN)
  LDMA->EN = 0;
#if defined(LDMA_EN_DISABLING)
  while (LDMA->EN & _LDMA_EN_DISABLING_MASK) {
  }
#endif
#endif

  CMU_ClockEnable(cmuClock_LDMA, false);
#if defined(_SILICON_LABS_32B_SERIES_2_CONFIG) && (_SILICON_LABS_32B_SERIES_2_CONFIG > 1)
  CMU_ClockEnable(cmuClock_LDMAXBAR, false);
#endif
}

/***************************************************************************//**
 * @brief
 *   Enable or disable an LDMA channel request.
 *
 * @details
 *   Use this function to enable or disable requests for an LDMA channel. If
 *   requests are disabled, the LDMA will not proceed past its current
 *   transaction.
 *
 * @param[in] ch
 *   The LDMA channel for which to enable or disable requests.
 *
 * @param[in] enable
 *   If 'true', requests are enabled. If 'false', requests are disabled.
 ******************************************************************************/
void LDMA_EnableChannelRequest(int ch, bool enable)
{
  EFM_ASSERT(ch < (int)DMA_CHAN_COUNT);

  BUS_RegBitWrite(&LDMA->REQDIS, ch, !enable);
}
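
/* Illustrative sketch (not part of this driver): temporarily pausing and then
 * resuming the requests of a channel. Channel 0 is an arbitrary example value.
 *
 *   LDMA_EnableChannelRequest(0, false);  // LDMA stops after its current transaction
 *   // ... reconfigure the requesting peripheral ...
 *   LDMA_EnableChannelRequest(0, true);   // requests are serviced again
 */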

/***************************************************************************//**
 * @brief
 *   Initialize the LDMA controller.
 *
 * @details
 *   This function disables all LDMA channels and enables the LDMA bus clock
 *   in the CMU. It also enables the LDMA IRQ in the NVIC and sets the LDMA
 *   IRQ priority to a user-configurable priority. The LDMA interrupt priority
 *   is configured using the @ref LDMA_Init_t structure.
 *
 * @note
 *   Since this function enables the LDMA IRQ, always add a custom
 *   LDMA_IRQHandler to the application to handle any interrupts
 *   from LDMA.
 *
 * @param[in] init
 *   A pointer to the initialization structure used to configure the LDMA.
 ******************************************************************************/
void LDMA_Init(const LDMA_Init_t *init)
{
  uint32_t ldmaCtrlVal;
  EFM_ASSERT(init != NULL);
  EFM_ASSERT(!(((uint32_t)init->ldmaInitCtrlNumFixed << _LDMA_CTRL_NUMFIXED_SHIFT)
               & ~_LDMA_CTRL_NUMFIXED_MASK));

#if defined(_LDMA_CTRL_SYNCPRSCLREN_SHIFT) && defined(_LDMA_CTRL_SYNCPRSSETEN_SHIFT)
  EFM_ASSERT(!(((uint32_t)init->ldmaInitCtrlSyncPrsClrEn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
               & ~_LDMA_CTRL_SYNCPRSCLREN_MASK));
  EFM_ASSERT(!(((uint32_t)init->ldmaInitCtrlSyncPrsSetEn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT)
               & ~_LDMA_CTRL_SYNCPRSSETEN_MASK));
#endif

#if defined(_LDMA_SYNCHWEN_SYNCCLREN_SHIFT) && defined(_LDMA_SYNCHWEN_SYNCSETEN_SHIFT)
  EFM_ASSERT(!(((uint32_t)init->ldmaInitCtrlSyncPrsClrEn << _LDMA_SYNCHWEN_SYNCCLREN_SHIFT)
               & ~_LDMA_SYNCHWEN_SYNCCLREN_MASK));
  EFM_ASSERT(!(((uint32_t)init->ldmaInitCtrlSyncPrsSetEn << _LDMA_SYNCHWEN_SYNCSETEN_SHIFT)
               & ~_LDMA_SYNCHWEN_SYNCSETEN_MASK));
#endif

  EFM_ASSERT(init->ldmaInitIrqPriority < (1 << __NVIC_PRIO_BITS));

  CMU_ClockEnable(cmuClock_LDMA, true);
#if defined(_SILICON_LABS_32B_SERIES_2_CONFIG) && (_SILICON_LABS_32B_SERIES_2_CONFIG > 1)
  CMU_ClockEnable(cmuClock_LDMAXBAR, true);
#endif

#if defined(LDMA_EN_EN)
  LDMA->EN = LDMA_EN_EN;
#endif

  ldmaCtrlVal = (uint32_t)init->ldmaInitCtrlNumFixed << _LDMA_CTRL_NUMFIXED_SHIFT;

#if defined(_LDMA_CTRL_SYNCPRSCLREN_SHIFT) && defined(_LDMA_CTRL_SYNCPRSSETEN_SHIFT)
  ldmaCtrlVal |= (init->ldmaInitCtrlSyncPrsClrEn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
                 | (init->ldmaInitCtrlSyncPrsSetEn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT);
#endif

  LDMA->CTRL = ldmaCtrlVal;

#if defined(_LDMA_SYNCHWEN_SYNCCLREN_SHIFT) && defined(_LDMA_SYNCHWEN_SYNCSETEN_SHIFT)
  LDMA->SYNCHWEN = ((uint32_t)init->ldmaInitCtrlSyncPrsClrEn << _LDMA_SYNCHWEN_SYNCCLREN_SHIFT)
                   | ((uint32_t)init->ldmaInitCtrlSyncPrsSetEn << _LDMA_SYNCHWEN_SYNCSETEN_SHIFT);
#endif

#if defined(_LDMA_CHDIS_MASK)
  LDMA->CHDIS = _LDMA_CHEN_MASK;
#else
  LDMA->CHEN = 0;
#endif
  LDMA->DBGHALT = 0;
  LDMA->REQDIS = 0;

  /* Enable the LDMA error interrupt. */
  LDMA->IEN = LDMA_IEN_ERROR;
#if defined(LDMA_HAS_SET_CLEAR)
  LDMA->IF_CLR = 0xFFFFFFFFU;
#else
  LDMA->IFC = 0xFFFFFFFFU;
#endif
  NVIC_ClearPendingIRQ(LDMA_IRQn);

  /* Range is 0-7, where 0 is the highest priority. */
  NVIC_SetPriority(LDMA_IRQn, init->ldmaInitIrqPriority);

  NVIC_EnableIRQ(LDMA_IRQn);
}
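
/* Illustrative sketch (not part of this driver): typical initialization using
 * the default configuration from em_ldma.h. The chosen IRQ priority is an
 * example value only.
 *
 *   LDMA_Init_t init = LDMA_INIT_DEFAULT;
 *   init.ldmaInitIrqPriority = 3;  // 0 is the highest priority
 *   LDMA_Init(&init);
 */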

/***************************************************************************//**
 * @brief
 *   Start a DMA transfer.
 *
 * @param[in] ch
 *   A DMA channel.
 *
 * @param[in] transfer
 *   The initialization structure used to configure the transfer.
 *
 * @param[in] descriptor
 *   The transfer descriptor, which can be an array of descriptors linked
 *   together. Each descriptor's fields stored in RAM will be loaded into the
 *   appropriate hardware registers at the proper time to perform the DMA
 *   transfer.
 ******************************************************************************/
void LDMA_StartTransfer(int ch,
                        const LDMA_TransferCfg_t *transfer,
                        const LDMA_Descriptor_t *descriptor)
{
#if !(defined(_LDMA_SYNCHWEN_SYNCCLREN_SHIFT) && defined(_LDMA_SYNCHWEN_SYNCSETEN_SHIFT))
  uint32_t tmp;
#endif
  CORE_DECLARE_IRQ_STATE;
  uint32_t chMask = 1UL << (uint8_t)ch;

  EFM_ASSERT(ch < (int)DMA_CHAN_COUNT);
  EFM_ASSERT(transfer != NULL);

#if defined(_LDMAXBAR_CH_REQSEL_MASK)
  EFM_ASSERT(!(transfer->ldmaReqSel & ~_LDMAXBAR_CH_REQSEL_MASK));
#elif defined(_LDMA_CH_REQSEL_MASK)
  EFM_ASSERT(!(transfer->ldmaReqSel & ~_LDMA_CH_REQSEL_MASK));
#endif

#if defined(_LDMA_SYNCHWEN_SYNCCLREN_SHIFT) && defined(_LDMA_SYNCHWEN_SYNCSETEN_SHIFT)
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsClrOff << _LDMA_SYNCHWEN_SYNCCLREN_SHIFT)
               & ~_LDMA_SYNCHWEN_SYNCCLREN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsClrOn << _LDMA_SYNCHWEN_SYNCCLREN_SHIFT)
               & ~_LDMA_SYNCHWEN_SYNCCLREN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsSetOff << _LDMA_SYNCHWEN_SYNCSETEN_SHIFT)
               & ~_LDMA_SYNCHWEN_SYNCSETEN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsSetOn << _LDMA_SYNCHWEN_SYNCSETEN_SHIFT)
               & ~_LDMA_SYNCHWEN_SYNCSETEN_MASK));
#elif defined(_LDMA_CTRL_SYNCPRSCLREN_SHIFT) && defined(_LDMA_CTRL_SYNCPRSSETEN_SHIFT)
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsClrOff << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
               & ~_LDMA_CTRL_SYNCPRSCLREN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsClrOn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT)
               & ~_LDMA_CTRL_SYNCPRSCLREN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsSetOff << _LDMA_CTRL_SYNCPRSSETEN_SHIFT)
               & ~_LDMA_CTRL_SYNCPRSSETEN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCtrlSyncPrsSetOn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT)
               & ~_LDMA_CTRL_SYNCPRSSETEN_MASK));
#endif

  EFM_ASSERT(!(((uint32_t)transfer->ldmaCfgArbSlots << _LDMA_CH_CFG_ARBSLOTS_SHIFT)
               & ~_LDMA_CH_CFG_ARBSLOTS_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCfgSrcIncSign << _LDMA_CH_CFG_SRCINCSIGN_SHIFT)
               & ~_LDMA_CH_CFG_SRCINCSIGN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaCfgDstIncSign << _LDMA_CH_CFG_DSTINCSIGN_SHIFT)
               & ~_LDMA_CH_CFG_DSTINCSIGN_MASK));
  EFM_ASSERT(!(((uint32_t)transfer->ldmaLoopCnt << _LDMA_CH_LOOP_LOOPCNT_SHIFT)
               & ~_LDMA_CH_LOOP_LOOPCNT_MASK));

  /* Clear the pending channel interrupt. */
#if defined(LDMA_HAS_SET_CLEAR)
  LDMA->IF_CLR = chMask;
#else
  LDMA->IFC = chMask;
#endif

#if defined(LDMAXBAR)
  LDMAXBAR->CH[ch].REQSEL = transfer->ldmaReqSel;
#else
  LDMA->CH[ch].REQSEL = transfer->ldmaReqSel;
#endif
  LDMA->CH[ch].LOOP = transfer->ldmaLoopCnt << _LDMA_CH_LOOP_LOOPCNT_SHIFT;
  LDMA->CH[ch].CFG = (transfer->ldmaCfgArbSlots << _LDMA_CH_CFG_ARBSLOTS_SHIFT)
                     | (transfer->ldmaCfgSrcIncSign << _LDMA_CH_CFG_SRCINCSIGN_SHIFT)
                     | (transfer->ldmaCfgDstIncSign << _LDMA_CH_CFG_DSTINCSIGN_SHIFT)
#if defined(_LDMA_CH_CFG_SRCBUSPORT_MASK)
                     | (transfer->ldmaCfgStructBusPort << _LDMA_CH_CFG_STRUCTBUSPORT_SHIFT)
                     | (transfer->ldmaCfgSrcBusPort << _LDMA_CH_CFG_SRCBUSPORT_SHIFT)
                     | (transfer->ldmaCfgDstBusPort << _LDMA_CH_CFG_DSTBUSPORT_SHIFT)
#endif
                     ;

  /* Set the descriptor address. */
  LDMA->CH[ch].LINK = (uint32_t)descriptor & _LDMA_CH_LINK_LINKADDR_MASK;

  /* A critical region. */
  CORE_ENTER_ATOMIC();

  /* Enable the channel interrupt. */
  LDMA->IEN |= chMask;

  if (transfer->ldmaReqDis) {
    LDMA->REQDIS |= chMask;
  }

  if (transfer->ldmaDbgHalt) {
    LDMA->DBGHALT |= chMask;
  }

#if defined(_LDMA_SYNCHWEN_SYNCCLREN_SHIFT) && defined(_LDMA_SYNCHWEN_SYNCSETEN_SHIFT)

  LDMA->SYNCHWEN_CLR =
    (((uint32_t)transfer->ldmaCtrlSyncPrsClrOff << _LDMA_SYNCHWEN_SYNCCLREN_SHIFT)
     | ((uint32_t)transfer->ldmaCtrlSyncPrsSetOff << _LDMA_SYNCHWEN_SYNCSETEN_SHIFT))
    & _LDMA_SYNCHWEN_MASK;

  LDMA->SYNCHWEN_SET =
    (((uint32_t)transfer->ldmaCtrlSyncPrsClrOn << _LDMA_SYNCHWEN_SYNCCLREN_SHIFT)
     | ((uint32_t)transfer->ldmaCtrlSyncPrsSetOn << _LDMA_SYNCHWEN_SYNCSETEN_SHIFT))
    & _LDMA_SYNCHWEN_MASK;

#elif defined(_LDMA_CTRL_SYNCPRSCLREN_SHIFT) && defined(_LDMA_CTRL_SYNCPRSSETEN_SHIFT)

  tmp = LDMA->CTRL;

  if (transfer->ldmaCtrlSyncPrsClrOff) {
    tmp &= ~_LDMA_CTRL_SYNCPRSCLREN_MASK
           | (~transfer->ldmaCtrlSyncPrsClrOff << _LDMA_CTRL_SYNCPRSCLREN_SHIFT);
  }

  if (transfer->ldmaCtrlSyncPrsClrOn) {
    tmp |= transfer->ldmaCtrlSyncPrsClrOn << _LDMA_CTRL_SYNCPRSCLREN_SHIFT;
  }

  if (transfer->ldmaCtrlSyncPrsSetOff) {
    tmp &= ~_LDMA_CTRL_SYNCPRSSETEN_MASK
           | (~transfer->ldmaCtrlSyncPrsSetOff << _LDMA_CTRL_SYNCPRSSETEN_SHIFT);
  }

  if (transfer->ldmaCtrlSyncPrsSetOn) {
    tmp |= transfer->ldmaCtrlSyncPrsSetOn << _LDMA_CTRL_SYNCPRSSETEN_SHIFT;
  }

  LDMA->CTRL = tmp;

#else

#error "SYNC Set and SYNC Clear not defined"

#endif

  BUS_RegMaskedClear(&LDMA->CHDONE, chMask); /* Clear the done flag. */
  LDMA->LINKLOAD = chMask;                   /* Start a transfer by loading the descriptor. */

  /* A critical region end. */
  CORE_EXIT_ATOMIC();
}
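
/* Illustrative sketch (not part of this driver): a single memory-to-memory
 * word transfer on channel 0, built with the configuration and descriptor
 * macros from em_ldma.h. The channel number, buffer names, and size are
 * example values. The code is assumed to run in an application function after
 * LDMA_Init() has been called.
 *
 *   uint32_t src[16];
 *   uint32_t dst[16];
 *   LDMA_TransferCfg_t cfg = LDMA_TRANSFER_CFG_MEMORY();
 *   LDMA_Descriptor_t desc = LDMA_DESCRIPTOR_SINGLE_M2M_WORD(src, dst, 16);
 *
 *   LDMA_StartTransfer(0, &cfg, &desc);
 */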

#if defined(_LDMA_CH_CTRL_EXTEND_MASK)
/***************************************************************************//**
 * @brief
 *   Start an extended DMA transfer.
 *
 * @param[in] ch
 *   A DMA channel.
 *
 * @param[in] transfer
 *   The initialization structure used to configure the transfer.
 *
 * @param[in] descriptor_ext
 *   The extended transfer descriptor, which can be an array of descriptors
 *   linked together. Each descriptor's fields stored in RAM will be loaded
 *   into the appropriate hardware registers at the proper time to perform the
 *   DMA transfer.
 ******************************************************************************/
void LDMA_StartTransferExtend(int ch,
                              const LDMA_TransferCfg_t *transfer,
                              const LDMA_DescriptorExtend_t *descriptor_ext)
{
  // Ensure destination interleaving is supported for the given channel.
  EFM_ASSERT(((1 << ch) & LDMA_ILCHNL));

  LDMA_StartTransfer(ch,
                     transfer,
                     (const LDMA_Descriptor_t *)descriptor_ext);
}
#endif

/***************************************************************************//**
 * @brief
 *   Stop a DMA transfer.
 *
 * @note
 *   The DMA will complete the current AHB burst transfer before stopping.
 *
 * @param[in] ch
 *   A DMA channel to stop.
 ******************************************************************************/
void LDMA_StopTransfer(int ch)
{
  uint32_t chMask = 1UL << (uint8_t)ch;

  EFM_ASSERT(ch < (int)DMA_CHAN_COUNT);

#if defined(_LDMA_CHDIS_MASK)
  CORE_ATOMIC_SECTION(
    LDMA->IEN &= ~chMask;
    LDMA->CHDIS = chMask;
    )
#else
  CORE_ATOMIC_SECTION(
    LDMA->IEN &= ~chMask;
    BUS_RegMaskedClear(&LDMA->CHEN, chMask);
    )
#endif
}

/***************************************************************************//**
 * @brief
 *   Check if a DMA transfer has completed.
 *
 * @param[in] ch
 *   A DMA channel to check.
 *
 * @return
 *   True if the transfer has completed, false if not.
 ******************************************************************************/
bool LDMA_TransferDone(int ch)
{
  bool retVal = false;
  uint32_t chMask = 1UL << (uint8_t)ch;

  EFM_ASSERT(ch < (int)DMA_CHAN_COUNT);

#if defined(_LDMA_CHSTATUS_MASK)
  CORE_ATOMIC_SECTION(
    if (((LDMA->CHSTATUS & chMask) == 0) && ((LDMA->CHDONE & chMask) == chMask)) {
      retVal = true;
    }
    )
#else
  CORE_ATOMIC_SECTION(
    if (((LDMA->CHEN & chMask) == 0) && ((LDMA->CHDONE & chMask) == chMask)) {
      retVal = true;
    }
    )
#endif

  return retVal;
}
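
/* Illustrative sketch (not part of this driver): busy-waiting until a transfer
 * previously started on channel 0 (example value) has finished.
 *
 *   while (!LDMA_TransferDone(0)) {
 *     // Optionally service other work or enter a low-power mode here.
 *   }
 */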

/***************************************************************************//**
 * @brief
 *   Get the number of items remaining in a transfer.
 *
 * @note
 *   This function does not take into account that a DMA transfer with
 *   a chain of linked transfers might be ongoing. It only checks the
 *   count for the current transfer.
 *
 * @param[in] ch
 *   The channel number of the transfer to check.
 *
 * @return
 *   The number of items remaining in the transfer.
 ******************************************************************************/
uint32_t LDMA_TransferRemainingCount(int ch)
{
  uint32_t remaining, done, iflag;
  uint32_t chMask = 1UL << (uint8_t)ch;

  EFM_ASSERT(ch < (int)DMA_CHAN_COUNT);

  CORE_ATOMIC_SECTION(
    iflag = LDMA->IF;
    done = LDMA->CHDONE;
    remaining = LDMA->CH[ch].CTRL;
    )

  iflag &= chMask;
  done &= chMask;
  remaining = (remaining & _LDMA_CH_CTRL_XFERCNT_MASK)
              >> _LDMA_CH_CTRL_XFERCNT_SHIFT;

  if (done || ((remaining == 0) && iflag)) {
    return 0;
  }

  /* +1 because XFERCNT is 0-based. */
  return remaining + 1;
}
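
/* Illustrative sketch (not part of this driver): monitoring progress of an
 * ongoing transfer on channel 0 (example value). Because XFERCNT is 0-based,
 * a raw field value of 7 corresponds to 8 items remaining.
 *
 *   uint32_t remaining = LDMA_TransferRemainingCount(0);
 *   if (remaining == 0) {
 *     // The current descriptor has transferred all of its items.
 *   }
 */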

/** @} (end addtogroup ldma) */
#endif /* defined( LDMA_PRESENT ) && ( LDMA_COUNT == 1 ) */