/*
 * Copyright (c) 2023 - 2024, Nordic Semiconductor ASA
 * All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the copyright holder nor the names of its
 *    contributors may be used to endorse or promote products derived from this
 *    software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef NRF_MVDMA_H__
#define NRF_MVDMA_H__

#include <nrfx.h>
#include <helpers/nrf_vdma.h>

#ifdef __cplusplus
extern "C" {
#endif

/**
 * @defgroup nrf_mvdma_hal MVDMA HAL
 * @{
 * @ingroup nrf_mvdma
 * @brief Hardware access layer for managing the Memory-to-Memory Vector DMA (MVDMA) peripheral.
 */

#if defined(MVDMA_TASKS_PAUSE_TASKS_PAUSE_Pos) || defined(__NRFX_DOXYGEN__)
/** @brief Symbol indicating whether MVDMA uses the new or the old version of the peripheral. */
#define NRF_MVDMA_HAS_NEW_VER 1
#else
#define NRF_MVDMA_HAS_NEW_VER 0
#endif

#if defined(MVDMA_CONFIG_AXIMODE_AXIMODE_Msk) || defined(__NRFX_DOXYGEN__)
/** @brief Symbol indicating whether AXIMODE configuration is present. */
#define NRF_MVDMA_HAS_AXIMODE 1
#else
#define NRF_MVDMA_HAS_AXIMODE 0
#endif

#if (MVDMA_JOBLISTCOUNT > 1) || defined(__NRFX_DOXYGEN__)
/** @brief Symbol indicating whether multi-mode operation is available. */
#define NRF_MVDMA_HAS_MULTIMODE 1
#else
#define NRF_MVDMA_HAS_MULTIMODE 0
#endif
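
/*
 * Usage note (informative, not part of the API): the feature symbols above can be used
 * to write code that works with both peripheral versions. A minimal sketch, using only
 * functions declared later in this header, where p_reg points to the MVDMA instance in use:
 *
 *     // Halt an ongoing transfer on either peripheral version.
 *     static void mvdma_halt(NRF_MVDMA_Type * p_reg)
 *     {
 *     #if NRF_MVDMA_HAS_NEW_VER
 *         nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_PAUSE);
 *     #else
 *         nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_STOP);
 *     #endif
 *     }
 */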

/** @brief MVDMA tasks. */
typedef enum
{
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_TASK_PAUSE  = offsetof(NRF_MVDMA_Type, TASKS_PAUSE),    ///< Pause DMA transaction at next idle stage on memory bus.
#else
    NRF_MVDMA_TASK_STOP   = offsetof(NRF_MVDMA_Type, TASKS_STOP),     ///< Stop DMA transaction immediately, or after an ongoing AXI burst.
#endif
    NRF_MVDMA_TASK_RESET  = offsetof(NRF_MVDMA_Type, TASKS_RESET),    ///< Return all registers to default state and FSMs to IDLE.
    NRF_MVDMA_TASK_START0 = offsetof(NRF_MVDMA_Type, TASKS_START[0]), ///< Start or continue processing of descriptor list 0.
    NRF_MVDMA_TASK_START1 = offsetof(NRF_MVDMA_Type, TASKS_START[1]), ///< Start or continue processing of descriptor list 1.
    NRF_MVDMA_TASK_START2 = offsetof(NRF_MVDMA_Type, TASKS_START[2]), ///< Start or continue processing of descriptor list 2.
    NRF_MVDMA_TASK_START3 = offsetof(NRF_MVDMA_Type, TASKS_START[3]), ///< Start or continue processing of descriptor list 3.
    NRF_MVDMA_TASK_START4 = offsetof(NRF_MVDMA_Type, TASKS_START[4]), ///< Start or continue processing of descriptor list 4.
    NRF_MVDMA_TASK_START5 = offsetof(NRF_MVDMA_Type, TASKS_START[5]), ///< Start or continue processing of descriptor list 5.
    NRF_MVDMA_TASK_START6 = offsetof(NRF_MVDMA_Type, TASKS_START[6]), ///< Start or continue processing of descriptor list 6.
    NRF_MVDMA_TASK_START7 = offsetof(NRF_MVDMA_Type, TASKS_START[7]), ///< Start or continue processing of descriptor list 7.
} nrf_mvdma_task_t;

/** @brief MVDMA events. */
typedef enum
{
    NRF_MVDMA_EVENT_END     = offsetof(NRF_MVDMA_Type, EVENTS_END),     ///< Sink data descriptor list has been completed.
    NRF_MVDMA_EVENT_STARTED = offsetof(NRF_MVDMA_Type, EVENTS_STARTED), ///< Data descriptor list processing has been started.
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_EVENT_PAUSED  = offsetof(NRF_MVDMA_Type, EVENTS_PAUSED),  ///< Data transfer has been paused.
#else
    NRF_MVDMA_EVENT_STOPPED = offsetof(NRF_MVDMA_Type, EVENTS_STOPPED), ///< Data descriptor list processing has been stopped.
#endif
    NRF_MVDMA_EVENT_RESET   = offsetof(NRF_MVDMA_Type, EVENTS_RESET),   ///< MVDMA has been reset.
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_EVENT_SOURCEBUSERROR      = offsetof(NRF_MVDMA_Type, EVENTS_SOURCE.BUSERROR),      ///< Bus error has been received on the source channel.
    NRF_MVDMA_EVENT_SOURCESELECTJOBDONE = offsetof(NRF_MVDMA_Type, EVENTS_SOURCE.SELECTJOBDONE), ///< Job on the source channel with event enable attribute bit active has been processed.
#else
    NRF_MVDMA_EVENT_SOURCEBUSERROR      = offsetof(NRF_MVDMA_Type, EVENTS_SOURCEBUSERROR),       ///< Bus error has been received on the source channel.
#endif
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_EVENT_SINKBUSERROR      = offsetof(NRF_MVDMA_Type, EVENTS_SINK.BUSERROR),      ///< Bus error has been received on the sink channel.
    NRF_MVDMA_EVENT_SINKSELECTJOBDONE = offsetof(NRF_MVDMA_Type, EVENTS_SINK.SELECTJOBDONE), ///< Job on the sink channel with event enable attribute bit active has been processed.
#else
    NRF_MVDMA_EVENT_SINKBUSERROR      = offsetof(NRF_MVDMA_Type, EVENTS_SINKBUSERROR),       ///< Bus error has been received on the sink channel.
#endif
} nrf_mvdma_event_t;

/** @brief MVDMA interrupts. */
typedef enum
{
    NRF_MVDMA_INT_END_MASK     = MVDMA_INTENSET_END_Msk,     ///< Interrupt on END event.
    NRF_MVDMA_INT_STARTED_MASK = MVDMA_INTENSET_STARTED_Msk, ///< Interrupt on STARTED event.
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_INT_PAUSED_MASK  = MVDMA_INTENSET_PAUSED_Msk,  ///< Interrupt on PAUSED event.
#else
    NRF_MVDMA_INT_STOPPED_MASK = MVDMA_INTENSET_STOPPED_Msk, ///< Interrupt on STOPPED event.
#endif
    NRF_MVDMA_INT_RESET_MASK          = MVDMA_INTENSET_RESET_Msk,          ///< Interrupt on RESET event.
    NRF_MVDMA_INT_SOURCEBUSERROR_MASK = MVDMA_INTENSET_SOURCEBUSERROR_Msk, ///< Interrupt on SOURCEBUSERROR event.
    NRF_MVDMA_INT_SINKBUSERROR_MASK   = MVDMA_INTENSET_SINKBUSERROR_Msk,   ///< Interrupt on SINKBUSERROR event.
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_INT_SOURCESELECTJOBDONE_MASK = MVDMA_INTENSET_SOURCESELECTJOBDONE_Msk, ///< Interrupt on SOURCESELECTJOBDONE event.
    NRF_MVDMA_INT_SINKSELECTJOBDONE_MASK   = MVDMA_INTENSET_SINKSELECTJOBDONE_Msk,   ///< Interrupt on SINKSELECTJOBDONE event.
#endif
} nrf_mvdma_int_mask_t;

/** @brief MVDMA modes of operation. */
typedef enum
{
    NRF_MVDMA_MODE_SINGLE = MVDMA_CONFIG_MODE_MODE_SingleMode, ///< Descriptor list pointers are stored in SOURCELISTPTR and SINKLISTPTR registers.
    NRF_MVDMA_MODE_MULTI  = MVDMA_CONFIG_MODE_MODE_MultiMode,  ///< Descriptor list pointers are stored in a list in the memory.
} nrf_mvdma_mode_t;

#if NRF_MVDMA_HAS_AXIMODE
/** @brief MVDMA AXI modes. */
typedef enum
{
    NRF_MVDMA_AXIMODE_AXI     = MVDMA_CONFIG_AXIMODE_AXIMODE_AXI,     ///< AXI burst transactions may be longer than one beat.
    NRF_MVDMA_AXIMODE_AXILITE = MVDMA_CONFIG_AXIMODE_AXIMODE_AXILITE, ///< All AXI transactions are one-beat accesses.
} nrf_mvdma_aximode_t;
#endif

/** @brief MVDMA FIFO status. */
typedef enum
{
    NRF_MVDMA_FIFO_STATUS_EMPTY       = MVDMA_STATUS_FIFO_FIFOSTATUS_Empty,      ///< No data in intermediate FIFO.
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_FIFO_STATUS_NOT_EMPTY   = MVDMA_STATUS_FIFO_FIFOSTATUS_NotEmpty,   ///< Intermediate FIFO contains data.
#else
    NRF_MVDMA_FIFO_STATUS_ALMOST_FULL = MVDMA_STATUS_FIFO_FIFOSTATUS_AlmostFull, ///< Intermediate FIFO is almost full.
#endif
    NRF_MVDMA_FIFO_STATUS_FULL        = MVDMA_STATUS_FIFO_FIFOSTATUS_Full,       ///< Intermediate FIFO is full.
} nrf_mvdma_fifo_status_t;

/** @brief MVDMA data source errors. */
typedef enum
{
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_SOURCE_ERROR_NONE        = MVDMA_SOURCE_BUSERROR_BUSERROR_NoError,         ///< No error.
    NRF_MVDMA_SOURCE_ERROR_READ        = MVDMA_SOURCE_BUSERROR_BUSERROR_ReadError,       ///< Error related to memory when reading joblist or memory/register when reading data.
    NRF_MVDMA_SOURCE_ERROR_READ_DECODE = MVDMA_SOURCE_BUSERROR_BUSERROR_ReadDecodeError, ///< Error related to the joblist address or address when reading memory/register.
#else
    NRF_MVDMA_SOURCE_ERROR_NONE   = MVDMA_STATUS_SOURCEBUSERROR_BUSERROR_NoError,     ///< No error.
    NRF_MVDMA_SOURCE_ERROR_SLAVE  = MVDMA_STATUS_SOURCEBUSERROR_BUSERROR_SlaveError,  ///< Error generated by AXI slave.
    NRF_MVDMA_SOURCE_ERROR_DECODE = MVDMA_STATUS_SOURCEBUSERROR_BUSERROR_DecodeError, ///< Error generated by interconnect.
#endif
} nrf_mvdma_source_error_t;

/** @brief MVDMA data sink errors. */
typedef enum
{
#if NRF_MVDMA_HAS_NEW_VER
    NRF_MVDMA_SINK_ERROR_NONE         = MVDMA_SINK_BUSERROR_BUSERROR_NoError,          ///< No error.
    NRF_MVDMA_SINK_ERROR_READ         = MVDMA_SINK_BUSERROR_BUSERROR_ReadError,        ///< Error related to memory when reading joblist.
    NRF_MVDMA_SINK_ERROR_WRITE        = MVDMA_SINK_BUSERROR_BUSERROR_WriteError,       ///< Error related to memory/register when writing data.
    NRF_MVDMA_SINK_ERROR_DECODE_READ  = MVDMA_SINK_BUSERROR_BUSERROR_ReadDecodeError,  ///< Error related to the joblist address when reading joblist.
    NRF_MVDMA_SINK_ERROR_DECODE_WRITE = MVDMA_SINK_BUSERROR_BUSERROR_WriteDecodeError, ///< Error related to the memory/register address when writing data.
#else
    NRF_MVDMA_SINK_ERROR_NONE         = MVDMA_STATUS_SINKBUSERROR_BUSERROR_NoError,          ///< No error.
    NRF_MVDMA_SINK_ERROR_SLAVE_READ   = MVDMA_STATUS_SINKBUSERROR_BUSERROR_ReadSlaveError,   ///< Read error generated by AXI slave.
    NRF_MVDMA_SINK_ERROR_SLAVE_WRITE  = MVDMA_STATUS_SINKBUSERROR_BUSERROR_WriteSlaveError,  ///< Write error generated by AXI slave.
    NRF_MVDMA_SINK_ERROR_DECODE_READ  = MVDMA_STATUS_SINKBUSERROR_BUSERROR_ReadDecodeError,  ///< Read error generated by interconnect.
    NRF_MVDMA_SINK_ERROR_DECODE_WRITE = MVDMA_STATUS_SINKBUSERROR_BUSERROR_WriteDecodeError, ///< Write error generated by interconnect.
#endif
} nrf_mvdma_sink_error_t;

/**
 * @brief Function for activating the specified MVDMA task.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] task  Task to be activated.
 */
NRF_STATIC_INLINE void nrf_mvdma_task_trigger(NRF_MVDMA_Type * p_reg,
                                              nrf_mvdma_task_t task);

/**
 * @brief Function for getting the address of the specified MVDMA task register.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] task  Specified task.
 *
 * @return Address of the specified task register.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_task_address_get(NRF_MVDMA_Type const * p_reg,
                                                      nrf_mvdma_task_t       task);

/**
 * @brief Function for getting the START task by its index.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] index Index of the START task.
 *
 * @return START task.
 */
NRF_STATIC_INLINE nrf_mvdma_task_t nrf_mvdma_start_task_get(NRF_MVDMA_Type const * p_reg,
                                                            uint8_t                index);
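
/*
 * Informative example (not part of the API): when the descriptor list is selected at
 * run time, nrf_mvdma_start_task_get() can be combined with nrf_mvdma_task_trigger().
 * A minimal sketch:
 *
 *     // Start processing of the descriptor list selected by 'list_index'.
 *     static void mvdma_start_list(NRF_MVDMA_Type * p_reg, uint8_t list_index)
 *     {
 *         nrf_mvdma_task_t task = nrf_mvdma_start_task_get(p_reg, list_index);
 *         nrf_mvdma_task_trigger(p_reg, task);
 *     }
 */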

/**
 * @brief Function for clearing the specified MVDMA event.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] event Event to clear.
 */
NRF_STATIC_INLINE void nrf_mvdma_event_clear(NRF_MVDMA_Type * p_reg,
                                             nrf_mvdma_event_t event);

/**
 * @brief Function for retrieving the state of the MVDMA event.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] event Event to be checked.
 *
 * @retval true  The event has been generated.
 * @retval false The event has not been generated.
 */
NRF_STATIC_INLINE bool nrf_mvdma_event_check(NRF_MVDMA_Type const * p_reg,
                                             nrf_mvdma_event_t      event);
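
/*
 * Informative example (not part of the API): a simple blocking transfer can poll the
 * event registers. A minimal sketch, assuming the descriptor lists have already been
 * configured and p_reg points to the MVDMA instance in use:
 *
 *     // Trigger a transfer on descriptor list 0 and wait for completion.
 *     nrf_mvdma_event_clear(p_reg, NRF_MVDMA_EVENT_END);
 *     nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_START0);
 *     while (!nrf_mvdma_event_check(p_reg, NRF_MVDMA_EVENT_END))
 *     {
 *         // Busy-wait; a real application may sleep or use interrupts instead.
 *     }
 *     nrf_mvdma_event_clear(p_reg, NRF_MVDMA_EVENT_END);
 */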

/**
 * @brief Function for getting the address of the specified MVDMA event register.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] event Specified event.
 *
 * @return Address of the specified event register.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_event_address_get(NRF_MVDMA_Type const * p_reg,
                                                       nrf_mvdma_event_t      event);

/**
 * @brief Function for enabling specified interrupts.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] mask  Mask of interrupts to be enabled.
 *                  Use @ref nrf_mvdma_int_mask_t values for bit masking.
 */
NRF_STATIC_INLINE void nrf_mvdma_int_enable(NRF_MVDMA_Type * p_reg, uint32_t mask);

/**
 * @brief Function for disabling specified interrupts.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] mask  Mask of interrupts to be disabled.
 *                  Use @ref nrf_mvdma_int_mask_t values for bit masking.
 */
NRF_STATIC_INLINE void nrf_mvdma_int_disable(NRF_MVDMA_Type * p_reg, uint32_t mask);

/**
 * @brief Function for checking if the specified interrupts are enabled.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] mask  Mask of interrupts to be checked.
 *                  Use @ref nrf_mvdma_int_mask_t values for bit masking.
 *
 * @return Mask of enabled interrupts.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_int_enable_check(NRF_MVDMA_Type const * p_reg, uint32_t mask);

/**
 * @brief Function for retrieving the state of pending interrupts.
 *
 * @note States of pending interrupts are saved as a bitmask.
 *       A bit set at a particular position means that the interrupt for the corresponding event is pending.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Bitmask with information about pending interrupts.
 *         Use @ref nrf_mvdma_int_mask_t values for bit masking.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_int_pending_get(NRF_MVDMA_Type const * p_reg);
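
/*
 * Informative example (not part of the API): a minimal interrupt-driven setup, assuming
 * the application enables the MVDMA IRQ line in the NVIC and provides the interrupt
 * handler itself (instance and handler wiring are outside this header):
 *
 *     // Enable completion and bus-error interrupts.
 *     nrf_mvdma_int_enable(p_reg, NRF_MVDMA_INT_END_MASK |
 *                                 NRF_MVDMA_INT_SOURCEBUSERROR_MASK |
 *                                 NRF_MVDMA_INT_SINKBUSERROR_MASK);
 *
 *     // In the interrupt handler:
 *     if (nrf_mvdma_event_check(p_reg, NRF_MVDMA_EVENT_END))
 *     {
 *         nrf_mvdma_event_clear(p_reg, NRF_MVDMA_EVENT_END);
 *         // Handle transfer completion here.
 *     }
 */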

#if defined(DPPI_PRESENT) || defined(__NRFX_DOXYGEN__)
/**
 * @brief Function for setting the subscribe configuration for a given
 *        MVDMA task.
 *
 * @param[in] p_reg   Pointer to the structure of registers of the peripheral.
 * @param[in] task    Task for which to set the configuration.
 * @param[in] channel Channel through which to subscribe events.
 */
NRF_STATIC_INLINE void nrf_mvdma_subscribe_set(NRF_MVDMA_Type * p_reg,
                                               nrf_mvdma_task_t task,
                                               uint8_t          channel);

/**
 * @brief Function for clearing the subscribe configuration for a given
 *        MVDMA task.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] task  Task for which to clear the configuration.
 */
NRF_STATIC_INLINE void nrf_mvdma_subscribe_clear(NRF_MVDMA_Type * p_reg,
                                                 nrf_mvdma_task_t task);

/**
 * @brief Function for setting the publish configuration for a given
 *        MVDMA event.
 *
 * @param[in] p_reg   Pointer to the structure of registers of the peripheral.
 * @param[in] event   Event for which to set the configuration.
 * @param[in] channel Channel through which to publish the event.
 */
NRF_STATIC_INLINE void nrf_mvdma_publish_set(NRF_MVDMA_Type * p_reg,
                                             nrf_mvdma_event_t event,
                                             uint8_t           channel);

/**
 * @brief Function for clearing the publish configuration for a given
 *        MVDMA event.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] event Event for which to clear the configuration.
 */
NRF_STATIC_INLINE void nrf_mvdma_publish_clear(NRF_MVDMA_Type * p_reg,
                                               nrf_mvdma_event_t event);
#endif // defined(DPPI_PRESENT) || defined(__NRFX_DOXYGEN__)
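
/*
 * Informative example (not part of the API): with DPPI, an MVDMA transfer can be started
 * by a hardware event from another peripheral. A minimal sketch, assuming 'dppi_channel'
 * has already been allocated and enabled, and the publishing peripheral is configured
 * elsewhere:
 *
 *     // Start descriptor list 0 whenever an event is published on 'dppi_channel'.
 *     nrf_mvdma_subscribe_set(p_reg, NRF_MVDMA_TASK_START0, dppi_channel);
 *
 *     // Publish the END event on the same channel, for example to chain transfers.
 *     nrf_mvdma_publish_set(p_reg, NRF_MVDMA_EVENT_END, dppi_channel);
 */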

/**
 * @brief Function for setting the MVDMA mode.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] mode  Desired operating mode for MVDMA.
 */
NRF_STATIC_INLINE void nrf_mvdma_mode_set(NRF_MVDMA_Type * p_reg, nrf_mvdma_mode_t mode);

#if NRF_MVDMA_HAS_AXIMODE
/**
 * @brief Function for setting the AXI mode.
 *
 * @param[in] p_reg   Pointer to the structure of registers of the peripheral.
 * @param[in] aximode Desired AXI mode for MVDMA.
 */
NRF_STATIC_INLINE void nrf_mvdma_aximode_set(NRF_MVDMA_Type * p_reg, nrf_mvdma_aximode_t aximode);
#endif

/**
 * @brief Function for setting the pointer to the source descriptor list
 *        or the pointer to the list of descriptor list pointers,
 *        depending on the configured @ref nrf_mvdma_mode_t mode.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] p_job Pointer to a job list.
 */
NRF_STATIC_INLINE void nrf_mvdma_source_list_ptr_set(NRF_MVDMA_Type *       p_reg,
                                                     nrf_vdma_job_t const * p_job);

/**
 * @brief Function for getting the pointer to the source descriptor list
 *        or the pointer to the list of descriptor list pointers,
 *        depending on the configured @ref nrf_mvdma_mode_t mode.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Pointer to a job list.
 */
NRF_STATIC_INLINE nrf_vdma_job_t * nrf_mvdma_source_list_ptr_get(NRF_MVDMA_Type const * p_reg);

/**
 * @brief Function for setting the pointer to the sink descriptor list
 *        or the pointer to the list of descriptor list pointers,
 *        depending on the configured @ref nrf_mvdma_mode_t mode.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] p_job Pointer to a job list.
 */
NRF_STATIC_INLINE void nrf_mvdma_sink_list_ptr_set(NRF_MVDMA_Type *       p_reg,
                                                   nrf_vdma_job_t const * p_job);

/**
 * @brief Function for getting the pointer to the sink descriptor list
 *        or the pointer to the list of descriptor list pointers,
 *        depending on the configured @ref nrf_mvdma_mode_t mode.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Pointer to a job list.
 */
NRF_STATIC_INLINE nrf_vdma_job_t * nrf_mvdma_sink_list_ptr_get(NRF_MVDMA_Type const * p_reg);
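
/*
 * Informative example (not part of the API): a complete single-mode transfer ties the
 * functions above together. A minimal blocking sketch, assuming the caller provides
 * source and sink job lists prepared and terminated as required by helpers/nrf_vdma.h:
 *
 *     static void mvdma_copy_blocking(NRF_MVDMA_Type *       p_reg,
 *                                     nrf_vdma_job_t const * p_source_jobs,
 *                                     nrf_vdma_job_t const * p_sink_jobs)
 *     {
 *         nrf_mvdma_mode_set(p_reg, NRF_MVDMA_MODE_SINGLE);
 *         nrf_mvdma_source_list_ptr_set(p_reg, p_source_jobs);
 *         nrf_mvdma_sink_list_ptr_set(p_reg, p_sink_jobs);
 *
 *         nrf_mvdma_event_clear(p_reg, NRF_MVDMA_EVENT_END);
 *         nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_START0);
 *         while (!nrf_mvdma_event_check(p_reg, NRF_MVDMA_EVENT_END))
 *         {
 *             // Busy-wait for the sink descriptor list to complete.
 *         }
 *         nrf_mvdma_event_clear(p_reg, NRF_MVDMA_EVENT_END);
 *     }
 */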

/**
 * @brief Function for getting the result of CRC checksum calculation.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Result of CRC checksum calculation.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_crc_result_get(NRF_MVDMA_Type const * p_reg);

/**
 * @brief Function for getting the status of the intermediate FIFO.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Intermediate FIFO status.
 */
NRF_STATIC_INLINE nrf_mvdma_fifo_status_t nrf_mvdma_fifo_status_get(NRF_MVDMA_Type const * p_reg);

/**
 * @brief Function for checking the MVDMA activity.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @retval true  MVDMA is processing data.
 * @retval false MVDMA is idle.
 */
NRF_STATIC_INLINE bool nrf_mvdma_activity_check(NRF_MVDMA_Type const * p_reg);
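
/*
 * Informative example (not part of the API): the status helpers above can be used when
 * pausing a transfer. A minimal sketch for the new peripheral version (on the old
 * version, NRF_MVDMA_TASK_STOP and NRF_MVDMA_EVENT_STOPPED would be used instead):
 *
 *     #if NRF_MVDMA_HAS_NEW_VER
 *     nrf_mvdma_event_clear(p_reg, NRF_MVDMA_EVENT_PAUSED);
 *     nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_PAUSE);
 *     while (!nrf_mvdma_event_check(p_reg, NRF_MVDMA_EVENT_PAUSED))
 *     {
 *         // Wait for the transfer to reach an idle stage on the memory bus.
 *     }
 *     if (nrf_mvdma_activity_check(p_reg) ||
 *         (nrf_mvdma_fifo_status_get(p_reg) != NRF_MVDMA_FIFO_STATUS_EMPTY))
 *     {
 *         // Data may still be in flight; decide here whether to resume or reset.
 *     }
 *     #endif
 */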

/**
 * @brief Function for getting the bus error of the MVDMA data source.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Bus error of data source.
 */
NRF_STATIC_INLINE nrf_mvdma_source_error_t nrf_mvdma_source_error_get(NRF_MVDMA_Type const * p_reg);

/**
 * @brief Function for getting the bus error of the MVDMA data sink.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Bus error of data sink.
 */
NRF_STATIC_INLINE nrf_mvdma_sink_error_t nrf_mvdma_sink_error_get(NRF_MVDMA_Type const * p_reg);
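
/*
 * Informative example (not part of the API): after a SOURCEBUSERROR or SINKBUSERROR
 * event, the error getters report what went wrong. A minimal sketch:
 *
 *     if (nrf_mvdma_event_check(p_reg, NRF_MVDMA_EVENT_SOURCEBUSERROR))
 *     {
 *         nrf_mvdma_event_clear(p_reg, NRF_MVDMA_EVENT_SOURCEBUSERROR);
 *         nrf_mvdma_source_error_t err = nrf_mvdma_source_error_get(p_reg);
 *         if (err != NRF_MVDMA_SOURCE_ERROR_NONE)
 *         {
 *             // Handle or log the source bus error here.
 *         }
 *     }
 */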

/**
 * @brief Function for getting the latest address being accessed on the source AXI channel.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Latest address being accessed on the source AXI channel.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_last_source_address_get(NRF_MVDMA_Type const * p_reg);

/**
 * @brief Function for getting the latest address being accessed on the sink AXI channel.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Latest address being accessed on the sink AXI channel.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_last_sink_address_get(NRF_MVDMA_Type const * p_reg);

/**
 * @brief Function for getting the number of completed jobs in the current source descriptor list.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Number of completed jobs in the current source descriptor list.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_source_job_count_get(NRF_MVDMA_Type const * p_reg);

/**
 * @brief Function for getting the number of completed jobs in the current sink descriptor list.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 *
 * @return Number of completed jobs in the current sink descriptor list.
 */
NRF_STATIC_INLINE uint32_t nrf_mvdma_sink_job_count_get(NRF_MVDMA_Type const * p_reg);
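
/*
 * Informative example (not part of the API): the progress getters above are handy for
 * diagnostics, for example after a bus error or a pause. A minimal sketch that collects
 * a snapshot into a caller-defined structure (the structure is a placeholder, not part
 * of this header):
 *
 *     typedef struct
 *     {
 *         uint32_t source_address;
 *         uint32_t sink_address;
 *         uint32_t source_jobs_done;
 *         uint32_t sink_jobs_done;
 *     } mvdma_progress_t;
 *
 *     static void mvdma_progress_get(NRF_MVDMA_Type const * p_reg, mvdma_progress_t * p_out)
 *     {
 *         p_out->source_address   = nrf_mvdma_last_source_address_get(p_reg);
 *         p_out->sink_address     = nrf_mvdma_last_sink_address_get(p_reg);
 *         p_out->source_jobs_done = nrf_mvdma_source_job_count_get(p_reg);
 *         p_out->sink_jobs_done   = nrf_mvdma_sink_job_count_get(p_reg);
 *     }
 */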

#ifndef NRF_DECLARE_ONLY

NRF_STATIC_INLINE void nrf_mvdma_task_trigger(NRF_MVDMA_Type * p_reg,
                                              nrf_mvdma_task_t task)
{
    *((volatile uint32_t *)((uint8_t *)p_reg + (uint32_t)task)) = 0x1uL;
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_task_address_get(NRF_MVDMA_Type const * p_reg,
                                                      nrf_mvdma_task_t       task)
{
    return ((uint32_t)p_reg + (uint32_t)task);
}

NRF_STATIC_INLINE nrf_mvdma_task_t nrf_mvdma_start_task_get(NRF_MVDMA_Type const * p_reg,
                                                            uint8_t                index)
{
    (void)p_reg;
    return (nrf_mvdma_task_t)(NRFX_OFFSETOF(NRF_MVDMA_Type, TASKS_START[index]));
}

NRF_STATIC_INLINE void nrf_mvdma_event_clear(NRF_MVDMA_Type * p_reg,
                                             nrf_mvdma_event_t event)
{
    *((volatile uint32_t *)((uint8_t *)p_reg + (uint32_t)event)) = 0x0uL;
}

NRF_STATIC_INLINE bool nrf_mvdma_event_check(NRF_MVDMA_Type const * p_reg,
                                             nrf_mvdma_event_t      event)
{
    return nrf_event_check(p_reg, event);
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_event_address_get(NRF_MVDMA_Type const * p_reg,
                                                       nrf_mvdma_event_t      event)
{
    return ((uint32_t)p_reg + (uint32_t)event);
}

NRF_STATIC_INLINE void nrf_mvdma_int_enable(NRF_MVDMA_Type * p_reg, uint32_t mask)
{
    p_reg->INTENSET = mask;
}

NRF_STATIC_INLINE void nrf_mvdma_int_disable(NRF_MVDMA_Type * p_reg, uint32_t mask)
{
    p_reg->INTENCLR = mask;
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_int_enable_check(NRF_MVDMA_Type const * p_reg, uint32_t mask)
{
    return (p_reg->INTENSET & mask);
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_int_pending_get(NRF_MVDMA_Type const * p_reg)
{
    return p_reg->INTPEND;
}

#if defined(DPPI_PRESENT)
NRF_STATIC_INLINE void nrf_mvdma_subscribe_set(NRF_MVDMA_Type * p_reg,
                                               nrf_mvdma_task_t task,
                                               uint8_t          channel)
{
    *((volatile uint32_t *) ((uint8_t *) p_reg + (uint32_t) task + 0x80uL)) =
        ((uint32_t)channel | NRF_SUBSCRIBE_PUBLISH_ENABLE);
}

NRF_STATIC_INLINE void nrf_mvdma_subscribe_clear(NRF_MVDMA_Type * p_reg,
                                                 nrf_mvdma_task_t task)
{
    *((volatile uint32_t *) ((uint8_t *) p_reg + (uint32_t) task + 0x80uL)) = 0;
}

NRF_STATIC_INLINE void nrf_mvdma_publish_set(NRF_MVDMA_Type * p_reg,
                                             nrf_mvdma_event_t event,
                                             uint8_t           channel)
{
    *((volatile uint32_t *) ((uint8_t *) p_reg + (uint32_t) event + 0x80uL)) =
        ((uint32_t)channel | NRF_SUBSCRIBE_PUBLISH_ENABLE);
}

NRF_STATIC_INLINE void nrf_mvdma_publish_clear(NRF_MVDMA_Type * p_reg,
                                               nrf_mvdma_event_t event)
{
    *((volatile uint32_t *) ((uint8_t *) p_reg + (uint32_t) event + 0x80uL)) = 0;
}
#endif // defined(DPPI_PRESENT)

NRF_STATIC_INLINE void nrf_mvdma_mode_set(NRF_MVDMA_Type * p_reg, nrf_mvdma_mode_t mode)
{
    p_reg->CONFIG.MODE = ((uint32_t)mode << MVDMA_CONFIG_MODE_MODE_Pos);
}

#if NRF_MVDMA_HAS_AXIMODE
NRF_STATIC_INLINE void nrf_mvdma_aximode_set(NRF_MVDMA_Type * p_reg, nrf_mvdma_aximode_t aximode)
{
    p_reg->CONFIG.AXIMODE = ((uint32_t)aximode << MVDMA_CONFIG_AXIMODE_AXIMODE_Pos);
}
#endif

NRF_STATIC_INLINE void nrf_mvdma_source_list_ptr_set(NRF_MVDMA_Type *       p_reg,
                                                     nrf_vdma_job_t const * p_job)
{
#if NRF_MVDMA_HAS_NEW_VER
    p_reg->SOURCE.LISTPTR = (uint32_t)p_job;
#else
    p_reg->CONFIG.SOURCELISTPTR = (uint32_t)p_job;
#endif
}

NRF_STATIC_INLINE nrf_vdma_job_t * nrf_mvdma_source_list_ptr_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return (nrf_vdma_job_t *)(p_reg->SOURCE.LISTPTR);
#else
    return (nrf_vdma_job_t *)(p_reg->CONFIG.SOURCELISTPTR);
#endif
}

NRF_STATIC_INLINE void nrf_mvdma_sink_list_ptr_set(NRF_MVDMA_Type *       p_reg,
                                                   nrf_vdma_job_t const * p_job)
{
#if NRF_MVDMA_HAS_NEW_VER
    p_reg->SINK.LISTPTR = (uint32_t)p_job;
#else
    p_reg->CONFIG.SINKLISTPTR = (uint32_t)p_job;
#endif
}

NRF_STATIC_INLINE nrf_vdma_job_t * nrf_mvdma_sink_list_ptr_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return (nrf_vdma_job_t *)(p_reg->SINK.LISTPTR);
#else
    return (nrf_vdma_job_t *)(p_reg->CONFIG.SINKLISTPTR);
#endif
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_crc_result_get(NRF_MVDMA_Type const * p_reg)
{
    return p_reg->STATUS.CRCRESULT;
}

NRF_STATIC_INLINE nrf_mvdma_fifo_status_t nrf_mvdma_fifo_status_get(NRF_MVDMA_Type const * p_reg)
{
    return (nrf_mvdma_fifo_status_t)(p_reg->STATUS.FIFO);
}

NRF_STATIC_INLINE bool nrf_mvdma_activity_check(NRF_MVDMA_Type const * p_reg)
{
    return (p_reg->STATUS.ACTIVE ==
            (MVDMA_STATUS_ACTIVE_ACTIVE_Active << MVDMA_STATUS_ACTIVE_ACTIVE_Pos));
}

NRF_STATIC_INLINE nrf_mvdma_source_error_t nrf_mvdma_source_error_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return (nrf_mvdma_source_error_t)(p_reg->SOURCE.BUSERROR);
#else
    return (nrf_mvdma_source_error_t)(p_reg->STATUS.SOURCEBUSERROR);
#endif
}

NRF_STATIC_INLINE nrf_mvdma_sink_error_t nrf_mvdma_sink_error_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return (nrf_mvdma_sink_error_t)(p_reg->SINK.BUSERROR);
#else
    return (nrf_mvdma_sink_error_t)(p_reg->STATUS.SINKBUSERROR);
#endif
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_last_source_address_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return p_reg->SOURCE.ADDRESS;
#else
    return p_reg->STATUS.SOURCEADDRESS;
#endif
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_last_sink_address_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return p_reg->SINK.ADDRESS;
#else
    return p_reg->STATUS.SINKADDRESS;
#endif
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_source_job_count_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return p_reg->SOURCE.JOBCOUNT;
#else
    return p_reg->STATUS.SOURCEJOBCOUNT;
#endif
}

NRF_STATIC_INLINE uint32_t nrf_mvdma_sink_job_count_get(NRF_MVDMA_Type const * p_reg)
{
#if NRF_MVDMA_HAS_NEW_VER
    return p_reg->SINK.JOBCOUNT;
#else
    return p_reg->STATUS.SINKJOBCOUNT;
#endif
}

#endif // NRF_DECLARE_ONLY

/** @} */

#ifdef __cplusplus
}
#endif

#endif // NRF_MVDMA_H__