1 /*
2 * Copyright (c) 2023, Nordic Semiconductor ASA
3 * All rights reserved.
4 *
5 * SPDX-License-Identifier: BSD-3-Clause
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions are met:
9 *
10 * 1. Redistributions of source code must retain the above copyright notice, this
11 * list of conditions and the following disclaimer.
12 *
13 * 2. Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in the
15 * documentation and/or other materials provided with the distribution.
16 *
17 * 3. Neither the name of the copyright holder nor the names of its
18 * contributors may be used to endorse or promote products derived from this
19 * software without specific prior written permission.
20 *
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
24 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
25 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
26 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
27 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
28 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
29 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
31 * POSSIBILITY OF SUCH DAMAGE.
32 */
33
34 #ifndef NRFY_MVDMA_H__
35 #define NRFY_MVDMA_H__
36
37 #include <nrfx.h>
38 #include <hal/nrf_mvdma.h>
39 #include <helpers/nrf_vdma.h>
40
41 #ifdef __cplusplus
42 extern "C" {
43 #endif
44
/* Forward declaration of the list-request structure; full definition appears later
 * in this header (after the feature macros). */
typedef struct nrfy_mvdma_list_request_t nrfy_mvdma_list_request_t;

/* Forward declarations of internal helpers. They are referenced by the public
 * inline functions above their definitions; bodies are at the end of this header. */
NRFY_STATIC_INLINE bool __nrfy_internal_mvdma_event_handle(NRF_MVDMA_Type *  p_reg,
                                                           uint32_t          mask,
                                                           nrf_mvdma_event_t event,
                                                           uint32_t *        p_event_mask);

NRFY_STATIC_INLINE
uint32_t __nrfy_internal_mvdma_events_process(NRF_MVDMA_Type *                  p_reg,
                                              uint32_t                          mask,
                                              nrfy_mvdma_list_request_t const * p_list_request);

NRFY_STATIC_INLINE void __nrfy_internal_mvdma_event_enabled_clear(NRF_MVDMA_Type *  p_reg,
                                                                  uint32_t          mask,
                                                                  nrf_mvdma_event_t event);

NRFY_STATIC_INLINE void __nrfy_internal_mvdma_source_buffers_flush(nrf_vdma_job_t * p_source_job);

NRFY_STATIC_INLINE uint32_t __nrfy_internal_mvdma_sink_job_count_get(NRF_MVDMA_Type const * p_reg);
64
65 /**
66 * @defgroup nrfy_mvdma MVDMA HALY
67 * @{
68 * @ingroup nrf_mvdma
69 * @brief Hardware access layer with cache and barrier support for managing the MVDMA peripheral.
70 */
71
#if NRF_MVDMA_HAS_NEW_VER || defined(__NRFX_DOXYGEN__)
/** @refhal{NRF_MVDMA_HAS_NEW_VER} */
#define NRFY_MVDMA_HAS_NEW_VER 1
#else
#define NRFY_MVDMA_HAS_NEW_VER 0
#endif

#if NRF_MVDMA_HAS_AXIMODE || defined(__NRFX_DOXYGEN__)
/** @refhal{NRF_MVDMA_HAS_AXIMODE} */
#define NRFY_MVDMA_HAS_AXIMODE 1
#else
#define NRFY_MVDMA_HAS_AXIMODE 0
#endif
85
/** @brief Structure describing list execution request for the MVDMA. */
struct nrfy_mvdma_list_request_t
{
    nrf_vdma_job_t * p_source_job_list; ///< Pointer to the source job list.
    nrf_vdma_job_t * p_sink_job_list;   ///< Pointer to the sink job list.
};

#if NRF_MVDMA_HAS_MULTIMODE
/** @brief Structure describing lists of job list execution requests for the MVDMA. */
typedef struct
{
    nrf_vdma_job_t ** pp_source_job_lists; ///< Pointer to the list of the source job lists.
    nrf_vdma_job_t ** pp_sink_job_lists;   ///< Pointer to the list of the sink job lists.
    uint8_t           length;              ///< Length of the list of the sink/source job lists.
} nrfy_mvdma_multi_list_request_t;
#endif

/** @brief Auxiliary structure describing the MVDMA job list with unspecified direction. */
typedef struct
{
    nrf_vdma_job_t * p_jobs;    ///< Pointer to the job list.
    size_t           job_count; ///< Number of jobs executed, including terminating job.
    uint32_t         last_addr; ///< Last sink or source address accessed by the peripheral when the list was processed.
} nrfy_mvdma_list_desc_t;
110
/**
 * @brief Function for initializing the specified MVDMA interrupts.
 *
 * @param[in] p_reg        Pointer to the structure of registers of the peripheral.
 * @param[in] mask         Mask of interrupts to be initialized.
 * @param[in] irq_priority Interrupt priority.
 * @param[in] enable       True if the interrupts are to be enabled, false otherwise.
 */
NRFY_STATIC_INLINE void nrfy_mvdma_int_init(NRF_MVDMA_Type * p_reg,
                                            uint32_t         mask,
                                            uint8_t          irq_priority,
                                            bool             enable)
{
    // Clear any stale events selected by @p mask first, so that no spurious
    // interrupt fires as soon as the NVIC line is enabled below.
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_END);
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_RESET);
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_STARTED);
#if NRFY_MVDMA_HAS_NEW_VER
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_PAUSED);
#else
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_STOPPED);
#endif
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_SINKBUSERROR);
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_SOURCEBUSERROR);
#if NRFY_MVDMA_HAS_NEW_VER
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_SINKSELECTJOBDONE);
    __nrfy_internal_mvdma_event_enabled_clear(p_reg, mask, NRF_MVDMA_EVENT_SOURCESELECTJOBDONE);
#endif
    // Commit the event-clear writes before touching the NVIC.
    nrf_barrier_w();

    NRFX_IRQ_PRIORITY_SET(nrfx_get_irq_number(p_reg), irq_priority);
    NRFX_IRQ_ENABLE(nrfx_get_irq_number(p_reg));
    if (enable)
    {
        nrf_mvdma_int_enable(p_reg, mask);
    }
    nrf_barrier_w();
}
148
/**
 * @brief Function for uninitializing the specified MVDMA interrupts.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 */
NRFY_STATIC_INLINE void nrfy_mvdma_int_uninit(NRF_MVDMA_Type * p_reg)
{
    // Only the NVIC line is disabled here; the peripheral INTEN settings are left untouched.
    NRFX_IRQ_DISABLE(nrfx_get_irq_number(p_reg));
    nrf_barrier_w();
}
159
160 /**
161 * @brief Function for processing the specified MVDMA events.
162 *
163 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
164 * @param[in] mask Mask of events to be processed,
165 * created by @ref NRFY_EVENT_TO_INT_BITMASK().
166 * @param[in] p_list_request Pointer to the structure of list execution request associated with
167 * the last operation. Can be NULL.
168 *
169 * @return Mask of events that were generated and processed.
170 * To be checked against the result of @ref NRFY_EVENT_TO_INT_BITMASK().
171 */
nrfy_mvdma_events_process(NRF_MVDMA_Type * p_reg,uint32_t mask,nrfy_mvdma_list_request_t * p_list_request)172 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_events_process(NRF_MVDMA_Type * p_reg,
173 uint32_t mask,
174 nrfy_mvdma_list_request_t * p_list_request)
175 {
176 uint32_t evt_mask = __nrfy_internal_mvdma_events_process(p_reg, mask, p_list_request);
177 nrf_barrier_w();
178 return evt_mask;
179 }
180
/**
 * @brief Function for starting the MVDMA jobs in single-mode.
 *
 * @param[in] p_reg          Pointer to the structure of registers of the peripheral.
 * @param[in] p_list_request Pointer to the structure of list execution request if the transaction
 *                           is to be blocking. NULL for non-blocking transactions.
 */
NRFY_STATIC_INLINE void nrfy_mvdma_start(NRF_MVDMA_Type *                  p_reg,
                                         nrfy_mvdma_list_request_t const * p_list_request)
{
    nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_START0);
    if (p_list_request)
    {
        // Blocking mode: make sure the START task write is committed, then
        // poll until the END event is seen and processed (sink buffers invalidated).
        nrf_barrier_w();
        uint32_t evt_mask = NRFY_EVENT_TO_INT_BITMASK(NRF_MVDMA_EVENT_END);
        while (!__nrfy_internal_mvdma_events_process(p_reg, evt_mask, p_list_request))
        {}
    }
    nrf_barrier_w();
}
201
202 #if NRF_MVDMA_HAS_MULTIMODE || defined(__NRFX_DOXYGEN__)
/**
 * @brief Function for starting the MVDMA jobs in multi-mode.
 *
 * @param[in] p_reg          Pointer to the structure of registers of the peripheral.
 * @param[in] idx            Index of the job list that will be executed.
 * @param[in] p_list_request Pointer to the structure of lists execution request if the transaction
 *                           is to be blocking. NULL for non-blocking transactions.
 */
NRFY_STATIC_INLINE
void nrfy_mvdma_multi_start(NRF_MVDMA_Type *                        p_reg,
                            uint8_t                                 idx,
                            nrfy_mvdma_multi_list_request_t const * p_list_request)
{
    nrf_mvdma_task_trigger(p_reg, nrf_mvdma_start_task_get(p_reg, idx));
    if (p_list_request)
    {
        // Build a single-list view of entry @p idx so the shared event-processing
        // helper can invalidate the right sink buffers.
        nrfy_mvdma_list_request_t list_req =
        {
            .p_source_job_list = p_list_request->pp_source_job_lists[idx],
            .p_sink_job_list   = p_list_request->pp_sink_job_lists[idx],
        };

        nrf_barrier_w();
        uint32_t evt_mask = NRFY_EVENT_TO_INT_BITMASK(NRF_MVDMA_EVENT_END);
        while (!__nrfy_internal_mvdma_events_process(p_reg, evt_mask, &list_req))
        {}
    }
    nrf_barrier_w();
}
232
233 /**
234 * @brief Function for setting the MVDMA job lists in multi-mode.
235 *
236 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
237 * @param[in] p_list_request Pointer to the structure of list execution request.
238 */
239 NRFY_STATIC_INLINE
nrfy_mvdma_multi_job_list_set(NRF_MVDMA_Type * p_reg,nrfy_mvdma_multi_list_request_t const * p_list_request)240 void nrfy_mvdma_multi_job_list_set(NRF_MVDMA_Type * p_reg,
241 nrfy_mvdma_multi_list_request_t const * p_list_request)
242 {
243 for (size_t i = 0; i < p_list_request->length; i++)
244 {
245 __nrfy_internal_mvdma_source_buffers_flush(p_list_request->pp_source_job_lists[i]);
246 }
247
248 nrf_mvdma_source_list_ptr_set(p_reg, (nrf_vdma_job_t *)p_list_request->pp_source_job_lists);
249 nrf_mvdma_sink_list_ptr_set(p_reg, (nrf_vdma_job_t *)p_list_request->pp_sink_job_lists);
250 }
251
252 /**
253 * @brief Function for getting the MVDMA job lists in multi-mode.
254 *
255 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
256 * @param[out] p_list_request Pointer to the structure to be filled with list execution request.
257 */
258 NRFY_STATIC_INLINE
nrfy_mvdma_multi_job_list_get(NRF_MVDMA_Type const * p_reg,nrfy_mvdma_multi_list_request_t * p_list_request)259 void nrfy_mvdma_multi_job_list_get(NRF_MVDMA_Type const * p_reg,
260 nrfy_mvdma_multi_list_request_t * p_list_request)
261 {
262 p_list_request->pp_source_job_lists = (nrf_vdma_job_t **)nrf_mvdma_source_list_ptr_get(p_reg);
263 p_list_request->pp_sink_job_lists = (nrf_vdma_job_t **)nrf_mvdma_sink_list_ptr_get(p_reg);
264 }
265
266 #endif // NRF_MVDMA_HAS_MULTIMODE || defined(__NRFX_DOXYGEN__)
267
/**
 * @brief Function for setting the MVDMA jobs.
 *
 * @param[in] p_reg          Pointer to the structure of registers of the peripheral.
 * @param[in] p_list_request Pointer to the structure of list execution request.
 */
NRFY_STATIC_INLINE void nrfy_mvdma_job_list_set(NRF_MVDMA_Type *                  p_reg,
                                                nrfy_mvdma_list_request_t const * p_list_request)
{
    // Write back cached source buffers so the DMA engine reads up-to-date data.
    __nrfy_internal_mvdma_source_buffers_flush(p_list_request->p_source_job_list);

    nrf_mvdma_source_list_ptr_set(p_reg, p_list_request->p_source_job_list);
    nrf_mvdma_sink_list_ptr_set(p_reg, p_list_request->p_sink_job_list);
    nrf_barrier_w();
}
283
/**
 * @brief Function for resetting the MVDMA peripheral.
 *
 * @param[in] p_reg Pointer to the structure of registers of the peripheral.
 * @param[in] wait  True if reset is to be done in blocking mode, false otherwise.
 */
NRFY_STATIC_INLINE void nrfy_mvdma_reset(NRF_MVDMA_Type * p_reg,
                                         bool             wait)
{
    nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_RESET);
    if (wait)
    {
        // Blocking mode: poll until the RESET event fires. No list request is
        // passed, so the RESET path in the helper performs no cache maintenance.
        nrf_barrier_w();
        uint32_t evt_mask = NRFY_EVENT_TO_INT_BITMASK(NRF_MVDMA_EVENT_RESET);
        while (!__nrfy_internal_mvdma_events_process(p_reg, evt_mask, NULL))
        {}
    }
    nrf_barrier_w();
}
303
/**
 * @brief Function for aborting the MVDMA transaction.
 *
 * @param[in] p_reg          Pointer to the structure of registers of the peripheral.
 * @param[in] p_list_request Pointer to the structure of list execution request.
 */
NRFY_STATIC_INLINE void nrfy_mvdma_abort(NRF_MVDMA_Type *                  p_reg,
                                         nrfy_mvdma_list_request_t const * p_list_request)
{
    // Newer MVDMA revisions expose PAUSE/PAUSED; older ones use STOP/STOPPED.
#if NRF_MVDMA_HAS_NEW_VER
    nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_PAUSE);
#else
    nrf_mvdma_task_trigger(p_reg, NRF_MVDMA_TASK_STOP);
#endif

    if (p_list_request)
    {
        // Blocking mode: wait until the abort completes, invalidating the sink
        // buffers that were written before the transfer was halted.
        nrf_barrier_w();
#if NRF_MVDMA_HAS_NEW_VER
        uint32_t evt_mask = NRFY_EVENT_TO_INT_BITMASK(NRF_MVDMA_EVENT_PAUSED);
#else
        uint32_t evt_mask = NRFY_EVENT_TO_INT_BITMASK(NRF_MVDMA_EVENT_STOPPED);
#endif
        while (!__nrfy_internal_mvdma_events_process(p_reg, evt_mask, p_list_request))
        {}
    }
    nrf_barrier_w();
}
332
/**
 * @brief Function for getting the MVDMA source job details.
 *
 * @param[in]  p_reg           Pointer to the structure of registers of the peripheral.
 * @param[out] p_job_list_desc Pointer to the structure to be filled with job list description.
 */
NRFY_STATIC_INLINE
void nrfy_mvdma_source_job_description_get(NRF_MVDMA_Type const *   p_reg,
                                           nrfy_mvdma_list_desc_t * p_job_list_desc)
{
    nrf_barrier_rw();
    p_job_list_desc->p_jobs    = nrf_mvdma_source_list_ptr_get(p_reg);
    p_job_list_desc->job_count = nrf_mvdma_source_job_count_get(p_reg);
    p_job_list_desc->last_addr = nrf_mvdma_last_source_address_get(p_reg);
    nrf_barrier_r();
}
349
/**
 * @brief Function for getting the MVDMA sink job details.
 *
 * @param[in]  p_reg           Pointer to the structure of registers of the peripheral.
 * @param[out] p_job_list_desc Pointer to the structure to be filled with job list description.
 */
NRFY_STATIC_INLINE
void nrfy_mvdma_sink_job_description_get(NRF_MVDMA_Type const *   p_reg,
                                         nrfy_mvdma_list_desc_t * p_job_list_desc)
{
    nrf_barrier_rw();
    p_job_list_desc->p_jobs    = nrf_mvdma_sink_list_ptr_get(p_reg);
    p_job_list_desc->job_count = nrf_mvdma_sink_job_count_get(p_reg);
    p_job_list_desc->last_addr = nrf_mvdma_last_sink_address_get(p_reg);
    nrf_barrier_r();
}
366
/** @refhal{nrf_mvdma_task_trigger} */
NRFY_STATIC_INLINE void nrfy_mvdma_task_trigger(NRF_MVDMA_Type * p_reg,
                                                nrf_mvdma_task_t task)
{
    nrf_mvdma_task_trigger(p_reg, task);
    nrf_barrier_w();
}
374
375 /** @refhal{nrf_mvdma_task_address_get} */
nrfy_mvdma_task_address_get(NRF_MVDMA_Type const * p_reg,nrf_mvdma_task_t task)376 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_task_address_get(NRF_MVDMA_Type const * p_reg,
377 nrf_mvdma_task_t task)
378 {
379 return nrf_mvdma_task_address_get(p_reg, task);
380 }
381
382 /** @refhal{nrf_mvdma_start_task_get} */
nrfy_mvdma_start_task_get(NRF_MVDMA_Type const * p_reg,uint8_t index)383 NRFY_STATIC_INLINE nrf_mvdma_task_t nrfy_mvdma_start_task_get(NRF_MVDMA_Type const * p_reg,
384 uint8_t index)
385 {
386 return nrf_mvdma_start_task_get(p_reg, index);
387 }
388
/** @refhal{nrf_mvdma_event_clear} */
NRFY_STATIC_INLINE void nrfy_mvdma_event_clear(NRF_MVDMA_Type *  p_reg,
                                               nrf_mvdma_event_t event)
{
    nrf_mvdma_event_clear(p_reg, event);
    nrf_barrier_w();
}
396
397 /** @refhal{nrf_mvdma_event_check} */
nrfy_mvdma_event_check(NRF_MVDMA_Type const * p_reg,nrf_mvdma_event_t event)398 NRFY_STATIC_INLINE bool nrfy_mvdma_event_check(NRF_MVDMA_Type const * p_reg,
399 nrf_mvdma_event_t event)
400 {
401 nrf_barrier_r();
402 bool check = nrf_mvdma_event_check(p_reg, event);
403 nrf_barrier_r();
404 return check;
405 }
406
407 /** @refhal{nrf_mvdma_event_address_get} */
nrfy_mvdma_event_address_get(NRF_MVDMA_Type const * p_reg,nrf_mvdma_event_t event)408 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_event_address_get(NRF_MVDMA_Type const * p_reg,
409 nrf_mvdma_event_t event)
410 {
411 return nrf_mvdma_event_address_get(p_reg, event);
412 }
413
/** @refhal{nrf_mvdma_int_enable} */
NRFY_STATIC_INLINE void nrfy_mvdma_int_enable(NRF_MVDMA_Type * p_reg,
                                              uint32_t         mask)
{
    nrf_mvdma_int_enable(p_reg, mask);
    nrf_barrier_w();
}
421
/** @refhal{nrf_mvdma_int_disable} */
NRFY_STATIC_INLINE void nrfy_mvdma_int_disable(NRF_MVDMA_Type * p_reg,
                                               uint32_t         mask)
{
    nrf_mvdma_int_disable(p_reg, mask);
    nrf_barrier_w();
}
429
430 /** @refhal{nrf_mvdma_int_enable_check} */
nrfy_mvdma_int_enable_check(NRF_MVDMA_Type const * p_reg,uint32_t mask)431 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_int_enable_check(NRF_MVDMA_Type const * p_reg,
432 uint32_t mask)
433 {
434 nrf_barrier_rw();
435 uint32_t check = nrf_mvdma_int_enable_check(p_reg, mask);
436 nrf_barrier_r();
437 return check;
438 }
439
440 /** @refhal{nrf_mvdma_int_pending_get} */
nrfy_mvdma_int_pending_get(NRF_MVDMA_Type const * p_reg)441 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_int_pending_get(NRF_MVDMA_Type const * p_reg)
442 {
443 nrf_barrier_r();
444 uint32_t pending = nrf_mvdma_int_pending_get(p_reg);
445 nrf_barrier_r();
446 return pending;
447 }
448
449 #if defined(DPPI_PRESENT) || defined(__NRFX_DOXYGEN__)
/** @refhal{nrf_mvdma_subscribe_set} */
NRFY_STATIC_INLINE void nrfy_mvdma_subscribe_set(NRF_MVDMA_Type * p_reg,
                                                 nrf_mvdma_task_t task,
                                                 uint8_t          channel)
{
    nrf_mvdma_subscribe_set(p_reg, task, channel);
    nrf_barrier_w();
}
458
/** @refhal{nrf_mvdma_subscribe_clear} */
NRFY_STATIC_INLINE void nrfy_mvdma_subscribe_clear(NRF_MVDMA_Type * p_reg,
                                                   nrf_mvdma_task_t task)
{
    nrf_mvdma_subscribe_clear(p_reg, task);
    nrf_barrier_w();
}
466
/** @refhal{nrf_mvdma_publish_set} */
NRFY_STATIC_INLINE void nrfy_mvdma_publish_set(NRF_MVDMA_Type *  p_reg,
                                               nrf_mvdma_event_t event,
                                               uint8_t           channel)
{
    nrf_mvdma_publish_set(p_reg, event, channel);
    nrf_barrier_w();
}
475
/** @refhal{nrf_mvdma_publish_clear} */
NRFY_STATIC_INLINE void nrfy_mvdma_publish_clear(NRF_MVDMA_Type *  p_reg,
                                                 nrf_mvdma_event_t event)
{
    nrf_mvdma_publish_clear(p_reg, event);
    nrf_barrier_w();
}
483 #endif
484
/** @refhal{nrf_mvdma_mode_set} */
NRFY_STATIC_INLINE void nrfy_mvdma_mode_set(NRF_MVDMA_Type * p_reg,
                                            nrf_mvdma_mode_t mode)
{
    nrf_mvdma_mode_set(p_reg, mode);
    nrf_barrier_w();
}
492
493 #if NRFY_MVDMA_HAS_AXIMODE
/** @refhal{nrf_mvdma_aximode_set} */
NRFY_STATIC_INLINE void nrfy_mvdma_aximode_set(NRF_MVDMA_Type *    p_reg,
                                               nrf_mvdma_aximode_t aximode)
{
    nrf_mvdma_aximode_set(p_reg, aximode);
    nrf_barrier_w();
}
501 #endif
502
/** @refhal{nrf_mvdma_source_list_ptr_set} */
NRFY_STATIC_INLINE void nrfy_mvdma_source_list_ptr_set(NRF_MVDMA_Type *       p_reg,
                                                       nrf_vdma_job_t const * p_job)
{
    nrf_mvdma_source_list_ptr_set(p_reg, p_job);
    nrf_barrier_w();
}
510
511 /** @refhal{nrf_mvdma_source_list_ptr_get} */
nrfy_mvdma_source_list_ptr_get(NRF_MVDMA_Type const * p_reg)512 NRFY_STATIC_INLINE nrf_vdma_job_t * nrfy_mvdma_source_list_ptr_get(NRF_MVDMA_Type const * p_reg)
513 {
514 nrf_barrier_rw();
515 nrf_vdma_job_t * p_job = nrf_mvdma_source_list_ptr_get(p_reg);
516 nrf_barrier_r();
517 return p_job;
518 }
519
/** @refhal{nrf_mvdma_sink_list_ptr_set} */
NRFY_STATIC_INLINE void nrfy_mvdma_sink_list_ptr_set(NRF_MVDMA_Type *       p_reg,
                                                     nrf_vdma_job_t const * p_job)
{
    nrf_mvdma_sink_list_ptr_set(p_reg, p_job);
    nrf_barrier_w();
}
527
528 /** @refhal{nrf_mvdma_sink_list_ptr_get} */
nrfy_mvdma_sink_list_ptr_get(NRF_MVDMA_Type const * p_reg)529 NRFY_STATIC_INLINE nrf_vdma_job_t * nrfy_mvdma_sink_list_ptr_get(NRF_MVDMA_Type const * p_reg)
530 {
531 nrf_barrier_rw();
532 nrf_vdma_job_t * p_job = nrf_mvdma_sink_list_ptr_get(p_reg);
533 nrf_barrier_r();
534 return p_job;
535 }
536
537 /** @refhal{nrf_mvdma_crc_result_get} */
nrfy_mvdma_crc_result_get(NRF_MVDMA_Type const * p_reg)538 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_crc_result_get(NRF_MVDMA_Type const * p_reg)
539 {
540 nrf_barrier_r();
541 uint32_t crc_result = nrf_mvdma_crc_result_get(p_reg);
542 nrf_barrier_r();
543 return crc_result;
544 }
545
546 /** @refhal{nrf_mvdma_fifo_status_get} */
nrfy_mvdma_fifo_status_get(NRF_MVDMA_Type const * p_reg)547 NRFY_STATIC_INLINE nrf_mvdma_fifo_status_t nrfy_mvdma_fifo_status_get(NRF_MVDMA_Type const * p_reg)
548 {
549 nrf_barrier_r();
550 nrf_mvdma_fifo_status_t fifo_status = nrf_mvdma_fifo_status_get(p_reg);
551 nrf_barrier_r();
552 return fifo_status;
553 }
554
555 /** @refhal{nrf_mvdma_activity_check} */
nrfy_mvdma_activity_check(NRF_MVDMA_Type * p_reg)556 NRFY_STATIC_INLINE bool nrfy_mvdma_activity_check(NRF_MVDMA_Type * p_reg)
557 {
558 nrf_barrier_r();
559 bool check = nrf_mvdma_activity_check(p_reg);
560 nrf_barrier_r();
561 return check;
562 }
563
564 /** @refhal{nrf_mvdma_source_error_get} */
565 NRFY_STATIC_INLINE
nrfy_mvdma_source_error_get(NRF_MVDMA_Type const * p_reg)566 nrf_mvdma_source_error_t nrfy_mvdma_source_error_get(NRF_MVDMA_Type const * p_reg)
567 {
568 nrf_barrier_r();
569 nrf_mvdma_source_error_t error = nrf_mvdma_source_error_get(p_reg);
570 nrf_barrier_r();
571 return error;
572 }
573
574 /** @refhal{nrf_mvdma_sink_error_get} */
nrfy_mvdma_sink_error_get(NRF_MVDMA_Type const * p_reg)575 NRFY_STATIC_INLINE nrf_mvdma_sink_error_t nrfy_mvdma_sink_error_get(NRF_MVDMA_Type const * p_reg)
576 {
577 nrf_barrier_r();
578 nrf_mvdma_sink_error_t error = nrf_mvdma_sink_error_get(p_reg);
579 nrf_barrier_r();
580 return error;
581 }
582
583 /** @refhal{nrf_mvdma_last_source_address_get} */
nrfy_mvdma_last_source_address_get(NRF_MVDMA_Type const * p_reg)584 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_last_source_address_get(NRF_MVDMA_Type const * p_reg)
585 {
586 nrf_barrier_r();
587 uint32_t address = nrf_mvdma_last_source_address_get(p_reg);
588 nrf_barrier_r();
589 return address;
590 }
591
592 /** @refhal{nrf_mvdma_last_sink_address_get} */
nrfy_mvdma_last_sink_address_get(NRF_MVDMA_Type const * p_reg)593 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_last_sink_address_get(NRF_MVDMA_Type const * p_reg)
594 {
595 nrf_barrier_r();
596 uint32_t address = nrf_mvdma_last_sink_address_get(p_reg);
597 nrf_barrier_r();
598 return address;
599 }
600
601 /** @refhal{nrf_mvdma_source_job_count_get} */
nrfy_mvdma_source_job_count_get(NRF_MVDMA_Type const * p_reg)602 NRFY_STATIC_INLINE uint32_t nrfy_mvdma_source_job_count_get(NRF_MVDMA_Type const * p_reg)
603 {
604 nrf_barrier_r();
605 uint32_t job_count = nrf_mvdma_source_job_count_get(p_reg);
606 nrf_barrier_r();
607 return job_count;
608 }
609
/** @refhal{nrf_mvdma_sink_job_count_get} */
NRFY_STATIC_INLINE uint32_t nrfy_mvdma_sink_job_count_get(NRF_MVDMA_Type const * p_reg)
{
    // Delegates to the internal helper, which is shared with the event-processing path.
    return __nrfy_internal_mvdma_sink_job_count_get(p_reg);
}
615
616 /** @} */
617
/* Checks whether @p event is selected in @p mask and has been generated; if so,
 * clears it and accumulates its bit into @p p_event_mask (when not NULL).
 * Returns true when the event was present and handled. */
NRFY_STATIC_INLINE bool __nrfy_internal_mvdma_event_handle(NRF_MVDMA_Type *  p_reg,
                                                           uint32_t          mask,
                                                           nrf_mvdma_event_t event,
                                                           uint32_t *        p_event_mask)
{
    if ((mask & NRFY_EVENT_TO_INT_BITMASK(event)) && nrf_mvdma_event_check(p_reg, event))
    {
        nrf_mvdma_event_clear(p_reg, event);
        if (p_event_mask)
        {
            *p_event_mask |= NRFY_EVENT_TO_INT_BITMASK(event);
        }
        return true;
    }
    return false;
}
634
/* Handles all events selected in @p mask: clears each generated event, accumulates
 * their bits into the returned mask, and performs sink-buffer cache invalidation
 * for events that mark the end of data transfer (END, PAUSED/STOPPED).
 *
 * NOTE(review): when @p mask includes END or PAUSED/STOPPED, @p p_list_request must
 * be non-NULL — the invalidation paths dereference it without a NULL check. Callers
 * in this file honor that (nrfy_mvdma_reset() passes NULL only with a RESET mask). */
NRFY_STATIC_INLINE
uint32_t __nrfy_internal_mvdma_events_process(NRF_MVDMA_Type *                  p_reg,
                                              uint32_t                          mask,
                                              nrfy_mvdma_list_request_t const * p_list_request)
{
    uint32_t evt_mask = 0;

    nrf_barrier_r();
    (void)__nrfy_internal_mvdma_event_handle(p_reg, mask, NRF_MVDMA_EVENT_RESET, &evt_mask);
    (void)__nrfy_internal_mvdma_event_handle(p_reg, mask, NRF_MVDMA_EVENT_STARTED, &evt_mask);
    (void)__nrfy_internal_mvdma_event_handle(p_reg, mask, NRF_MVDMA_EVENT_SINKBUSERROR, &evt_mask);
    (void)__nrfy_internal_mvdma_event_handle(p_reg,
                                             mask,
                                             NRF_MVDMA_EVENT_SOURCEBUSERROR,
                                             &evt_mask);
#if NRF_MVDMA_HAS_NEW_VER
    (void)__nrfy_internal_mvdma_event_handle(p_reg,
                                             mask,
                                             NRF_MVDMA_EVENT_SINKSELECTJOBDONE,
                                             &evt_mask);
    (void)__nrfy_internal_mvdma_event_handle(p_reg,
                                             mask,
                                             NRF_MVDMA_EVENT_SOURCESELECTJOBDONE,
                                             &evt_mask);
#endif

    // Tracks whether sink buffers were already invalidated on the PAUSED/STOPPED
    // path, to avoid doing it twice when END is also pending.
    bool invalidated = false;

#if NRF_MVDMA_HAS_NEW_VER
    if (__nrfy_internal_mvdma_event_handle(p_reg, mask, NRF_MVDMA_EVENT_PAUSED, &evt_mask))
#else
    if (__nrfy_internal_mvdma_event_handle(p_reg, mask, NRF_MVDMA_EVENT_STOPPED, &evt_mask))
#endif
    {
        // Aborted transfer: only the jobs the peripheral reports as executed
        // have been written, so invalidate exactly that many sink buffers.
        size_t job_count = __nrfy_internal_mvdma_sink_job_count_get(p_reg);
        for (size_t i = 0; i < job_count; i++)
        {
            NRFY_CACHE_INV(p_list_request->p_sink_job_list[i].p_buffer,
                           p_list_request->p_sink_job_list[i].size);
        }
        invalidated = true;
    }

    if (__nrfy_internal_mvdma_event_handle(p_reg, mask, NRF_MVDMA_EVENT_END, &evt_mask) &&
        !invalidated)
    {
        // Completed transfer: invalidate every sink buffer up to the
        // NULL-terminated end of the job list.
        for (nrf_vdma_job_t * p_job = p_list_request->p_sink_job_list;
             p_job->p_buffer != NULL;
             p_job++)
        {
            NRFY_CACHE_INV(p_job->p_buffer, p_job->size);
        }
    }

    return evt_mask;
}
691
/* Clears @p event only when its interrupt bit is selected in @p mask. */
NRFY_STATIC_INLINE void __nrfy_internal_mvdma_event_enabled_clear(NRF_MVDMA_Type *  p_reg,
                                                                  uint32_t          mask,
                                                                  nrf_mvdma_event_t event)
{
    if (mask & NRFY_EVENT_TO_INT_BITMASK(event))
    {
        nrf_mvdma_event_clear(p_reg, event);
    }
}
701
/* Writes back (flushes) the data-cache lines covering every source buffer in the
 * job list, so the DMA engine observes up-to-date memory contents. */
NRFY_STATIC_INLINE void __nrfy_internal_mvdma_source_buffers_flush(nrf_vdma_job_t * p_source_job)
{
    // Recognize if nrf_vdma_reduced_job_t is being used.
    if (p_source_job->attributes & NRF_VDMA_ATTRIBUTE_FIXED_ATTR)
    {
        size_t size = p_source_job->size;
        // With fixed attributes the second job entry's p_buffer points at an
        // array of reduced job descriptors sharing the same size/attributes.
        nrf_vdma_job_reduced_t * p_job_reduced = (nrf_vdma_job_reduced_t *)
                                                 (p_source_job + 1)->p_buffer;

        NRFY_CACHE_WB(p_source_job->p_buffer, size);

        // NOTE(review): `p_buffer != NULL` can never become false through
        // `p_buffer++` — this looks like it should test the entry's contents
        // (the list's NULL terminator) rather than the iterator pointer.
        // Behavior kept as-is; verify against the nrf_vdma job-list format.
        for (nrf_vdma_job_reduced_t * p_buffer = p_job_reduced; p_buffer != NULL; p_buffer++)
        {
            NRFY_CACHE_WB(p_buffer, size);
        }
    }
    else
    {
        // Regular job list: iterate until the NULL-buffer terminator entry.
        for (nrf_vdma_job_t * p_job = p_source_job; p_job->p_buffer != NULL; p_job++)
        {
            NRFY_CACHE_WB(p_job->p_buffer, p_job->size);
        }
    }
}
726
__nrfy_internal_mvdma_sink_job_count_get(NRF_MVDMA_Type const * p_reg)727 NRFY_STATIC_INLINE uint32_t __nrfy_internal_mvdma_sink_job_count_get(NRF_MVDMA_Type const * p_reg)
728 {
729 nrf_barrier_r();
730 uint32_t job_count = nrf_mvdma_sink_job_count_get(p_reg);
731 nrf_barrier_r();
732 return job_count;
733 }
734
735 #ifdef __cplusplus
736 }
737 #endif
738
739 #endif // NRFY_MVDMA_H__
740