/*
 * Copyright (c) 2024 Nordic Semiconductor ASA
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @file
 * nRF SoC specific public APIs for Device Memory Management (dmm) subsystem
 */

#ifndef SOC_NORDIC_COMMON_DMM_H_
#define SOC_NORDIC_COMMON_DMM_H_

#include <stdint.h>
#include <zephyr/devicetree.h>
#include <zephyr/linker/devicetree_regions.h>
#include <zephyr/mem_mgmt/mem_attr.h>
#include <zephyr/sys/util.h>

#ifdef __cplusplus
extern "C" {
#endif

/** @cond INTERNAL_HIDDEN */

#ifdef CONFIG_DCACHE

/* Determine if a memory region is cacheable. */
#define DMM_IS_REG_CACHEABLE(node_id) \
	COND_CODE_1(DT_NODE_HAS_PROP(node_id, zephyr_memory_attr), \
		    ((DT_PROP(node_id, zephyr_memory_attr) & DT_MEM_CACHEABLE)), \
		    (0))

/* Determine the required alignment of data buffers in the specified memory region.
 * Cache line alignment is required if the region is cacheable and the data cache is enabled.
 */
#define DMM_REG_ALIGN_SIZE(node_id) \
	(DMM_IS_REG_CACHEABLE(node_id) ? CONFIG_DCACHE_LINE_SIZE : sizeof(uint8_t))

#else

#define DMM_IS_REG_CACHEABLE(node_id) 0
#define DMM_REG_ALIGN_SIZE(node_id) (sizeof(uint8_t))

#endif /* CONFIG_DCACHE */

/* Determine the required alignment of data buffers in the memory region
 * associated with the specified device node.
 */
#define DMM_ALIGN_SIZE(node_id) DMM_REG_ALIGN_SIZE(DT_PHANDLE(node_id, memory_regions))

/**
 * @brief Get a reference to the memory region associated with the specified device node
 *
 * @param node_id Device node.
 *
 * @return Reference to the memory region. NULL if not defined for the given device node.
 */
#define DMM_DEV_TO_REG(node_id) \
	COND_CODE_1(DT_NODE_HAS_PROP(node_id, memory_regions), \
		((void *)DT_REG_ADDR(DT_PHANDLE(node_id, memory_regions))), (NULL))

/**
 * @brief Preallocate a buffer in the memory region associated with the specified device node
 *
 * @param node_id Device node.
 */
#define DMM_MEMORY_SECTION(node_id) \
	COND_CODE_1(DT_NODE_HAS_PROP(node_id, memory_regions), \
		(__attribute__((__section__(LINKER_DT_NODE_REGION_NAME( \
			DT_PHANDLE(node_id, memory_regions))))) \
		__aligned(DMM_ALIGN_SIZE(node_id))), \
		())
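/*
 * Illustrative usage (a sketch, not part of this API): a driver can place a
 * DMA buffer in the memory region referenced by its devicetree node and keep
 * a handle to that region for the runtime calls declared below. The node
 * label "my_uart", the buffer size and the variable names are hypothetical.
 *
 *	#define MY_DEV_NODE DT_NODELABEL(my_uart)
 *
 *	static uint8_t tx_buf[64] DMM_MEMORY_SECTION(MY_DEV_NODE);
 *
 *	void *mem_reg = DMM_DEV_TO_REG(MY_DEV_NODE);
 */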
#ifdef CONFIG_HAS_NORDIC_DMM

/**
 * @brief Prepare a DMA output buffer for the specified device
 *
 * Allocate an output buffer in the memory region that the given device can perform
 * DMA transfers from. Copy @p user_buffer contents into it.
 * Write back the data cache lines associated with the output buffer, if needed.
 *
 * @note Depending on the provided user buffer parameters and SoC architecture,
 *       dynamic allocation and cache operations might be skipped.
 *
 * @note @p buffer_out can be released using @ref dmm_buffer_in_release()
 *       to support transmitting from and receiving into the same buffer.
 *
 * @warning It is prohibited to read or write @p user_buffer or @p buffer_out contents
 *          from the time this function is called until @ref dmm_buffer_out_release()
 *          or @ref dmm_buffer_in_release() is called on the same buffer,
 *          or until this function returns with an error.
 *
 * @param region Memory region associated with the device to prepare the buffer for.
 * @param user_buffer CPU address (virtual if applicable) of the buffer containing data
 *                    to be processed by the given device.
 * @param user_length Length of the buffer containing data to be processed by the given device.
 * @param buffer_out Pointer to a bus address of the prepared DMA output buffer.
 *
 * @retval 0 If succeeded.
 * @retval -ENOMEM If the output buffer could not be allocated.
 * @retval -errno Negative errno for other failures.
 */
int dmm_buffer_out_prepare(void *region, void const *user_buffer, size_t user_length,
			   void **buffer_out);

/**
 * @brief Release the previously prepared DMA output buffer
 *
 * @param region Memory region associated with the device to release the buffer for.
 * @param buffer_out Bus address of the DMA output buffer previously prepared
 *                   with @ref dmm_buffer_out_prepare().
 *
 * @retval 0 If succeeded.
 * @retval -errno Negative errno code on failure.
 */
int dmm_buffer_out_release(void *region, void *buffer_out);

/**
 * @brief Prepare a DMA input buffer for the specified device
 *
 * Allocate an input buffer in the memory region that the given device can perform
 * DMA transfers to.
 *
 * @note Depending on the provided user buffer parameters and SoC architecture,
 *       dynamic allocation might be skipped.
 *
 * @warning It is prohibited to read or write @p user_buffer or @p buffer_in contents
 *          from the time this function is called until @ref dmm_buffer_in_release()
 *          is called on the same buffer, or until this function returns with an error.
 *
 * @param region Memory region associated with the device to prepare the buffer for.
 * @param user_buffer CPU address (virtual if applicable) of the buffer to be filled with data
 *                    from the given device.
 * @param user_length Length of the buffer to be filled with data from the given device.
 * @param buffer_in Pointer to a bus address of the prepared DMA input buffer.
 *
 * @retval 0 If succeeded.
 * @retval -ENOMEM If the input buffer could not be allocated.
 * @retval -errno Negative errno for other failures.
 */
int dmm_buffer_in_prepare(void *region, void *user_buffer, size_t user_length, void **buffer_in);

/**
 * @brief Release the previously prepared DMA input buffer
 *
 * Invalidate the data cache lines associated with the input buffer, if needed.
 * Copy @p buffer_in contents into @p user_buffer, if needed.
 *
 * @note The @p user_buffer and @p buffer_in pair provided in this call must match
 *       the pair provided in the prior call to @ref dmm_buffer_out_prepare()
 *       or @ref dmm_buffer_in_prepare().
 *
 * @param region Memory region associated with the device to release the buffer for.
 * @param user_buffer CPU address (virtual if applicable) of the buffer to be filled with data
 *                    from the given device.
 * @param user_length Length of the buffer to be filled with data from the given device.
 * @param buffer_in Bus address of the DMA input buffer previously prepared
 *                  with @ref dmm_buffer_in_prepare().
 *
 * @retval 0 If succeeded.
 * @retval -errno Negative errno code on failure.
 */
int dmm_buffer_in_release(void *region, void *user_buffer, size_t user_length, void *buffer_in);
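/*
 * Illustrative transfer flows (a sketch, assuming "mem_reg" was obtained with
 * DMM_DEV_TO_REG() and that start_dma_tx()/start_dma_rx() stand in for
 * device-specific transfer routines; all names are hypothetical):
 *
 *	// TX: hand user data to the device.
 *	void *dma_buf;
 *	int err = dmm_buffer_out_prepare(mem_reg, tx_data, tx_len, &dma_buf);
 *	if (err == 0) {
 *		start_dma_tx(dma_buf, tx_len);
 *		// ... wait for the transfer to complete ...
 *		err = dmm_buffer_out_release(mem_reg, dma_buf);
 *	}
 *
 *	// RX: receive into a DMA-capable buffer, then release to copy back if needed.
 *	err = dmm_buffer_in_prepare(mem_reg, rx_buf, rx_len, &dma_buf);
 *	if (err == 0) {
 *		start_dma_rx(dma_buf, rx_len);
 *		// ... wait for the transfer to complete ...
 *		err = dmm_buffer_in_release(mem_reg, rx_buf, rx_len, dma_buf);
 *	}
 */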
/**
 * @brief Initialize DMM.
 *
 * @retval 0 If succeeded.
 * @retval -errno Negative errno code on failure.
 */
int dmm_init(void);

/** @endcond */

#else

/* Pass-through stubs used when CONFIG_HAS_NORDIC_DMM is not enabled:
 * user buffers are handed to the device unchanged and no cache maintenance
 * or dynamic allocation is performed here.
 */

static ALWAYS_INLINE int dmm_buffer_out_prepare(void *region, void const *user_buffer,
						size_t user_length, void **buffer_out)
{
	ARG_UNUSED(region);
	ARG_UNUSED(user_length);
	*buffer_out = (void *)user_buffer;
	return 0;
}

static ALWAYS_INLINE int dmm_buffer_out_release(void *region, void *buffer_out)
{
	ARG_UNUSED(region);
	ARG_UNUSED(buffer_out);
	return 0;
}

static ALWAYS_INLINE int dmm_buffer_in_prepare(void *region, void *user_buffer, size_t user_length,
					       void **buffer_in)
{
	ARG_UNUSED(region);
	ARG_UNUSED(user_length);
	*buffer_in = user_buffer;
	return 0;
}

static ALWAYS_INLINE int dmm_buffer_in_release(void *region, void *user_buffer, size_t user_length,
					       void *buffer_in)
{
	ARG_UNUSED(region);
	ARG_UNUSED(user_buffer);
	ARG_UNUSED(user_length);
	ARG_UNUSED(buffer_in);
	return 0;
}

static ALWAYS_INLINE int dmm_init(void)
{
	return 0;
}

#endif /* CONFIG_HAS_NORDIC_DMM */

#ifdef __cplusplus
}
#endif

#endif /* SOC_NORDIC_COMMON_DMM_H_ */