/*
 * Copyright (c) 2021 Espressif Systems (Shanghai) Co., Ltd.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <zephyr/kernel.h>
#include <string.h>
#include <zephyr/sys/math_extras.h>
#if CONFIG_SOC_ESP32
#include <esp32/spiram.h>
#elif CONFIG_SOC_ESP32S2
#include <esp32s2/spiram.h>
#elif CONFIG_SOC_ESP32S3
#include <esp32s3/spiram.h>
#endif
#include <esp_attr.h>

#if (CONFIG_ESP_SPIRAM || (CONFIG_HEAP_MEM_POOL_SIZE > 0) || (CONFIG_ESP_HEAP_MEM_POOL_REGION_1_SIZE > 0))

#if (CONFIG_ESP_HEAP_MEM_POOL_REGION_1_SIZE > 0)
/*
 * Backing storage for a second internal (DRAM0) heap region.
 * Placed in .noinit so it is not zeroed at startup, and aligned for
 * pointer-sized accesses as required by the heap allocator.
 */
char __aligned(sizeof(void *)) __NOINIT_ATTR dram0_seg_1_heap[CONFIG_ESP_HEAP_MEM_POOL_REGION_1_SIZE];

/* Registered as an iterable k_heap so the kernel initializes it at boot. */
STRUCT_SECTION_ITERABLE(k_heap, _internal_heap_1) = {
	.heap = {
		.init_mem = dram0_seg_1_heap,
		.init_bytes = CONFIG_ESP_HEAP_MEM_POOL_REGION_1_SIZE,
	}
};
#endif
#if defined(CONFIG_ESP_SPIRAM)
/*
 * Marker object placed at the start of the external RAM (SPIRAM) data
 * segment; its address is used as the base of the SPIRAM heap.
 */
EXT_RAM_ATTR int _spiram_data_start;

/* Heap covering the external SPIRAM region. */
STRUCT_SECTION_ITERABLE(k_heap, _spiram_heap) = {
	.heap = {
		.init_mem = &_spiram_data_start,
#if (CONFIG_ESP_SPIRAM_SIZE <= 0x400000)
		.init_bytes = CONFIG_ESP_SPIRAM_SIZE,
#else
		/* Cap the heap at 4 MB even when the chip has more SPIRAM --
		 * presumably only the first 4 MB is mapped into the data
		 * address space; confirm against the SoC memory map.
		 */
		.init_bytes = 0x400000,
#endif
	},
};
#endif

#if (CONFIG_ESP_SPIRAM || (CONFIG_ESP_HEAP_MEM_POOL_REGION_1_SIZE > 0))
z_esp_aligned_alloc(struct k_heap * heap,size_t align,size_t size)46 static void *z_esp_aligned_alloc(struct k_heap *heap, size_t align, size_t size)
47 {
48 void *mem;
49 struct k_heap **heap_ref;
50 size_t __align;
51
52 /*
53 * Adjust the size to make room for our heap reference.
54 * Merge a rewind bit with align value (see sys_heap_aligned_alloc()).
55 * This allows for storing the heap pointer right below the aligned
56 * boundary without wasting any memory.
57 */
58 if (size_add_overflow(size, sizeof(heap_ref), &size)) {
59 return NULL;
60 }
61 __align = align | sizeof(heap_ref);
62
63 mem = k_heap_aligned_alloc(heap, __align, size, K_NO_WAIT);
64 if (mem == NULL) {
65 return NULL;
66 }
67
68 heap_ref = mem;
69 *heap_ref = heap;
70 mem = ++heap_ref;
71 __ASSERT(align == 0 || ((uintptr_t)mem & (align - 1)) == 0,
72 "misaligned memory at %p (align = %zu)", mem, align);
73
74 return mem;
75 }
/**
 * Allocate and zero an array of @p nmemb elements of @p size bytes from
 * @p heap, with pointer-size alignment. Returns NULL if nmemb * size
 * overflows or the allocation fails.
 */
static void *z_esp_aligned_calloc(struct k_heap *heap, size_t nmemb, size_t size)
{
	void *ret;
	size_t bounds;

	/* Reject nmemb * size overflow before allocating. */
	if (size_mul_overflow(nmemb, size, &bounds)) {
		return NULL;
	}
	ret = z_esp_aligned_alloc(heap, sizeof(void *), bounds);
	if (ret != NULL) {
		(void)memset(ret, 0, bounds);
	}
	return ret;
}
#endif

/**
 * Allocate @p size bytes from internal RAM: try the default kernel heap
 * first, then fall back to the secondary DRAM0 region heap.
 * Returns NULL when both fail (or neither is configured).
 *
 * NOTE(review): the kernel-heap path aligns to sizeof(void *) rather than
 * the caller-supplied @p align; all callers in this file pass
 * sizeof(void *), so behavior matches -- confirm before passing larger
 * alignments.
 */
static void *z_esp_alloc_internal(size_t align, size_t size)
{
	void *ptr = NULL;
#if (CONFIG_HEAP_MEM_POOL_SIZE > 0)
	ptr = k_aligned_alloc(sizeof(void *), size);
#endif
#if (CONFIG_ESP_HEAP_MEM_POOL_REGION_1_SIZE > 0)
	if (ptr == NULL) {
		ptr = z_esp_aligned_alloc(&_internal_heap_1, align, size);
	}
#endif
	return ptr;
}
/**
 * Allocate and zero an array of @p nmemb elements of @p size bytes from
 * internal RAM: try the default kernel heap first, then fall back to the
 * secondary DRAM0 region heap. Returns NULL on overflow or exhaustion.
 */
static void *z_esp_calloc_internal(size_t nmemb, size_t size)
{
	void *ptr = NULL;
#if (CONFIG_HEAP_MEM_POOL_SIZE > 0)
	size_t bounds;

	/* Reject nmemb * size overflow before allocating. */
	if (!size_mul_overflow(nmemb, size, &bounds)) {
		ptr = k_aligned_alloc(sizeof(void *), bounds);
		if (ptr != NULL) {
			(void)memset(ptr, 0, bounds);
		}
	}
#endif
#if (CONFIG_ESP_HEAP_MEM_POOL_REGION_1_SIZE > 0)
	if (ptr == NULL) {
		/* Fallback path performs its own overflow check and zeroing. */
		ptr = z_esp_aligned_calloc(&_internal_heap_1, nmemb, size);
	}
#endif
	return ptr;
}

#if (CONFIG_HEAP_MEM_POOL_SIZE > 0)
/**
 * Linker-wrapped k_malloc(): routes allocations between internal RAM and
 * external SPIRAM based on size, with optional cross-region fallback.
 *
 * Allocations below CONFIG_ESP_HEAP_MIN_EXTRAM_THRESHOLD prefer internal
 * RAM; larger ones prefer SPIRAM. With
 * CONFIG_ESP_HEAP_SEARCH_ALL_REGIONS, a failed allocation retries in the
 * other region. Returns NULL when all attempted regions are exhausted.
 *
 * NOTE(review): the first SEARCH_ALL_REGIONS fallback references
 * _spiram_heap, which only exists under CONFIG_ESP_SPIRAM; presumably
 * Kconfig makes SEARCH_ALL_REGIONS depend on SPIRAM -- confirm.
 */
void *__wrap_k_malloc(size_t size)
{
	void *ptr = NULL;
#if defined(CONFIG_ESP_SPIRAM)
	if (size < CONFIG_ESP_HEAP_MIN_EXTRAM_THRESHOLD) {
#endif
		ptr = z_esp_alloc_internal(sizeof(void *), size);
#if defined(CONFIG_ESP_HEAP_SEARCH_ALL_REGIONS)
		/* Internal RAM exhausted: retry in SPIRAM. */
		if (ptr == NULL) {
			ptr = z_esp_aligned_alloc(&_spiram_heap, sizeof(void *), size);
		}
#endif
#if defined(CONFIG_ESP_SPIRAM)
	} else {
		ptr = z_esp_aligned_alloc(&_spiram_heap, sizeof(void *), size);
#if defined(CONFIG_ESP_HEAP_SEARCH_ALL_REGIONS)
		/* SPIRAM exhausted: retry in internal RAM. */
		if (ptr == NULL) {
			ptr = z_esp_alloc_internal(sizeof(void *), size);
		}
#endif
	}
#endif
	return ptr;
}
/**
 * Linker-wrapped k_calloc(): zeroed array allocation with the same
 * region-selection policy as __wrap_k_malloc() -- small requests prefer
 * internal RAM, large ones prefer SPIRAM, with optional cross-region
 * fallback under CONFIG_ESP_HEAP_SEARCH_ALL_REGIONS.
 *
 * NOTE(review): the threshold compares the per-element @p size only, not
 * nmemb * size, so a large array of small elements still prefers internal
 * RAM -- confirm this is intended.
 */
void *__wrap_k_calloc(size_t nmemb, size_t size)
{
	void *ptr = NULL;
#if defined(CONFIG_ESP_SPIRAM)
	if (size < CONFIG_ESP_HEAP_MIN_EXTRAM_THRESHOLD) {
#endif
		ptr = z_esp_calloc_internal(nmemb, size);
#if defined(CONFIG_ESP_HEAP_SEARCH_ALL_REGIONS)
		/* Internal RAM exhausted: retry in SPIRAM. */
		if (ptr == NULL) {
			ptr = z_esp_aligned_calloc(&_spiram_heap, nmemb, size);
		}
#endif
#if defined(CONFIG_ESP_SPIRAM)
	} else {
		ptr = z_esp_aligned_calloc(&_spiram_heap, nmemb, size);
#if defined(CONFIG_ESP_HEAP_SEARCH_ALL_REGIONS)
		/* SPIRAM exhausted: retry in internal RAM. */
		if (ptr == NULL) {
			ptr = z_esp_calloc_internal(nmemb, size);
		}
#endif
	}
#endif
	return ptr;
}
#endif

#endif
