// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdint.h>
#include <stdlib.h>
#include <stdbool.h>
#include <assert.h>
#include <string.h>
#include <stddef.h>
#include <stdio.h>
#include <sys/param.h>
#include <multi_heap.h>
#include "multi_heap_internal.h"

/* Note: Keep platform-specific parts in this header, this source
   file should depend on libc only */
#include "multi_heap_platform.h"

/* Defines compile-time configuration macros */
#include "multi_heap_config.h"

#ifdef MULTI_HEAP_POISONING

/* Alias MULTI_HEAP_POISONING_SLOW to SLOW for better readability */
#ifdef SLOW
#error "external header has defined SLOW"
#endif
#ifdef MULTI_HEAP_POISONING_SLOW
#define SLOW 1
#endif
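
/* In SLOW ("comprehensive") mode every freed block is filled with FREE_FILL_PATTERN
   and every allocation verifies that pattern is still intact before swapping it to
   MALLOC_FILL_PATTERN. This catches writes to freed memory, at the cost of extra
   fill/verify work on each alloc & free. */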

#define MALLOC_FILL_PATTERN 0xce
#define FREE_FILL_PATTERN 0xfe

#define HEAD_CANARY_PATTERN 0xABBA1234
#define TAIL_CANARY_PATTERN 0xBAAD5678


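/* Round 'num' up to the next multiple of 'align' ('align' must be a power of two) */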
#define ALIGN_UP(num, align) (((num) + ((align) - 1)) & ~((align) - 1))

typedef struct {
    uint32_t head_canary;
    MULTI_HEAP_BLOCK_OWNER
    size_t alloc_size;
} poison_head_t;

typedef struct {
    uint32_t tail_canary;
} poison_tail_t;

#define POISON_OVERHEAD (sizeof(poison_head_t) + sizeof(poison_tail_t))
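
/* Layout of a poisoned allocation:

     [ poison_head_t ][ caller data: alloc_size bytes ][ poison_tail_t ]
       head_canary                                       tail_canary
       block owner (optional)
       alloc_size

   The pointer handed to the caller points at the start of the data region;
   the head and tail structures are invisible to the application. */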

/* Given a "poisoned" region with pre-data header 'head', and actual data size 'alloc_size', fill in the head and tail
   region checks.

   Returns the pointer to the actual usable data buffer (ie after 'head')
*/
static uint8_t *poison_allocated_region(poison_head_t *head, size_t alloc_size)
{
    uint8_t *data = (uint8_t *)(&head[1]); /* start of data ie 'real' allocated buffer */
    poison_tail_t *tail = (poison_tail_t *)(data + alloc_size);
    head->alloc_size = alloc_size;
    head->head_canary = HEAD_CANARY_PATTERN;
    MULTI_HEAP_SET_BLOCK_OWNER(head);

    uint32_t tail_canary = TAIL_CANARY_PATTERN;
    if ((intptr_t)tail % sizeof(void *) == 0) {
        tail->tail_canary = tail_canary;
    } else {
        /* unaligned tail_canary */
        memcpy(&tail->tail_canary, &tail_canary, sizeof(uint32_t));
    }

    return data;
}

/* Given a pointer to some allocated data, check the head & tail poison structures (before & after it) that were
   previously injected by poison_allocated_region().

   Returns a pointer to the poison header structure, or NULL if the poison structures are corrupt.
*/
static poison_head_t *verify_allocated_region(void *data, bool print_errors)
{
    poison_head_t *head = (poison_head_t *)((intptr_t)data - sizeof(poison_head_t));
    poison_tail_t *tail = (poison_tail_t *)((intptr_t)data + head->alloc_size);

    /* check if the beginning of the data was overwritten */
    if (head->head_canary != HEAD_CANARY_PATTERN) {
        if (print_errors) {
            MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Bad head at %p. Expected 0x%08x got 0x%08x\n", &head->head_canary,
                   HEAD_CANARY_PATTERN, head->head_canary);
        }
        return NULL;
    }

    /* check if the end of the data was overrun */
    uint32_t canary;
    if ((intptr_t)tail % sizeof(void *) == 0) {
        canary = tail->tail_canary;
    } else {
        /* tail is unaligned */
        memcpy(&canary, &tail->tail_canary, sizeof(canary));
    }
    if (canary != TAIL_CANARY_PATTERN) {
        if (print_errors) {
            MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Bad tail at %p. Expected 0x%08x got 0x%08x\n", &tail->tail_canary,
                   TAIL_CANARY_PATTERN, canary);
        }
        return NULL;
    }

    return head;
}

#ifdef SLOW
/* Go through a region that should have the specified fill byte 'pattern',
   verify it.

   if expect_free is true, expect FREE_FILL_PATTERN otherwise MALLOC_FILL_PATTERN.

   if swap_pattern is true, swap patterns in the buffer (ie replace MALLOC_FILL_PATTERN with FREE_FILL_PATTERN, and vice versa.)

   Returns true if verification checks out.
*/
static bool verify_fill_pattern(void *data, size_t size, bool print_errors, bool expect_free, bool swap_pattern)
{
    const uint32_t FREE_FILL_WORD = (FREE_FILL_PATTERN << 24) | (FREE_FILL_PATTERN << 16) | (FREE_FILL_PATTERN << 8) | FREE_FILL_PATTERN;
    const uint32_t MALLOC_FILL_WORD = (MALLOC_FILL_PATTERN << 24) | (MALLOC_FILL_PATTERN << 16) | (MALLOC_FILL_PATTERN << 8) | MALLOC_FILL_PATTERN;

    const uint32_t EXPECT_WORD = expect_free ? FREE_FILL_WORD : MALLOC_FILL_WORD;
    const uint32_t REPLACE_WORD = expect_free ? MALLOC_FILL_WORD : FREE_FILL_WORD;
    bool valid = true;

    /* Use 4-byte operations as much as possible */
    if ((intptr_t)data % 4 == 0) {
        uint32_t *p = data;
        while (size >= 4) {
            if (*p != EXPECT_WORD) {
                if (print_errors) {
                    MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Invalid data at %p. Expected 0x%08x got 0x%08x\n", p, EXPECT_WORD, *p);
                }
                valid = false;
#ifndef NDEBUG
                /* If an assertion is going to fail as soon as we're done verifying the pattern, leave the rest of the
                   buffer contents as-is for better post-mortem analysis
                */
                swap_pattern = false;
#endif
            }
            if (swap_pattern) {
                *p = REPLACE_WORD;
            }
            p++;
            size -= 4;
        }
        data = p;
    }

    uint8_t *p = data;
    for (size_t i = 0; i < size; i++) {
        if (p[i] != (uint8_t)EXPECT_WORD) {
            if (print_errors) {
                MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Invalid data at %p. Expected 0x%02x got 0x%02x\n", &p[i], (uint8_t)EXPECT_WORD, p[i]);
            }
            valid = false;
#ifndef NDEBUG
            swap_pattern = false; // same as above
#endif
        }
        if (swap_pattern) {
            p[i] = (uint8_t)REPLACE_WORD;
        }
    }
    return valid;
}
#endif

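/* Aligned allocation: the poison head sits immediately before the data, so the request
   is passed down with an offset of sizeof(poison_head_t) to keep the caller-visible
   data pointer (rather than the head) on the requested alignment. */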
void *multi_heap_aligned_alloc(multi_heap_handle_t heap, size_t size, size_t alignment)
{
    if (!size) {
        return NULL;
    }

    if (size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }

    multi_heap_internal_lock(heap);
    poison_head_t *head = multi_heap_aligned_alloc_impl_offs(heap, size + POISON_OVERHEAD,
                                                             alignment, sizeof(poison_head_t));
    uint8_t *data = NULL;
    if (head != NULL) {
        data = poison_allocated_region(head, size);
#ifdef SLOW
        /* check everything we got back is FREE_FILL_PATTERN & swap for MALLOC_FILL_PATTERN */
        bool ret = verify_fill_pattern(data, size, true, true, true);
        assert(ret);
#endif
    } else {
        multi_heap_internal_unlock(heap);
        return NULL;
    }

    multi_heap_internal_unlock(heap);

    return data;
}

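/* malloc wrapper: reserve POISON_OVERHEAD extra bytes, poison the head & tail, and
   return the data pointer that sits just after the poison head. */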
void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
{
    if (!size) {
        return NULL;
    }

    if (size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }

    multi_heap_internal_lock(heap);
    poison_head_t *head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
    uint8_t *data = NULL;
    if (head != NULL) {
        data = poison_allocated_region(head, size);
#ifdef SLOW
        /* check everything we got back is FREE_FILL_PATTERN & swap for MALLOC_FILL_PATTERN */
        bool ret = verify_fill_pattern(data, size, true, true, true);
        assert(ret);
#endif
    }

    multi_heap_internal_unlock(heap);
    return data;
}

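/* free wrapper: verify the canaries around the buffer before handing the block
   (including its poison head) back to the underlying allocator. In SLOW mode the
   whole block is also overwritten with FREE_FILL_PATTERN so later allocations can
   detect writes to freed memory. */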
void multi_heap_free(multi_heap_handle_t heap, void *p)
{
    if (p == NULL) {
        return;
    }
    multi_heap_internal_lock(heap);

    poison_head_t *head = verify_allocated_region(p, true);
    assert(head != NULL);

#ifdef SLOW
    /* replace everything with FREE_FILL_PATTERN, including the poison head/tail */
    memset(head, FREE_FILL_PATTERN,
           head->alloc_size + POISON_OVERHEAD);
#endif
    multi_heap_free_impl(heap, head);

    multi_heap_internal_unlock(heap);
}

void multi_heap_aligned_free(multi_heap_handle_t heap, void *p)
{
    multi_heap_free(heap, p);
}

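/* realloc wrapper. With fast poisoning the resize is delegated to the underlying
   allocator and only the head & tail are re-stamped; with SLOW poisoning the data is
   copied into a freshly allocated (and freshly poisoned) buffer instead. */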
void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
{
    poison_head_t *head = NULL;
    poison_head_t *new_head;
    void *result = NULL;

    if (size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }
    if (p == NULL) {
        return multi_heap_malloc(heap, size);
    }
    if (size == 0) {
        multi_heap_free(heap, p);
        return NULL;
    }

    /* p != NULL, size != 0 */
    head = verify_allocated_region(p, true);
    assert(head != NULL);

    multi_heap_internal_lock(heap);

#ifndef SLOW
    new_head = multi_heap_realloc_impl(heap, head, size + POISON_OVERHEAD);
    if (new_head != NULL) {
        /* For "fast" poisoning we only rewrite the head & tail of the new block, so it is
           safe to re-poison here even if realloc resized the allocation in place.
        */
        result = poison_allocated_region(new_head, size);
    }
#else // SLOW
    /* When slow poisoning is enabled it becomes very fiddly to correctly fill memory when
       resizing in place (the buffer may be moved, possibly to an address overlapping the
       old buffer, grown, or shrunk in place.)

       For now we just malloc a new buffer, copy, and free. :|

       Note: If this ever changes, the multi_heap defrag realloc test should be enabled.
    */
    size_t orig_alloc_size = head->alloc_size;

    new_head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
    if (new_head != NULL) {
        result = poison_allocated_region(new_head, size);
        memcpy(result, p, MIN(size, orig_alloc_size));
        multi_heap_free(heap, p);
    }
#endif

    multi_heap_internal_unlock(heap);

    return result;
}

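/* Block handles refer to the underlying block, which starts at the poison head;
   skip past the head to return the caller-visible data address. */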
void *multi_heap_get_block_address(multi_heap_block_handle_t block)
{
    char *head = multi_heap_get_block_address_impl(block);
    return head + sizeof(poison_head_t);
}

void *multi_heap_get_block_owner(multi_heap_block_handle_t block)
{
    return MULTI_HEAP_GET_BLOCK_OWNER((poison_head_t*)multi_heap_get_block_address_impl(block));
}

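/* In SLOW mode the freshly registered region is pre-filled with FREE_FILL_PATTERN,
   so that the first allocations taken from it pass the free-pattern check. */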
multi_heap_handle_t multi_heap_register(void *start, size_t size)
{
#ifdef SLOW
    if (start != NULL) {
        memset(start, FREE_FILL_PATTERN, size);
    }
#endif
    return multi_heap_register_impl(start, size);
}

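/* Report sizes net of the poison head & tail, so callers only see usable bytes. */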
static inline void subtract_poison_overhead(size_t *arg) {
    if (*arg > POISON_OVERHEAD) {
        *arg -= POISON_OVERHEAD;
    } else {
        *arg = 0;
    }
}

size_t multi_heap_get_allocated_size(multi_heap_handle_t heap, void *p)
{
    poison_head_t *head = verify_allocated_region(p, true);
    assert(head != NULL);
    size_t result = multi_heap_get_allocated_size_impl(heap, head);
    return result;
}

void multi_heap_get_info(multi_heap_handle_t heap, multi_heap_info_t *info)
{
    multi_heap_get_info_impl(heap, info);
    /* don't count the heap poison head & tail overhead in the allocated bytes size */
    info->total_allocated_bytes -= info->allocated_blocks * POISON_OVERHEAD;
    /* trim largest_free_block to account for poison overhead */
    subtract_poison_overhead(&info->largest_free_block);
    /* similarly, trim total_free_bytes so there's no suggestion that
       a block this big may be available. */
    subtract_poison_overhead(&info->total_free_bytes);
    subtract_poison_overhead(&info->minimum_free_bytes);
}

size_t multi_heap_free_size(multi_heap_handle_t heap)
{
    size_t r = multi_heap_free_size_impl(heap);
    subtract_poison_overhead(&r);
    return r;
}

size_t multi_heap_minimum_free_size(multi_heap_handle_t heap)
{
    size_t r = multi_heap_minimum_free_size_impl(heap);
    subtract_poison_overhead(&r);
    return r;
}

/* Internal hooks used by multi_heap to manage poisoning, while keeping some modularity */

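/* Called by the heap implementation for each block during heap checks: free blocks can
   only be validated in SLOW mode (by checking their fill pattern); allocated blocks are
   validated via their head/tail canaries and a sanity check on the recorded size. */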
bool multi_heap_internal_check_block_poisoning(void *start, size_t size, bool is_free, bool print_errors)
{
    if (is_free) {
#ifdef SLOW
        return verify_fill_pattern(start, size, print_errors, true, false);
#else
        return true; /* can only verify empty blocks in SLOW mode */
#endif
    } else {
        void *data = (void *)((intptr_t)start + sizeof(poison_head_t));
        poison_head_t *head = verify_allocated_region(data, print_errors);
        if (head != NULL && head->alloc_size > size - POISON_OVERHEAD) {
            /* block can be bigger than alloc_size, for reasons of alignment & fragmentation,
               but block can never be smaller than head->alloc_size... */
            if (print_errors) {
                MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Size at %p expected <=0x%08x got 0x%08x\n", &head->alloc_size,
                       size - POISON_OVERHEAD, head->alloc_size);
            }
            return false;
        }
        return head != NULL;
    }
}

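/* Fill hook used by the heap implementation to stamp a region with the pattern that
   matches its state: FREE_FILL_PATTERN for free memory, MALLOC_FILL_PATTERN otherwise. */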
void multi_heap_internal_poison_fill_region(void *start, size_t size, bool is_free)
{
    memset(start, is_free ? FREE_FILL_PATTERN : MALLOC_FILL_PATTERN, size);
}

#else // !MULTI_HEAP_POISONING

#ifdef MULTI_HEAP_POISONING_SLOW
#error "MULTI_HEAP_POISONING_SLOW requires MULTI_HEAP_POISONING"
#endif

#endif // MULTI_HEAP_POISONING