1 /*
2  * SPDX-FileCopyrightText: 2015-2022 Espressif Systems (Shanghai) CO LTD
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 #include <stdint.h>
7 #include <stdlib.h>
8 #include <stdbool.h>
9 #include <assert.h>
10 #include <string.h>
11 #include <stddef.h>
12 #include <stdio.h>
13 #include <sys/param.h>
14 #include <multi_heap.h>
15 #include "multi_heap_internal.h"
16 
17 /* Note: Keep platform-specific parts in this header, this source
18    file should depend on libc only */
19 #include "multi_heap_platform.h"
20 
21 /* Defines compile-time configuration macros */
22 #include "multi_heap_config.h"
23 
24 #if CONFIG_HEAP_TLSF_USE_ROM_IMPL
25 /* Header containing the declaration of tlsf_poison_fill_pfunc_set()
26  * and tlsf_poison_check_pfunc_set() used to register callbacks to
27  * fill and check memory region with given patterns in the heap
28  * components.
29  */
30 #include "esp_rom_tlsf.h"
31 #endif
32 
33 #ifdef MULTI_HEAP_POISONING
34 
/* Alias MULTI_HEAP_POISONING_SLOW to SLOW for better readability */
36 #ifdef SLOW
37 #error "external header has defined SLOW"
38 #endif
39 #ifdef MULTI_HEAP_POISONING_SLOW
40 #define SLOW 1
41 #endif
42 
43 #define MALLOC_FILL_PATTERN 0xce
44 #define FREE_FILL_PATTERN 0xfe
45 
46 #define HEAD_CANARY_PATTERN 0xABBA1234
47 #define TAIL_CANARY_PATTERN 0xBAAD5678
48 
49 
50 #define ALIGN_UP(num, align) (((num) + ((align) - 1)) & ~((align) - 1))
51 
/* Header written immediately before every allocated buffer.
   MULTI_HEAP_BLOCK_OWNER expands to an optional owner field declaration
   (see multi_heap_platform.h), set via MULTI_HEAP_SET_BLOCK_OWNER(). */
typedef struct {
    uint32_t head_canary;   /* must hold HEAD_CANARY_PATTERN while the block is live */
    MULTI_HEAP_BLOCK_OWNER
    size_t alloc_size;      /* usable size requested by the caller, excluding poison overhead */
} poison_head_t;

/* Trailer written immediately after the allocated buffer. The trailer may be
   unaligned, so it is accessed byte-wise (memcpy) when not pointer-aligned. */
typedef struct {
    uint32_t tail_canary;   /* must hold TAIL_CANARY_PATTERN while the block is live */
} poison_tail_t;

/* Extra bytes added to every allocation to hold the poison head + tail */
#define POISON_OVERHEAD (sizeof(poison_head_t) + sizeof(poison_tail_t))
63 
64 /* Given a "poisoned" region with pre-data header 'head', and actual data size 'alloc_size', fill in the head and tail
65    region checks.
66 
67    Returns the pointer to the actual usable data buffer (ie after 'head')
68 */
/* Given a "poisoned" region with pre-data header 'head' and usable data size
   'alloc_size', write the head & tail canary structures around the buffer.

   Returns the pointer to the usable data buffer (ie just after 'head'). */
__attribute__((noinline))  static uint8_t *poison_allocated_region(poison_head_t *head, size_t alloc_size)
{
    uint8_t *data = (uint8_t *)(&head[1]); /* usable buffer begins right after the head structure */

    head->head_canary = HEAD_CANARY_PATTERN;
    head->alloc_size = alloc_size;
    MULTI_HEAP_SET_BLOCK_OWNER(head);

    poison_tail_t *tail = (poison_tail_t *)(data + alloc_size);
    const uint32_t canary_value = TAIL_CANARY_PATTERN;
    if (((intptr_t)tail % sizeof(void *)) != 0) {
        /* tail is not pointer-aligned: store the canary byte-wise */
        memcpy(&tail->tail_canary, &canary_value, sizeof(canary_value));
    } else {
        tail->tail_canary = canary_value;
    }

    return data;
}
87 
88 /* Given a pointer to some allocated data, check the head & tail poison structures (before & after it) that were
89    previously injected by poison_allocated_region().
90 
91    Returns a pointer to the poison header structure, or NULL if the poison structures are corrupt.
92 */
verify_allocated_region(void * data,bool print_errors)93 __attribute__((noinline)) static poison_head_t *verify_allocated_region(void *data, bool print_errors)
94 {
95     poison_head_t *head = (poison_head_t *)((intptr_t)data - sizeof(poison_head_t));
96     poison_tail_t *tail = (poison_tail_t *)((intptr_t)data + head->alloc_size);
97 
98     /* check if the beginning of the data was overwritten */
99     if (head->head_canary != HEAD_CANARY_PATTERN) {
100         if (print_errors) {
101             MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Bad head at %p. Expected 0x%08x got 0x%08x\n", &head->head_canary,
102                    HEAD_CANARY_PATTERN, head->head_canary);
103         }
104         return NULL;
105     }
106 
107     /* check if the end of the data was overrun */
108     uint32_t canary;
109     if ((intptr_t)tail % sizeof(void *) == 0) {
110         canary = tail->tail_canary;
111     } else {
112         /* tail is unaligned */
113         memcpy(&canary, &tail->tail_canary, sizeof(canary));
114     }
115     if (canary != TAIL_CANARY_PATTERN) {
116         if (print_errors) {
117             MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Bad tail at %p. Expected 0x%08x got 0x%08x\n", &tail->tail_canary,
118                    TAIL_CANARY_PATTERN, canary);
119         }
120         return NULL;
121     }
122 
123     return head;
124 }
125 
126 #ifdef SLOW
127 /* Go through a region that should have the specified fill byte 'pattern',
128    verify it.
129 
130    if expect_free is true, expect FREE_FILL_PATTERN otherwise MALLOC_FILL_PATTERN.
131 
132    if swap_pattern is true, swap patterns in the buffer (ie replace MALLOC_FILL_PATTERN with FREE_FILL_PATTERN, and vice versa.)
133 
134    Returns true if verification checks out.
135 
136    This function has the attribute noclone to prevent the compiler to create a clone on flash where expect_free is removed (as this
137    function is called only with expect_free == true throughout the component).
138 */
139 __attribute__((noinline)) NOCLONE_ATTR
verify_fill_pattern(void * data,size_t size,const bool print_errors,const bool expect_free,bool swap_pattern)140 static bool verify_fill_pattern(void *data, size_t size, const bool print_errors, const bool expect_free, bool swap_pattern)
141 {
142     const uint32_t FREE_FILL_WORD = (FREE_FILL_PATTERN << 24) | (FREE_FILL_PATTERN << 16) | (FREE_FILL_PATTERN << 8) | FREE_FILL_PATTERN;
143     const uint32_t MALLOC_FILL_WORD = (MALLOC_FILL_PATTERN << 24) | (MALLOC_FILL_PATTERN << 16) | (MALLOC_FILL_PATTERN << 8) | MALLOC_FILL_PATTERN;
144 
145     const uint32_t EXPECT_WORD = expect_free ? FREE_FILL_WORD : MALLOC_FILL_WORD;
146     const uint32_t REPLACE_WORD = expect_free ? MALLOC_FILL_WORD : FREE_FILL_WORD;
147     bool valid = true;
148 
149     /* Use 4-byte operations as much as possible */
150     if ((intptr_t)data % 4 == 0) {
151         uint32_t *p = data;
152         while (size >= 4) {
153             if (*p != EXPECT_WORD) {
154                 if (print_errors) {
155                     MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Invalid data at %p. Expected 0x%08x got 0x%08x\n", p, EXPECT_WORD, *p);
156                 }
157                 valid = false;
158 #ifndef NDEBUG
159                 /* If an assertion is going to fail as soon as we're done verifying the pattern, leave the rest of the
160                    buffer contents as-is for better post-mortem analysis
161                 */
162                 swap_pattern = false;
163 #endif
164             }
165             if (swap_pattern) {
166                 *p = REPLACE_WORD;
167             }
168             p++;
169             size -= 4;
170         }
171         data = p;
172     }
173 
174     uint8_t *p = data;
175     for (size_t i = 0; i < size; i++) {
176         if (p[i] != (uint8_t)EXPECT_WORD) {
177             if (print_errors) {
178                 MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Invalid data at %p. Expected 0x%02x got 0x%02x\n", p, (uint8_t)EXPECT_WORD, *p);
179             }
180             valid = false;
181 #ifndef NDEBUG
182             swap_pattern = false; // same as above
183 #endif
184         }
185         if (swap_pattern) {
186             p[i] = (uint8_t)REPLACE_WORD;
187         }
188     }
189     return valid;
190 }
191 
/*!
 * @brief Definition of the weak function declared in TLSF repository.
 * The call of this function assures that the header of an absorbed
 * block is filled with the correct pattern in case of comprehensive
 * heap poisoning.
 *
 * @param start: pointer to the start of the memory region to fill
 * @param size: size of the memory region to fill
 * @param is_free: Indicates whether the region should be filled with the
 * after-free pattern (true) or the after-allocation pattern (false).
 */
void block_absorb_post_hook(void *start, size_t size, bool is_free)
{
    multi_heap_internal_poison_fill_region(start, size, is_free);
}
207 #endif
208 
/* Allocate 'size' bytes aligned to 'alignment' from 'heap', with poison
   head/tail wrapped around the returned buffer.

   Restructured for consistency with multi_heap_malloc(): a single unlock/return
   path instead of a duplicated unlock in an else branch. Behavior unchanged. */
void *multi_heap_aligned_alloc(multi_heap_handle_t heap, size_t size, size_t alignment)
{
    if (!size) {
        return NULL;
    }

    /* reject sizes whose poisoned total would overflow size_t */
    if (size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }

    multi_heap_internal_lock(heap);
    /* the offset argument makes the impl align the buffer *after* the poison head */
    poison_head_t *head = multi_heap_aligned_alloc_impl_offs(heap, size + POISON_OVERHEAD,
                                                             alignment, sizeof(poison_head_t));
    uint8_t *data = NULL;
    if (head != NULL) {
        data = poison_allocated_region(head, size);
#ifdef SLOW
        /* check everything we got back is FREE_FILL_PATTERN & swap for MALLOC_FILL_PATTERN */
        bool ret = verify_fill_pattern(data, size, true, true, true);
        assert( ret );
#endif
    }

    multi_heap_internal_unlock(heap);
    return data;
}
239 
/* Allocate 'size' usable bytes from 'heap', wrapped with poison head/tail.
   Returns NULL for zero size, on overflow of the poisoned total, or when the
   underlying heap is exhausted. */
void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
{
    /* zero-size requests and sizes whose poisoned total would overflow
       size_t both yield NULL */
    if (size == 0 || size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }

    multi_heap_internal_lock(heap);
    uint8_t *data = NULL;
    poison_head_t *head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
    if (head != NULL) {
        data = poison_allocated_region(head, size);
#ifdef SLOW
        /* the freed-memory fill pattern must still be intact;
           replace it with the allocated-memory pattern */
        bool ok = verify_fill_pattern(data, size, true, true, true);
        assert(ok);
#endif
    }
    multi_heap_internal_unlock(heap);
    return data;
}
265 
266 /* This function has the noclone attribute to prevent the compiler to optimize out the
267  * check for p == NULL and create a clone function placed in flash. */
multi_heap_free(multi_heap_handle_t heap,void * p)268 NOCLONE_ATTR void multi_heap_free(multi_heap_handle_t heap, void *p)
269 {
270     if (p == NULL) {
271         return;
272     }
273     multi_heap_internal_lock(heap);
274 
275     poison_head_t *head = verify_allocated_region(p, true);
276     assert(head != NULL);
277 
278     #ifdef SLOW
279     /* replace everything with FREE_FILL_PATTERN, including the poison head/tail */
280     memset(head, FREE_FILL_PATTERN,
281            head->alloc_size + POISON_OVERHEAD);
282     #endif
283     multi_heap_free_impl(heap, head);
284 
285     multi_heap_internal_unlock(heap);
286 }
287 
/* Free an aligned allocation.

   The poison head always sits directly before the returned pointer (see
   multi_heap_aligned_alloc, which passes sizeof(poison_head_t) as the
   alignment offset), so freeing is identical to multi_heap_free(). */
void multi_heap_aligned_free(multi_heap_handle_t heap, void *p)
{
    multi_heap_free(heap, p);
}
292 
/* Resize allocation 'p' to 'size' usable bytes, preserving poison structures.

   Follows the standard realloc contract: realloc(NULL, n) == malloc(n),
   realloc(p, 0) frees p and returns NULL. Returns NULL (leaving 'p' valid)
   if the poisoned total would overflow or the heap is exhausted. */
void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
{
    poison_head_t *head = NULL;
    poison_head_t *new_head;
    void *result = NULL;

    /* reject sizes whose poisoned total would overflow size_t */
    if(size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }
    /* realloc(NULL, size) behaves as malloc(size) */
    if (p == NULL) {
        return multi_heap_malloc(heap, size);
    }
    /* realloc(p, 0) behaves as free(p) */
    if (size == 0) {
        multi_heap_free(heap, p);
        return NULL;
    }

    /* p != NULL, size != 0 */
    head = verify_allocated_region(p, true);
    assert(head != NULL);

    multi_heap_internal_lock(heap);

#ifndef SLOW
    new_head = multi_heap_realloc_impl(heap, head, size + POISON_OVERHEAD);
    if (new_head != NULL) {
        /* For "fast" poisoning, we only overwrite the head/tail of the new block so it's safe
           to poison, so no problem doing this even if realloc resized in place.
        */
        result = poison_allocated_region(new_head, size);
    }
#else // SLOW
    /* When slow poisoning is enabled, it becomes very fiddly to try and correctly fill memory when resizing in place
       (where the buffer may be moved (including to an overlapping address with the old buffer), grown, or shrunk in
       place.)

       For now we just malloc a new buffer, copy, and free. :|

       Note: If this ever changes, multi_heap defrag realloc test should be enabled.
    */
    size_t orig_alloc_size = head->alloc_size;

    new_head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
    if (new_head != NULL) {
        result = poison_allocated_region(new_head, size);
        /* copy only as many bytes as both buffers can hold */
        memcpy(result, p, MIN(size, orig_alloc_size));
        multi_heap_free(heap, p);
    }
#endif

    multi_heap_internal_unlock(heap);

    return result;
}
347 
/* Return the caller-visible data address for 'block', ie the address just
   past the poison header. */
void *multi_heap_get_block_address(multi_heap_block_handle_t block)
{
    return (char *)multi_heap_get_block_address_impl(block) + sizeof(poison_head_t);
}
353 
/* Return the owner recorded in the poison head of 'block' (the impl address
   points at the poison head, not the caller-visible buffer). */
void *multi_heap_get_block_owner(multi_heap_block_handle_t block)
{
    poison_head_t *head = (poison_head_t *)multi_heap_get_block_address_impl(block);
    return MULTI_HEAP_GET_BLOCK_OWNER(head);
}
358 
multi_heap_register(void * start,size_t size)359 multi_heap_handle_t multi_heap_register(void *start, size_t size)
360 {
361 #ifdef SLOW
362     if (start != NULL) {
363         memset(start, FREE_FILL_PATTERN, size);
364     }
365 #endif
366 #if CONFIG_HEAP_TLSF_USE_ROM_IMPL
367     tlsf_poison_fill_pfunc_set(multi_heap_internal_poison_fill_region);
368     tlsf_poison_check_pfunc_set(multi_heap_internal_check_block_poisoning);
369 #endif // CONFIG_HEAP_TLSF_USE_ROM_IMPL
370     return multi_heap_register_impl(start, size);
371 }
372 
subtract_poison_overhead(size_t * arg)373 static inline __attribute__((always_inline)) void subtract_poison_overhead(size_t *arg) {
374     if (*arg > POISON_OVERHEAD) {
375         *arg -= POISON_OVERHEAD;
376     } else {
377         *arg = 0;
378     }
379 }
380 
/* Return the usable size of allocation 'p', excluding poison overhead.
   Asserts (via verify_allocated_region) that the canaries are intact. */
size_t multi_heap_get_allocated_size(multi_heap_handle_t heap, void *p)
{
    poison_head_t *head = verify_allocated_region(p, true);
    assert(head != NULL);

    size_t usable = multi_heap_get_allocated_size_impl(heap, head);
    subtract_poison_overhead(&usable);
    return usable;
}
389 
/* Fill 'info' with heap statistics, adjusted so poison overhead is invisible
   to the caller. */
void multi_heap_get_info(multi_heap_handle_t heap, multi_heap_info_t *info)
{
    multi_heap_get_info_impl(heap, info);

    /* don't count the per-block poison head & tail in the allocated total */
    info->total_allocated_bytes -= info->allocated_blocks * POISON_OVERHEAD;

    /* trim the free-space figures too, so the caller isn't led to believe a
       block of exactly that size could be allocated */
    subtract_poison_overhead(&info->largest_free_block);
    subtract_poison_overhead(&info->total_free_bytes);
    subtract_poison_overhead(&info->minimum_free_bytes);
}
402 
/* Return the current free space, minus the overhead a new allocation would need. */
size_t multi_heap_free_size(multi_heap_handle_t heap)
{
    size_t free_bytes = multi_heap_free_size_impl(heap);
    subtract_poison_overhead(&free_bytes);
    return free_bytes;
}
409 
/* Return the historical low-water mark of free space, minus poison overhead. */
size_t multi_heap_minimum_free_size(multi_heap_handle_t heap)
{
    size_t min_bytes = multi_heap_minimum_free_size_impl(heap);
    subtract_poison_overhead(&min_bytes);
    return min_bytes;
}
416 
417 /* Internal hooks used by multi_heap to manage poisoning, while keeping some modularity */
418 
multi_heap_internal_check_block_poisoning(void * start,size_t size,bool is_free,bool print_errors)419 bool multi_heap_internal_check_block_poisoning(void *start, size_t size, bool is_free, bool print_errors)
420 {
421     if (is_free) {
422 #ifdef SLOW
423         return verify_fill_pattern(start, size, print_errors, true, false);
424 #else
425         return true; /* can only verify empty blocks in SLOW mode */
426 #endif
427     } else {
428         void *data = (void *)((intptr_t)start + sizeof(poison_head_t));
429         poison_head_t *head = verify_allocated_region(data, print_errors);
430         if (head != NULL && head->alloc_size > size - POISON_OVERHEAD) {
431             /* block can be bigger than alloc_size, for reasons of alignment & fragmentation,
432                but block can never be smaller than head->alloc_size... */
433             if (print_errors) {
434                 MULTI_HEAP_STDERR_PRINTF("CORRUPT HEAP: Size at %p expected <=0x%08x got 0x%08x\n", &head->alloc_size,
435                        size - POISON_OVERHEAD, head->alloc_size);
436             }
437             return false;
438         }
439         return head != NULL;
440     }
441 }
442 
multi_heap_internal_poison_fill_region(void * start,size_t size,bool is_free)443 void multi_heap_internal_poison_fill_region(void *start, size_t size, bool is_free)
444 {
445     memset(start, is_free ? FREE_FILL_PATTERN : MALLOC_FILL_PATTERN, size);
446 }
447 
448 #else // !MULTI_HEAP_POISONING
449 
450 #ifdef MULTI_HEAP_POISONING_SLOW
451 #error "MULTI_HEAP_POISONING_SLOW requires MULTI_HEAP_POISONING"
452 #endif
453 
454 #endif  // MULTI_HEAP_POISONING
455