// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at

//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string.h>
#include <sdkconfig.h>
#include "soc/soc_memory_layout.h"
#include "esp_attr.h"

/* Encode the CPU ID in the LSB of the ccount value.

   The two low bits of the cycle counter are cleared so the calling
   core's ID can be stored there; on single-core (UNICORE) builds the
   low bits are simply left zero. */
inline static uint32_t get_ccount(void)
{
    uint32_t ccount = cpu_hal_get_cycle_count() & ~3;
#ifndef CONFIG_FREERTOS_UNICORE
    ccount |= xPortGetCoreID();
#endif
    return ccount;
}

/* Architecture-specific return value of __builtin_return_address which
 * should be interpreted as an invalid address.
 */
#ifdef __XTENSA__
#define HEAP_ARCH_INVALID_PC 0x40000000
#else
#define HEAP_ARCH_INVALID_PC 0x00000000
#endif

// Caller is 2 stack frames deeper than we care about
#define STACK_OFFSET 2

/* Capture return address N (offset past our own frames) into callers[N].
   Returns from the enclosing function once STACK_DEPTH entries are
   filled, or as soon as a non-executable / architecture-invalid PC is
   found (treated as the end of the useful backtrace). */
#define TEST_STACK(N) do { \
        if (STACK_DEPTH == N) { \
            return; \
        } \
        callers[N] = __builtin_return_address(N+STACK_OFFSET); \
        if (!esp_ptr_executable(callers[N]) \
            || callers[N] == (void*) HEAP_ARCH_INVALID_PC) { \
            callers[N] = 0; \
            return; \
        } \
    } while(0)

/* Static function to read the call stack for a traced heap call.

   Calls to __builtin_return_address are "unrolled" via TEST_STACK macro as gcc requires the
   argument to be a compile-time constant. 
*/
static IRAM_ATTR __attribute__((noinline)) void get_call_stack(void **callers)
{
    /* Zero the whole array first: TEST_STACK may return early, leaving
       the remaining entries as NULL terminators for the backtrace. */
    bzero(callers, sizeof(void *) * STACK_DEPTH);
    TEST_STACK(0);
    TEST_STACK(1);
    TEST_STACK(2);
    TEST_STACK(3);
    TEST_STACK(4);
    TEST_STACK(5);
    TEST_STACK(6);
    TEST_STACK(7);
    TEST_STACK(8);
    TEST_STACK(9);
}

/* Only 10 TEST_STACK() invocations exist above, so the configured depth
   must not exceed 10. */
_Static_assert(STACK_DEPTH >= 0 && STACK_DEPTH <= 10, "CONFIG_HEAP_TRACING_STACK_DEPTH must be in range 0-10");


/* Selects which underlying "real" allocator a traced call is routed to. */
typedef enum {
    TRACE_MALLOC_CAPS,     // heap_caps_* variant with explicit capability flags
    TRACE_MALLOC_DEFAULT   // *_default variant (no caps argument)
} trace_malloc_mode_t;


/* Un-wrapped allocator entry points (provided via the linker's --wrap). */
void *__real_heap_caps_malloc(size_t size, uint32_t caps);
void *__real_heap_caps_malloc_default( size_t size );
void *__real_heap_caps_realloc_default( void *ptr, size_t size );

/* trace any 'malloc' event: perform the real allocation, then record the
   result together with the cycle count and caller backtrace. */
static IRAM_ATTR __attribute__((noinline)) void *trace_malloc(size_t size, uint32_t caps, trace_malloc_mode_t mode)
{
    uint32_t ccount = get_ccount();
    void *p;

    if ( mode == TRACE_MALLOC_CAPS ) {
        p = __real_heap_caps_malloc(size, caps);
    } else { //TRACE_MALLOC_DEFAULT
        p = __real_heap_caps_malloc_default(size);
    }

    heap_trace_record_t rec = {
        .address = p,
        .ccount = ccount,
        .size = size,
    };
    get_call_stack(rec.alloced_by);
    record_allocation(&rec);
    return p;
}

void __real_heap_caps_free(void *p);

/* trace any 'free' event: record the free (with backtrace), then forward
   to the real free. */
static IRAM_ATTR __attribute__((noinline)) void trace_free(void *p)
{
    void *callers[STACK_DEPTH];
    get_call_stack(callers);
    record_free(p, callers);

    __real_heap_caps_free(p);
}

void * __real_heap_caps_realloc(void *p, size_t size, uint32_t caps);

/* trace any 'realloc' event */
static IRAM_ATTR __attribute__((noinline)) void *trace_realloc(void *p, size_t size, uint32_t caps, trace_malloc_mode_t mode)
{
    void *callers[STACK_DEPTH];
    uint32_t ccount = get_ccount();
    void *r;

    /* trace realloc as free-then-alloc */
    get_call_stack(callers);
    record_free(p, 
callers); 132 133 if (mode == TRACE_MALLOC_CAPS ) { 134 r = __real_heap_caps_realloc(p, size, caps); 135 } else { //TRACE_MALLOC_DEFAULT 136 r = __real_heap_caps_realloc_default(p, size); 137 } 138 /* realloc with zero size is a free */ 139 if (size != 0) { 140 heap_trace_record_t rec = { 141 .address = r, 142 .ccount = ccount, 143 .size = size, 144 }; 145 memcpy(rec.alloced_by, callers, sizeof(void *) * STACK_DEPTH); 146 record_allocation(&rec); 147 } 148 return r; 149} 150 151/* Note: this changes the behaviour of libc malloc/realloc/free a bit, 152 as they no longer go via the libc functions in ROM. But more or less 153 the same in the end. */ 154 155IRAM_ATTR void *__wrap_malloc(size_t size) 156{ 157 return trace_malloc(size, 0, TRACE_MALLOC_DEFAULT); 158} 159 160IRAM_ATTR void __wrap_free(void *p) 161{ 162 trace_free(p); 163} 164 165IRAM_ATTR void *__wrap_realloc(void *p, size_t size) 166{ 167 return trace_realloc(p, size, 0, TRACE_MALLOC_DEFAULT); 168} 169 170IRAM_ATTR void *__wrap_calloc(size_t nmemb, size_t size) 171{ 172 size = size * nmemb; 173 void *result = trace_malloc(size, 0, TRACE_MALLOC_DEFAULT); 174 if (result != NULL) { 175 memset(result, 0, size); 176 } 177 return result; 178} 179 180IRAM_ATTR void *__wrap_heap_caps_malloc(size_t size, uint32_t caps) 181{ 182 return trace_malloc(size, caps, TRACE_MALLOC_CAPS); 183} 184 185void __wrap_heap_caps_free(void *p) __attribute__((alias("__wrap_free"))); 186 187IRAM_ATTR void *__wrap_heap_caps_realloc(void *p, size_t size, uint32_t caps) 188{ 189 return trace_realloc(p, size, caps, TRACE_MALLOC_CAPS); 190} 191 192IRAM_ATTR void *__wrap_heap_caps_malloc_default( size_t size ) 193{ 194 return trace_malloc(size, 0, TRACE_MALLOC_DEFAULT); 195} 196 197IRAM_ATTR void *__wrap_heap_caps_realloc_default( void *ptr, size_t size ) 198{ 199 return trace_realloc(ptr, size, 0, TRACE_MALLOC_DEFAULT); 200} 201