// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at

//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string.h>
#include <sdkconfig.h>
#include "soc/soc_memory_layout.h"
#include "esp_attr.h"
#include "esp_cpu.h"
#include "esp_macros.h"

/* Encode the CPU ID in the LSB of the ccount value */
inline static uint32_t get_ccount(void)
{
    uint32_t ccount = esp_cpu_get_cycle_count() & ~3;
#ifndef CONFIG_FREERTOS_UNICORE
    ccount |= xPortGetCoreID();
#endif
    return ccount;
}

/* Architecture-specific return value of __builtin_return_address which
 * should be interpreted as an invalid address.
 */
#ifdef __XTENSA__
#define HEAP_ARCH_INVALID_PC  0x40000000
#else
#define HEAP_ARCH_INVALID_PC  0x00000000
#endif

// Caller is 2 stack frames deeper than we care about
#define STACK_OFFSET  2

#define TEST_STACK(N) do {                                          \
        if (STACK_DEPTH == N) {                                     \
            return;                                                 \
        }                                                           \
        callers[N] = __builtin_return_address(N+STACK_OFFSET);      \
        if (!esp_ptr_executable(callers[N])                         \
            || callers[N] == (void*) HEAP_ARCH_INVALID_PC) {        \
            callers[N] = 0;                                         \
            return;                                                 \
        }                                                           \
    } while(0)
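/* For illustration only (not generated output): with STACK_OFFSET == 2,
   TEST_STACK(1) expands to roughly the following, wrapped in do { ... } while (0):

       if (STACK_DEPTH == 1) {
           return;
       }
       callers[1] = __builtin_return_address(1 + 2);
       if (!esp_ptr_executable(callers[1])
           || callers[1] == (void*) HEAP_ARCH_INVALID_PC) {
           callers[1] = 0;
           return;
       }

   i.e. each step either stops once the configured depth is reached, or stores
   one return address and bails out as soon as a non-executable or
   architecture-specific invalid PC is seen.
*/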
/* Static function to read the call stack for a traced heap call.

   Calls to __builtin_return_address are "unrolled" via the TEST_STACK macro, as gcc requires the
   argument to be a compile-time constant.
*/
static HEAP_IRAM_ATTR __attribute__((noinline)) void get_call_stack(void **callers)
{
    bzero(callers, sizeof(void *) * STACK_DEPTH);
    TEST_STACK(0);
    TEST_STACK(1);
    TEST_STACK(2);
    TEST_STACK(3);
    TEST_STACK(4);
    TEST_STACK(5);
    TEST_STACK(6);
    TEST_STACK(7);
    TEST_STACK(8);
    TEST_STACK(9);
}

ESP_STATIC_ASSERT(STACK_DEPTH >= 0 && STACK_DEPTH <= 10, "CONFIG_HEAP_TRACING_STACK_DEPTH must be in range 0-10");


typedef enum {
    TRACE_MALLOC_CAPS,
    TRACE_MALLOC_DEFAULT
} trace_malloc_mode_t;


void *__real_heap_caps_malloc(size_t size, uint32_t caps);
void *__real_heap_caps_malloc_default( size_t size );
void *__real_heap_caps_realloc_default( void *ptr, size_t size );

/* trace any 'malloc' event */
static HEAP_IRAM_ATTR __attribute__((noinline)) void *trace_malloc(size_t size, uint32_t caps, trace_malloc_mode_t mode)
{
    uint32_t ccount = get_ccount();
    void *p;

    if ( mode == TRACE_MALLOC_CAPS ) {
        p = __real_heap_caps_malloc(size, caps);
    } else { //TRACE_MALLOC_DEFAULT
        p = __real_heap_caps_malloc_default(size);
    }

    heap_trace_record_t rec = {
        .address = p,
        .ccount = ccount,
        .size = size,
    };
    get_call_stack(rec.alloced_by);
    record_allocation(&rec);
    return p;
}

void __real_heap_caps_free(void *p);

/* trace any 'free' event */
static HEAP_IRAM_ATTR __attribute__((noinline)) void trace_free(void *p)
{
    void *callers[STACK_DEPTH];
    get_call_stack(callers);
    record_free(p, callers);

    __real_heap_caps_free(p);
}

void * __real_heap_caps_realloc(void *p, size_t size, uint32_t caps);

/* trace any 'realloc' event */
static HEAP_IRAM_ATTR __attribute__((noinline)) void *trace_realloc(void *p, size_t size, uint32_t caps, trace_malloc_mode_t mode)
{
    void *callers[STACK_DEPTH];
    uint32_t ccount = get_ccount();
    void *r;

    /* trace realloc as free-then-alloc */
    get_call_stack(callers);
    record_free(p, callers);

    if (mode == TRACE_MALLOC_CAPS ) {
        r = __real_heap_caps_realloc(p, size, caps);
    } else { //TRACE_MALLOC_DEFAULT
        r = __real_heap_caps_realloc_default(p, size);
    }
    /* realloc with zero size is a free */
    if (size != 0) {
        heap_trace_record_t rec = {
            .address = r,
            .ccount = ccount,
            .size = size,
        };
        memcpy(rec.alloced_by, callers, sizeof(void *) * STACK_DEPTH);
        record_allocation(&rec);
    }
    return r;
}

/* Note: this changes the behaviour of libc malloc/realloc/free slightly,
   as they no longer go via the libc functions in ROM, but the end result
   is more or less the same.
*/
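/* The __wrap_*() definitions below rely on the GNU linker's symbol wrapping:
   when the image is linked with --wrap=<symbol>, every undefined reference to
   <symbol> is redirected to __wrap_<symbol>, while the original implementation
   stays reachable as __real_<symbol>. A sketch of the link options this file
   assumes (the exact list is supplied by the heap component's build scripts,
   shown here only for illustration):

       -Wl,--wrap=malloc -Wl,--wrap=free -Wl,--wrap=realloc -Wl,--wrap=calloc
       -Wl,--wrap=heap_caps_malloc -Wl,--wrap=heap_caps_free -Wl,--wrap=heap_caps_realloc
       -Wl,--wrap=heap_caps_malloc_default -Wl,--wrap=heap_caps_realloc_default
*/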
HEAP_IRAM_ATTR void *__wrap_malloc(size_t size)
{
    return trace_malloc(size, 0, TRACE_MALLOC_DEFAULT);
}

HEAP_IRAM_ATTR void __wrap_free(void *p)
{
    trace_free(p);
}

HEAP_IRAM_ATTR void *__wrap_realloc(void *p, size_t size)
{
    return trace_realloc(p, size, 0, TRACE_MALLOC_DEFAULT);
}

HEAP_IRAM_ATTR void *__wrap_calloc(size_t nmemb, size_t size)
{
    size = size * nmemb;
    void *result = trace_malloc(size, 0, TRACE_MALLOC_DEFAULT);
    if (result != NULL) {
        memset(result, 0, size);
    }
    return result;
}

HEAP_IRAM_ATTR void *__wrap_heap_caps_malloc(size_t size, uint32_t caps)
{
    return trace_malloc(size, caps, TRACE_MALLOC_CAPS);
}

void __wrap_heap_caps_free(void *p) __attribute__((alias("__wrap_free")));

HEAP_IRAM_ATTR void *__wrap_heap_caps_realloc(void *p, size_t size, uint32_t caps)
{
    return trace_realloc(p, size, caps, TRACE_MALLOC_CAPS);
}

HEAP_IRAM_ATTR void *__wrap_heap_caps_malloc_default( size_t size )
{
    return trace_malloc(size, 0, TRACE_MALLOC_DEFAULT);
}

HEAP_IRAM_ATTR void *__wrap_heap_caps_realloc_default( void *ptr, size_t size )
{
    return trace_realloc(ptr, size, 0, TRACE_MALLOC_DEFAULT);
}
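/* Usage sketch (application side, not part of this file): the wrappers above
   only capture events; record_allocation(), record_free(), heap_trace_record_t,
   STACK_DEPTH and HEAP_IRAM_ATTR are provided by the surrounding heap tracing
   implementation. Assuming the standard esp_heap_trace.h API, tracing is
   typically driven roughly like this:

       #include "esp_heap_trace.h"

       #define NUM_RECORDS 100
       static heap_trace_record_t trace_records[NUM_RECORDS];

       void trace_some_code(void)
       {
           ESP_ERROR_CHECK(heap_trace_init_standalone(trace_records, NUM_RECORDS));
           ESP_ERROR_CHECK(heap_trace_start(HEAP_TRACE_LEAKS));
           // ... allocations here are routed through trace_malloc()/trace_free() ...
           ESP_ERROR_CHECK(heap_trace_stop());
           heap_trace_dump();
       }
*/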