1 /**
2  * @file lv_mem.c
3  * General and portable implementation of malloc and free.
4  * The dynamic memory monitoring is also supported.
5  */
6 
7 /*********************
8  *      INCLUDES
9  *********************/
10 #include "lv_mem.h"
11 #include "lv_tlsf.h"
12 #include "lv_gc.h"
13 #include "lv_assert.h"
14 #include "lv_log.h"
15 
16 #if LV_MEM_CUSTOM != 0
17     #include LV_MEM_CUSTOM_INCLUDE
18 #endif
19 
20 #ifdef LV_MEM_POOL_INCLUDE
21     #include LV_MEM_POOL_INCLUDE
22 #endif
23 
24 /*********************
25  *      DEFINES
26  *********************/
27 /*memset the allocated memories to 0xaa and freed memories to 0xbb (just for testing purposes)*/
28 #ifndef LV_MEM_ADD_JUNK
29     #define LV_MEM_ADD_JUNK  0
30 #endif
31 
32 #ifdef LV_ARCH_64
33     #define MEM_UNIT         uint64_t
34     #define ALIGN_MASK       0x7
35 #else
36     #define MEM_UNIT         uint32_t
37     #define ALIGN_MASK       0x3
38 #endif
39 
40 #define ZERO_MEM_SENTINEL  0xa1b2c3d4
41 
42 /**********************
43  *      TYPEDEFS
44  **********************/
45 
46 /**********************
47  *  STATIC PROTOTYPES
48  **********************/
49 #if LV_MEM_CUSTOM == 0
50     static void lv_mem_walker(void * ptr, size_t size, int used, void * user);
51 #endif
52 
53 /**********************
54  *  STATIC VARIABLES
55  **********************/
56 #if LV_MEM_CUSTOM == 0
57     static lv_tlsf_t tlsf;
58     static uint32_t cur_used;
59     static uint32_t max_used;
60 #endif
61 
62 static uint32_t zero_mem = ZERO_MEM_SENTINEL; /*Give the address of this variable if 0 byte should be allocated*/
63 
64 /**********************
65  *      MACROS
66  **********************/
67 #if LV_LOG_TRACE_MEM
68     #define MEM_TRACE(...) LV_LOG_TRACE(__VA_ARGS__)
69 #else
70     #define MEM_TRACE(...)
71 #endif
72 
73 #define COPY32 *d32 = *s32; d32++; s32++;
74 #define COPY8 *d8 = *s8; d8++; s8++;
75 #define SET32(x) *d32 = x; d32++;
76 #define SET8(x) *d8 = x; d8++;
77 #define REPEAT8(expr) expr expr expr expr expr expr expr expr
78 
79 /**********************
80  *   GLOBAL FUNCTIONS
81  **********************/
82 
/**
 * Initialize the dyn_mem module (work memory and other variables)
 * @note With `LV_MEM_CUSTOM != 0` there is nothing to set up; only the
 *       optional junk-fill warning is emitted.
 */
void lv_mem_init(void)
{
#if LV_MEM_CUSTOM == 0

#if LV_MEM_ADR == 0
#ifdef LV_MEM_POOL_ALLOC
    /*The pool memory is obtained from a user-supplied allocator macro*/
    tlsf = lv_tlsf_create_with_pool((void *)LV_MEM_POOL_ALLOC(LV_MEM_SIZE), LV_MEM_SIZE);
#else
    /*Allocate a large array to store the dynamically allocated data*/
    static LV_ATTRIBUTE_LARGE_RAM_ARRAY MEM_UNIT work_mem_int[LV_MEM_SIZE / sizeof(MEM_UNIT)];
    tlsf = lv_tlsf_create_with_pool((void *)work_mem_int, LV_MEM_SIZE);
#endif
#else
    /*Use the memory region fixed at the address given by LV_MEM_ADR*/
    tlsf = lv_tlsf_create_with_pool((void *)LV_MEM_ADR, LV_MEM_SIZE);
#endif
#endif

#if LV_MEM_ADD_JUNK
    LV_LOG_WARN("LV_MEM_ADD_JUNK is enabled which makes LVGL much slower");
#endif
}
107 
/**
 * Clean up the memory buffer which frees all the allocated memories.
 * @note It work only if `LV_MEM_CUSTOM == 0`
 */
void lv_mem_deinit(void)
{
#if LV_MEM_CUSTOM == 0
    /*Destroy the TLSF instance, then immediately re-create a fresh empty pool*/
    lv_tlsf_destroy(tlsf);
    lv_mem_init();
    /*NOTE(review): cur_used/max_used are not reset here — confirm whether the
     *usage statistics are intended to survive a deinit/init cycle*/
#endif
}
119 
/**
 * Allocate a memory dynamically
 * @param size size of the memory to allocate in bytes
 * @return pointer to the allocated memory, or NULL on failure
 */
void * lv_mem_alloc(size_t size)
{
    MEM_TRACE("allocating %lu bytes", (unsigned long)size);
    if(size == 0) {
        /*All zero-byte requests share the address of a sentinel variable;
         *lv_mem_free()/lv_mem_realloc() recognize this address and skip the allocator*/
        MEM_TRACE("using zero_mem");
        return &zero_mem;
    }

#if LV_MEM_CUSTOM == 0
    void * alloc = lv_tlsf_malloc(tlsf, size);
#else
    void * alloc = LV_MEM_CUSTOM_ALLOC(size);
#endif

    if(alloc == NULL) {
        LV_LOG_INFO("couldn't allocate memory (%lu bytes)", (unsigned long)size);
#if LV_LOG_LEVEL <= LV_LOG_LEVEL_INFO
        /*Log a snapshot of the heap state to help diagnose the OOM condition*/
        lv_mem_monitor_t mon;
        lv_mem_monitor(&mon);
        LV_LOG_INFO("used: %6d (%3d %%), frag: %3d %%, biggest free: %6d",
                    (int)(mon.total_size - mon.free_size), mon.used_pct, mon.frag_pct,
                    (int)mon.free_biggest_size);
#endif
    }
#if LV_MEM_ADD_JUNK
    else {
        /*Fill fresh allocations with 0xaa so reads of uninitialized memory stand out*/
        lv_memset(alloc, 0xaa, size);
    }
#endif

    if(alloc) {
#if LV_MEM_CUSTOM == 0
        /*NOTE(review): the counter is increased by the REQUESTED size, while
         *lv_mem_free() decreases it by the size reported by lv_tlsf_free();
         *if those differ (block overhead/rounding) cur_used can drift — confirm*/
        cur_used += size;
        max_used = LV_MAX(cur_used, max_used);
#endif
        MEM_TRACE("allocated at %p", alloc);
    }
    return alloc;
}
164 
/**
 * Free an allocated data
 * @param data pointer to an allocated memory (NULL and the zero-size
 *             sentinel are accepted and ignored)
 */
void lv_mem_free(void * data)
{
    MEM_TRACE("freeing %p", data);
    /*&zero_mem is the shared "allocation" returned for 0-byte requests; nothing to free*/
    if(data == &zero_mem) return;
    if(data == NULL) return;

#if LV_MEM_CUSTOM == 0
#  if LV_MEM_ADD_JUNK
    /*Overwrite the block with 0xbb BEFORE freeing so use-after-free is visible*/
    lv_memset(data, 0xbb, lv_tlsf_block_size(data));
#  endif
    size_t size = lv_tlsf_free(tlsf, data);
    /*Clamp at zero to avoid unsigned underflow of the usage counter*/
    if(cur_used > size) cur_used -= size;
    else cur_used = 0;
#else
    LV_MEM_CUSTOM_FREE(data);
#endif
}
186 
/**
 * Reallocate a memory with a new size. The old content will be kept.
 * @param data_p pointer to an allocated memory.
 * Its content will be copied to the new memory block and freed
 * @param new_size the desired new size in byte
 * @return pointer to the new memory, or NULL on failure (the old block stays valid)
 */
void * lv_mem_realloc(void * data_p, size_t new_size)
{
    MEM_TRACE("reallocating %p with %lu size", data_p, (unsigned long)new_size);
    if(new_size == 0) {
        /*Shrinking to zero: free the block and hand back the shared zero-size sentinel*/
        MEM_TRACE("using zero_mem");
        lv_mem_free(data_p);
        return &zero_mem;
    }

    /*The sentinel holds no user data, so growing it is a plain allocation*/
    if(data_p == &zero_mem) return lv_mem_alloc(new_size);

#if LV_MEM_CUSTOM == 0
    void * new_p = lv_tlsf_realloc(tlsf, data_p, new_size);
#else
    void * new_p = LV_MEM_CUSTOM_REALLOC(data_p, new_size);
#endif
    if(new_p == NULL) {
        LV_LOG_ERROR("couldn't allocate memory");
        return NULL;
    }

    /*NOTE(review): cur_used/max_used are not adjusted on this path, so the
     *usage statistics do not reflect size changes made via realloc — confirm intent*/
    MEM_TRACE("allocated at %p", new_p);
    return new_p;
}
218 
lv_mem_test(void)219 lv_res_t lv_mem_test(void)
220 {
221     if(zero_mem != ZERO_MEM_SENTINEL) {
222         LV_LOG_WARN("zero_mem is written");
223         return LV_RES_INV;
224     }
225 
226 #if LV_MEM_CUSTOM == 0
227     if(lv_tlsf_check(tlsf)) {
228         LV_LOG_WARN("failed");
229         return LV_RES_INV;
230     }
231 
232     if(lv_tlsf_check_pool(lv_tlsf_get_pool(tlsf))) {
233         LV_LOG_WARN("pool failed");
234         return LV_RES_INV;
235     }
236 #endif
237     MEM_TRACE("passed");
238     return LV_RES_OK;
239 }
240 
/**
 * Give information about the work memory of dynamic allocation
 * @param mon_p pointer to a lv_mem_monitor_t variable,
 *              the result of the analysis will be stored here
 * @note With `LV_MEM_CUSTOM != 0` the structure is only zeroed.
 */
void lv_mem_monitor(lv_mem_monitor_t * mon_p)
{
    /*Init the data*/
    lv_memset(mon_p, 0, sizeof(lv_mem_monitor_t));
#if LV_MEM_CUSTOM == 0
    MEM_TRACE("begin");

    /*Walk every block in the pool; lv_mem_walker accumulates counts and sizes into mon_p*/
    lv_tlsf_walk_pool(lv_tlsf_get_pool(tlsf), lv_mem_walker, mon_p);

    mon_p->total_size = LV_MEM_SIZE;
    mon_p->used_pct = 100 - (100U * mon_p->free_size) / mon_p->total_size;
    if(mon_p->free_size > 0) {
        /*Fragmentation = 100% minus the biggest free block's share of all free space*/
        mon_p->frag_pct = mon_p->free_biggest_size * 100U / mon_p->free_size;
        mon_p->frag_pct = 100 - mon_p->frag_pct;
    }
    else {
        mon_p->frag_pct = 0; /*no fragmentation if all the RAM is used*/
    }

    mon_p->max_used = max_used;

    MEM_TRACE("finished");
#endif
}
270 
271 
/**
 * Get a temporal buffer with the given size.
 * Buffers are kept in a small static pool and reused across calls;
 * release them with lv_mem_buf_release().
 * @param size the required size
 * @return pointer to a buffer of at least `size` bytes, or NULL on failure
 */
void * lv_mem_buf_get(uint32_t size)
{
    if(size == 0) return NULL;

    MEM_TRACE("begin, getting %d bytes", size);

    /*Try to find a free buffer with suitable size*/
    int8_t i_guess = -1;
    for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
        if(LV_GC_ROOT(lv_mem_buf[i]).used == 0 && LV_GC_ROOT(lv_mem_buf[i]).size >= size) {
            /*An exact-size match is best; take it immediately*/
            if(LV_GC_ROOT(lv_mem_buf[i]).size == size) {
                LV_GC_ROOT(lv_mem_buf[i]).used = 1;
                return LV_GC_ROOT(lv_mem_buf[i]).p;
            }
            else if(i_guess < 0) {
                i_guess = i;
            }
            /*If size of `i` is closer to `size` prefer it*/
            else if(LV_GC_ROOT(lv_mem_buf[i]).size < LV_GC_ROOT(lv_mem_buf[i_guess]).size) {
                i_guess = i;
            }
        }
    }

    /*Reuse the best (smallest sufficient) free buffer found above*/
    if(i_guess >= 0) {
        LV_GC_ROOT(lv_mem_buf[i_guess]).used = 1;
        MEM_TRACE("returning already allocated buffer (buffer id: %d, address: %p)", i_guess,
                  LV_GC_ROOT(lv_mem_buf[i_guess]).p);
        return LV_GC_ROOT(lv_mem_buf[i_guess]).p;
    }

    /*Reallocate a free buffer*/
    for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
        if(LV_GC_ROOT(lv_mem_buf[i]).used == 0) {
            /*if this fails you probably need to increase your LV_MEM_SIZE/heap size*/
            void * buf = lv_mem_realloc(LV_GC_ROOT(lv_mem_buf[i]).p, size);
            LV_ASSERT_MSG(buf != NULL, "Out of memory, can't allocate a new buffer (increase your LV_MEM_SIZE/heap size)");
            if(buf == NULL) return NULL;

            LV_GC_ROOT(lv_mem_buf[i]).used = 1;
            LV_GC_ROOT(lv_mem_buf[i]).size = size;
            LV_GC_ROOT(lv_mem_buf[i]).p    = buf;
            MEM_TRACE("allocated (buffer id: %d, address: %p)", i, LV_GC_ROOT(lv_mem_buf[i]).p);
            return LV_GC_ROOT(lv_mem_buf[i]).p;
        }
    }

    LV_LOG_ERROR("no more buffers. (increase LV_MEM_BUF_MAX_NUM)");
    LV_ASSERT_MSG(false, "No more buffers. Increase LV_MEM_BUF_MAX_NUM.");
    return NULL;
}
327 
328 /**
329  * Release a memory buffer
330  * @param p buffer to release
331  */
lv_mem_buf_release(void * p)332 void lv_mem_buf_release(void * p)
333 {
334     MEM_TRACE("begin (address: %p)", p);
335 
336     for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
337         if(LV_GC_ROOT(lv_mem_buf[i]).p == p) {
338             LV_GC_ROOT(lv_mem_buf[i]).used = 0;
339             return;
340         }
341     }
342 
343     LV_LOG_ERROR("p is not a known buffer");
344 }
345 
346 /**
347  * Free all memory buffers
348  */
lv_mem_buf_free_all(void)349 void lv_mem_buf_free_all(void)
350 {
351     for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
352         if(LV_GC_ROOT(lv_mem_buf[i]).p) {
353             lv_mem_free(LV_GC_ROOT(lv_mem_buf[i]).p);
354             LV_GC_ROOT(lv_mem_buf[i]).p = NULL;
355             LV_GC_ROOT(lv_mem_buf[i]).used = 0;
356             LV_GC_ROOT(lv_mem_buf[i]).size = 0;
357         }
358     }
359 }
360 
361 #if LV_MEMCPY_MEMSET_STD == 0
/**
 * Same as `memcpy` but optimized for 4 byte operation.
 * @param dst pointer to the destination buffer
 * @param src pointer to the source buffer
 * @param len number of byte to copy
 * @return `dst`
 */
void * LV_ATTRIBUTE_FAST_MEM lv_memcpy(void * dst, const void * src, size_t len)
{
    uint8_t * d8 = dst;
    const uint8_t * s8 = src;

    lv_uintptr_t d_align = (lv_uintptr_t)d8 & ALIGN_MASK;
    lv_uintptr_t s_align = (lv_uintptr_t)s8 & ALIGN_MASK;

    /*Byte copy for unaligned memories*/
    /*(when src and dst are misaligned differently, word copies are impossible)*/
    if(s_align != d_align) {
        /*Unrolled 32-byte chunks, then the remainder one byte at a time*/
        while(len > 32) {
            REPEAT8(COPY8);
            REPEAT8(COPY8);
            REPEAT8(COPY8);
            REPEAT8(COPY8);
            len -= 32;
        }
        while(len) {
            COPY8
            len--;
        }
        return dst;
    }

    /*Make the memories aligned*/
    if(d_align) {
        /*Copy the leading bytes up to the next word boundary*/
        d_align = ALIGN_MASK + 1 - d_align;
        while(d_align && len) {
            COPY8;
            d_align--;
            len--;
        }
    }

    /*Both pointers are now word aligned: copy 8 words (32 bytes) per iteration*/
    uint32_t * d32 = (uint32_t *)d8;
    const uint32_t * s32 = (uint32_t *)s8;
    while(len > 32) {
        REPEAT8(COPY32)
        len -= 32;
    }

    /*Then one word at a time*/
    while(len > 4) {
        COPY32;
        len -= 4;
    }

    /*Finally the trailing bytes (at most 4)*/
    d8 = (uint8_t *)d32;
    s8 = (const uint8_t *)s32;
    while(len) {
        COPY8
        len--;
    }

    return dst;
}
423 
/**
 * Same as `memset` but optimized for 4 byte operation.
 * @param dst pointer to the destination buffer
 * @param v value to set [0..255]
 * @param len number of byte to set
 */
void LV_ATTRIBUTE_FAST_MEM lv_memset(void * dst, uint8_t v, size_t len)
{

    uint8_t * d8 = (uint8_t *)dst;

    uintptr_t d_align = (lv_uintptr_t) d8 & ALIGN_MASK;

    /*Make the address aligned*/
    if(d_align) {
        /*Write single bytes up to the next word boundary*/
        d_align = ALIGN_MASK + 1 - d_align;
        while(d_align && len) {
            SET8(v);
            len--;
            d_align--;
        }
    }

    /*Replicate the byte value into all 4 bytes of a word*/
    uint32_t v32 = (uint32_t)v + ((uint32_t)v << 8) + ((uint32_t)v << 16) + ((uint32_t)v << 24);

    uint32_t * d32 = (uint32_t *)d8;

    /*Set 8 words (32 bytes) per iteration while possible*/
    while(len > 32) {
        REPEAT8(SET32(v32));
        len -= 32;
    }

    /*Then one word at a time*/
    while(len > 4) {
        SET32(v32);
        len -= 4;
    }

    /*Finally the trailing bytes*/
    d8 = (uint8_t *)d32;
    while(len) {
        SET8(v);
        len--;
    }
}
467 
468 /**
469  * Same as `memset(dst, 0x00, len)` but optimized for 4 byte operation.
470  * @param dst pointer to the destination buffer
471  * @param len number of byte to set
472  */
lv_memset_00(void * dst,size_t len)473 void LV_ATTRIBUTE_FAST_MEM lv_memset_00(void * dst, size_t len)
474 {
475     uint8_t * d8 = (uint8_t *)dst;
476     uintptr_t d_align = (lv_uintptr_t) d8 & ALIGN_MASK;
477 
478     /*Make the address aligned*/
479     if(d_align) {
480         d_align = ALIGN_MASK + 1 - d_align;
481         while(d_align && len) {
482             SET8(0);
483             len--;
484             d_align--;
485         }
486     }
487 
488     uint32_t * d32 = (uint32_t *)d8;
489     while(len > 32) {
490         REPEAT8(SET32(0));
491         len -= 32;
492     }
493 
494     while(len > 4) {
495         SET32(0);
496         len -= 4;
497     }
498 
499     d8 = (uint8_t *)d32;
500     while(len) {
501         SET8(0);
502         len--;
503     }
504 }
505 
506 /**
507  * Same as `memset(dst, 0xFF, len)` but optimized for 4 byte operation.
508  * @param dst pointer to the destination buffer
509  * @param len number of byte to set
510  */
lv_memset_ff(void * dst,size_t len)511 void LV_ATTRIBUTE_FAST_MEM lv_memset_ff(void * dst, size_t len)
512 {
513     uint8_t * d8 = (uint8_t *)dst;
514     uintptr_t d_align = (lv_uintptr_t) d8 & ALIGN_MASK;
515 
516     /*Make the address aligned*/
517     if(d_align) {
518         d_align = ALIGN_MASK + 1 - d_align;
519         while(d_align && len) {
520             SET8(0xFF);
521             len--;
522             d_align--;
523         }
524     }
525 
526     uint32_t * d32 = (uint32_t *)d8;
527     while(len > 32) {
528         REPEAT8(SET32(0xFFFFFFFF));
529         len -= 32;
530     }
531 
532     while(len > 4) {
533         SET32(0xFFFFFFFF);
534         len -= 4;
535     }
536 
537     d8 = (uint8_t *)d32;
538     while(len) {
539         SET8(0xFF);
540         len--;
541     }
542 }
543 
544 #endif /*LV_MEMCPY_MEMSET_STD*/
545 
546 /**********************
547  *   STATIC FUNCTIONS
548  **********************/
549 
550 #if LV_MEM_CUSTOM == 0
lv_mem_walker(void * ptr,size_t size,int used,void * user)551 static void lv_mem_walker(void * ptr, size_t size, int used, void * user)
552 {
553     LV_UNUSED(ptr);
554 
555     lv_mem_monitor_t * mon_p = user;
556     if(used) {
557         mon_p->used_cnt++;
558     }
559     else {
560         mon_p->free_cnt++;
561         mon_p->free_size += size;
562         if(size > mon_p->free_biggest_size)
563             mon_p->free_biggest_size = size;
564     }
565 }
566 #endif
567