/**
 * @file lv_mem.c
 * General and portable implementation of malloc and free.
 * Dynamic memory monitoring is also supported.
 */

/*********************
 *      INCLUDES
 *********************/
#include "lv_mem.h"
#include "lv_tlsf.h"
#include "lv_gc.h"
#include "lv_assert.h"
#include "lv_log.h"

#if LV_MEM_CUSTOM != 0
    #include LV_MEM_CUSTOM_INCLUDE
#endif

#ifdef LV_MEM_POOL_INCLUDE
    #include LV_MEM_POOL_INCLUDE
#endif

/*********************
 *      DEFINES
 *********************/
/*memset the allocated memory to 0xaa and the freed memory to 0xbb (for testing purposes only)*/
#ifndef LV_MEM_ADD_JUNK
    #define LV_MEM_ADD_JUNK  0
#endif

#ifdef LV_ARCH_64
    #define MEM_UNIT         uint64_t
    #define ALIGN_MASK       0x7
#else
    #define MEM_UNIT         uint32_t
    #define ALIGN_MASK       0x3
#endif

#define ZERO_MEM_SENTINEL  0xa1b2c3d4

/**********************
 *      TYPEDEFS
 **********************/

/**********************
 *  STATIC PROTOTYPES
 **********************/
#if LV_MEM_CUSTOM == 0
    static void lv_mem_walker(void * ptr, size_t size, int used, void * user);
#endif

/**********************
 *  STATIC VARIABLES
 **********************/
#if LV_MEM_CUSTOM == 0
    static lv_tlsf_t tlsf;
    static uint32_t cur_used;
    static uint32_t max_used;
#endif

static uint32_t zero_mem = ZERO_MEM_SENTINEL; /*The address of this variable is returned when zero bytes are requested*/

/**********************
 *      MACROS
 **********************/
#if LV_LOG_TRACE_MEM
    #define MEM_TRACE(...) LV_LOG_TRACE(__VA_ARGS__)
#else
    #define MEM_TRACE(...)
#endif

#define COPY32 *d32 = *s32; d32++; s32++;
#define COPY8 *d8 = *s8; d8++; s8++;
#define SET32(x) *d32 = x; d32++;
#define SET8(x) *d8 = x; d8++;
#define REPEAT8(expr) expr expr expr expr expr expr expr expr

/**********************
 *   GLOBAL FUNCTIONS
 **********************/

/**
 * Initialize the dyn_mem module (work memory and other variables)
 */
void lv_mem_init(void)
{
#if LV_MEM_CUSTOM == 0

#if LV_MEM_ADR == 0
#ifdef LV_MEM_POOL_ALLOC
    tlsf = lv_tlsf_create_with_pool((void *)LV_MEM_POOL_ALLOC(LV_MEM_SIZE), LV_MEM_SIZE);
#else
    /*Allocate a large array to store the dynamically allocated data*/
    static LV_ATTRIBUTE_LARGE_RAM_ARRAY MEM_UNIT work_mem_int[LV_MEM_SIZE / sizeof(MEM_UNIT)];
    tlsf = lv_tlsf_create_with_pool((void *)work_mem_int, LV_MEM_SIZE);
#endif
#else
    tlsf = lv_tlsf_create_with_pool((void *)LV_MEM_ADR, LV_MEM_SIZE);
#endif
#endif

#if LV_MEM_ADD_JUNK
    LV_LOG_WARN("LV_MEM_ADD_JUNK is enabled which makes LVGL much slower");
#endif
}
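
/*
 * Illustrative usage sketch: lv_mem_init() is called once by lv_init() before
 * any allocation happens. With the built-in allocator (LV_MEM_CUSTOM == 0) the
 * pool is sized by LV_MEM_SIZE; with LV_MEM_CUSTOM != 0 the
 * LV_MEM_CUSTOM_ALLOC/FREE/REALLOC hooks are used instead and no pool is created.
 *
 *     lv_init();                       // calls lv_mem_init() internally
 *     void * p = lv_mem_alloc(64);     // served from the TLSF pool or the custom hook
 *     lv_mem_free(p);
 */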

/**
 * Clean up the memory buffer, which frees all the allocated memory.
 * @note It works only if `LV_MEM_CUSTOM == 0`
 */
void lv_mem_deinit(void)
{
#if LV_MEM_CUSTOM == 0
    lv_tlsf_destroy(tlsf);
    lv_mem_init();
#endif
}

/**
 * Allocate memory dynamically
 * @param size size of the memory to allocate in bytes
 * @return pointer to the allocated memory
 */
void * lv_mem_alloc(size_t size)
{
    MEM_TRACE("allocating %lu bytes", (unsigned long)size);
    if(size == 0) {
        MEM_TRACE("using zero_mem");
        return &zero_mem;
    }

#if LV_MEM_CUSTOM == 0
    void * alloc = lv_tlsf_malloc(tlsf, size);
#else
    void * alloc = LV_MEM_CUSTOM_ALLOC(size);
#endif

    if(alloc == NULL) {
        LV_LOG_INFO("couldn't allocate memory (%lu bytes)", (unsigned long)size);
#if LV_LOG_LEVEL <= LV_LOG_LEVEL_INFO
        lv_mem_monitor_t mon;
        lv_mem_monitor(&mon);
        LV_LOG_INFO("used: %6d (%3d %%), frag: %3d %%, biggest free: %6d",
                    (int)(mon.total_size - mon.free_size), mon.used_pct, mon.frag_pct,
                    (int)mon.free_biggest_size);
#endif
    }
#if LV_MEM_ADD_JUNK
    else {
        lv_memset(alloc, 0xaa, size);
    }
#endif

    if(alloc) {
#if LV_MEM_CUSTOM == 0
        cur_used += size;
        max_used = LV_MAX(cur_used, max_used);
#endif
        MEM_TRACE("allocated at %p", alloc);
    }
    return alloc;
}
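
/*
 * Illustrative usage sketch: a zero-size request returns the address of
 * `zero_mem`, which lv_mem_free() recognizes and ignores, so callers do not
 * need to special-case size == 0.
 *
 *     uint8_t * buf = lv_mem_alloc(128);
 *     if(buf != NULL) {
 *         lv_memset_00(buf, 128);
 *         lv_mem_free(buf);
 *     }
 */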

/**
 * Free allocated data
 * @param data pointer to allocated memory
 */
void lv_mem_free(void * data)
{
    MEM_TRACE("freeing %p", data);
    if(data == &zero_mem) return;
    if(data == NULL) return;

#if LV_MEM_CUSTOM == 0
#  if LV_MEM_ADD_JUNK
    lv_memset(data, 0xbb, lv_tlsf_block_size(data));
#  endif
    size_t size = lv_tlsf_free(tlsf, data);
    if(cur_used > size) cur_used -= size;
    else cur_used = 0;
#else
    LV_MEM_CUSTOM_FREE(data);
#endif
}

/**
 * Reallocate memory with a new size. The old content will be kept.
 * @param data_p pointer to an allocated memory block.
 * Its content will be copied to the new memory block and the old block freed
 * @param new_size the desired new size in bytes
 * @return pointer to the new memory
 */
void * lv_mem_realloc(void * data_p, size_t new_size)
{
    MEM_TRACE("reallocating %p with %lu size", data_p, (unsigned long)new_size);
    if(new_size == 0) {
        MEM_TRACE("using zero_mem");
        lv_mem_free(data_p);
        return &zero_mem;
    }

    if(data_p == &zero_mem) return lv_mem_alloc(new_size);

#if LV_MEM_CUSTOM == 0
    void * new_p = lv_tlsf_realloc(tlsf, data_p, new_size);
#else
    void * new_p = LV_MEM_CUSTOM_REALLOC(data_p, new_size);
#endif
    if(new_p == NULL) {
        LV_LOG_ERROR("couldn't allocate memory");
        return NULL;
    }

    MEM_TRACE("allocated at %p", new_p);
    return new_p;
}
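
/*
 * Illustrative usage sketch: on success the old pointer must not be used any
 * more; on failure NULL is returned and, with the built-in TLSF allocator, the
 * original block stays valid, so keep the old pointer until the result has
 * been checked. (With a custom LV_MEM_CUSTOM_REALLOC the failure behavior is
 * whatever the user's hook implements.)
 *
 *     void * bigger = lv_mem_realloc(buf, 256);
 *     if(bigger != NULL) buf = bigger;
 */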

lv_res_t lv_mem_test(void)
{
    if(zero_mem != ZERO_MEM_SENTINEL) {
        LV_LOG_WARN("zero_mem is written");
        return LV_RES_INV;
    }

#if LV_MEM_CUSTOM == 0
    if(lv_tlsf_check(tlsf)) {
        LV_LOG_WARN("failed");
        return LV_RES_INV;
    }

    if(lv_tlsf_check_pool(lv_tlsf_get_pool(tlsf))) {
        LV_LOG_WARN("pool failed");
        return LV_RES_INV;
    }
#endif
    MEM_TRACE("passed");
    return LV_RES_OK;
}

/**
 * Give information about the work memory of dynamic allocation
 * @param mon_p pointer to a lv_mem_monitor_t variable,
 *              the result of the analysis will be stored here
 */
void lv_mem_monitor(lv_mem_monitor_t * mon_p)
{
    /*Init the data*/
    lv_memset(mon_p, 0, sizeof(lv_mem_monitor_t));
#if LV_MEM_CUSTOM == 0
    MEM_TRACE("begin");

    lv_tlsf_walk_pool(lv_tlsf_get_pool(tlsf), lv_mem_walker, mon_p);

    mon_p->total_size = LV_MEM_SIZE;
    mon_p->used_pct = 100 - (100U * mon_p->free_size) / mon_p->total_size;
    if(mon_p->free_size > 0) {
        mon_p->frag_pct = mon_p->free_biggest_size * 100U / mon_p->free_size;
        mon_p->frag_pct = 100 - mon_p->frag_pct;
    }
    else {
        mon_p->frag_pct = 0; /*no fragmentation if all the RAM is used*/
    }
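    /*E.g. free_size = 1000 with free_biggest_size = 400 gives frag_pct = 100 - 40 = 60 %*/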

    mon_p->max_used = max_used;

    MEM_TRACE("finished");
#endif
}
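
/*
 * Illustrative usage sketch: typically called periodically, or after a failed
 * allocation, to log the state of the heap.
 *
 *     lv_mem_monitor_t mon;
 *     lv_mem_monitor(&mon);
 *     LV_LOG_USER("used: %d (%d %%), frag: %d %%, biggest free: %d",
 *                 (int)(mon.total_size - mon.free_size), mon.used_pct,
 *                 mon.frag_pct, (int)mon.free_biggest_size);
 */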

/**
 * Get a temporary buffer with the given size.
 * @param size the required size
 * @return pointer to a buffer with at least `size` bytes, or NULL on failure
 */
void * lv_mem_buf_get(uint32_t size)
{
    if(size == 0) return NULL;

    MEM_TRACE("begin, getting %d bytes", size);

    /*Try to find a free buffer with suitable size*/
    int8_t i_guess = -1;
    for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
        if(LV_GC_ROOT(lv_mem_buf[i]).used == 0 && LV_GC_ROOT(lv_mem_buf[i]).size >= size) {
            if(LV_GC_ROOT(lv_mem_buf[i]).size == size) {
                LV_GC_ROOT(lv_mem_buf[i]).used = 1;
                return LV_GC_ROOT(lv_mem_buf[i]).p;
            }
            else if(i_guess < 0) {
                i_guess = i;
            }
            /*If size of `i` is closer to `size` prefer it*/
            else if(LV_GC_ROOT(lv_mem_buf[i]).size < LV_GC_ROOT(lv_mem_buf[i_guess]).size) {
                i_guess = i;
            }
        }
    }

    if(i_guess >= 0) {
        LV_GC_ROOT(lv_mem_buf[i_guess]).used = 1;
        MEM_TRACE("returning already allocated buffer (buffer id: %d, address: %p)", i_guess,
                  LV_GC_ROOT(lv_mem_buf[i_guess]).p);
        return LV_GC_ROOT(lv_mem_buf[i_guess]).p;
    }

    /*Reallocate a free buffer*/
    for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
        if(LV_GC_ROOT(lv_mem_buf[i]).used == 0) {
            /*if this fails you probably need to increase your LV_MEM_SIZE/heap size*/
            void * buf = lv_mem_realloc(LV_GC_ROOT(lv_mem_buf[i]).p, size);
            LV_ASSERT_MSG(buf != NULL, "Out of memory, can't allocate a new buffer (increase your LV_MEM_SIZE/heap size)");
            if(buf == NULL) return NULL;

            LV_GC_ROOT(lv_mem_buf[i]).used = 1;
            LV_GC_ROOT(lv_mem_buf[i]).size = size;
            LV_GC_ROOT(lv_mem_buf[i]).p    = buf;
            MEM_TRACE("allocated (buffer id: %d, address: %p)", i, LV_GC_ROOT(lv_mem_buf[i]).p);
            return LV_GC_ROOT(lv_mem_buf[i]).p;
        }
    }

    LV_LOG_ERROR("no more buffers. (increase LV_MEM_BUF_MAX_NUM)");
    LV_ASSERT_MSG(false, "No more buffers. Increase LV_MEM_BUF_MAX_NUM.");
    return NULL;
}
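
/*
 * Illustrative usage sketch: the buffers are cached and reused across calls,
 * so every lv_mem_buf_get() must be paired with lv_mem_buf_release() as soon
 * as the data is no longer needed, otherwise the LV_MEM_BUF_MAX_NUM slots run out.
 *
 *     void * tmp = lv_mem_buf_get(256);
 *     if(tmp != NULL) {
 *         lv_memset_00(tmp, 256);
 *         lv_mem_buf_release(tmp);
 *     }
 */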

/**
 * Release a memory buffer
 * @param p buffer to release
 */
void lv_mem_buf_release(void * p)
{
    MEM_TRACE("begin (address: %p)", p);

    for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
        if(LV_GC_ROOT(lv_mem_buf[i]).p == p) {
            LV_GC_ROOT(lv_mem_buf[i]).used = 0;
            return;
        }
    }

    LV_LOG_ERROR("p is not a known buffer");
}

/**
 * Free all memory buffers
 */
void lv_mem_buf_free_all(void)
{
    for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
        if(LV_GC_ROOT(lv_mem_buf[i]).p) {
            lv_mem_free(LV_GC_ROOT(lv_mem_buf[i]).p);
            LV_GC_ROOT(lv_mem_buf[i]).p = NULL;
            LV_GC_ROOT(lv_mem_buf[i]).used = 0;
            LV_GC_ROOT(lv_mem_buf[i]).size = 0;
        }
    }
}

#if LV_MEMCPY_MEMSET_STD == 0
/**
 * Same as `memcpy` but optimized for 4-byte operations.
 * @param dst pointer to the destination buffer
 * @param src pointer to the source buffer
 * @param len number of bytes to copy
 * @return `dst`
 */
void * LV_ATTRIBUTE_FAST_MEM lv_memcpy(void * dst, const void * src, size_t len)
{
    uint8_t * d8 = dst;
    const uint8_t * s8 = src;

    lv_uintptr_t d_align = (lv_uintptr_t)d8 & ALIGN_MASK;
    lv_uintptr_t s_align = (lv_uintptr_t)s8 & ALIGN_MASK;

    /*Byte copy for unaligned memories*/
    if(s_align != d_align) {
        while(len > 32) {
            REPEAT8(COPY8);
            REPEAT8(COPY8);
            REPEAT8(COPY8);
            REPEAT8(COPY8);
            len -= 32;
        }
        while(len) {
            COPY8
            len--;
        }
        return dst;
    }

    /*Make the memories aligned*/
    if(d_align) {
        d_align = ALIGN_MASK + 1 - d_align;
        while(d_align && len) {
            COPY8;
            d_align--;
            len--;
        }
    }

    uint32_t * d32 = (uint32_t *)d8;
    const uint32_t * s32 = (uint32_t *)s8;
    while(len > 32) {
        REPEAT8(COPY32)
        len -= 32;
    }

    while(len > 4) {
        COPY32;
        len -= 4;
    }

    d8 = (uint8_t *)d32;
    s8 = (const uint8_t *)s32;
    while(len) {
        COPY8
        len--;
    }

    return dst;
}

/**
 * Same as `memset` but optimized for 4-byte operations.
 * @param dst pointer to the destination buffer
 * @param v value to set [0..255]
 * @param len number of bytes to set
 */
void LV_ATTRIBUTE_FAST_MEM lv_memset(void * dst, uint8_t v, size_t len)
{

    uint8_t * d8 = (uint8_t *)dst;

    lv_uintptr_t d_align = (lv_uintptr_t) d8 & ALIGN_MASK;

    /*Make the address aligned*/
    if(d_align) {
        d_align = ALIGN_MASK + 1 - d_align;
        while(d_align && len) {
            SET8(v);
            len--;
            d_align--;
        }
    }

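    /*Replicate the byte value into all 4 bytes of a 32-bit word, e.g. 0xAB -> 0xABABABAB*/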
    uint32_t v32 = (uint32_t)v + ((uint32_t)v << 8) + ((uint32_t)v << 16) + ((uint32_t)v << 24);

    uint32_t * d32 = (uint32_t *)d8;

    while(len > 32) {
        REPEAT8(SET32(v32));
        len -= 32;
    }

    while(len > 4) {
        SET32(v32);
        len -= 4;
    }

    d8 = (uint8_t *)d32;
    while(len) {
        SET8(v);
        len--;
    }
}

/**
 * Same as `memset(dst, 0x00, len)` but optimized for 4-byte operations.
 * @param dst pointer to the destination buffer
 * @param len number of bytes to set
 */
void LV_ATTRIBUTE_FAST_MEM lv_memset_00(void * dst, size_t len)
{
    uint8_t * d8 = (uint8_t *)dst;
    lv_uintptr_t d_align = (lv_uintptr_t) d8 & ALIGN_MASK;

    /*Make the address aligned*/
    if(d_align) {
        d_align = ALIGN_MASK + 1 - d_align;
        while(d_align && len) {
            SET8(0);
            len--;
            d_align--;
        }
    }

    uint32_t * d32 = (uint32_t *)d8;
    while(len > 32) {
        REPEAT8(SET32(0));
        len -= 32;
    }

    while(len > 4) {
        SET32(0);
        len -= 4;
    }

    d8 = (uint8_t *)d32;
    while(len) {
        SET8(0);
        len--;
    }
}

/**
 * Same as `memset(dst, 0xFF, len)` but optimized for 4-byte operations.
 * @param dst pointer to the destination buffer
 * @param len number of bytes to set
 */
void LV_ATTRIBUTE_FAST_MEM lv_memset_ff(void * dst, size_t len)
{
    uint8_t * d8 = (uint8_t *)dst;
    lv_uintptr_t d_align = (lv_uintptr_t) d8 & ALIGN_MASK;

    /*Make the address aligned*/
    if(d_align) {
        d_align = ALIGN_MASK + 1 - d_align;
        while(d_align && len) {
            SET8(0xFF);
            len--;
            d_align--;
        }
    }

    uint32_t * d32 = (uint32_t *)d8;
    while(len > 32) {
        REPEAT8(SET32(0xFFFFFFFF));
        len -= 32;
    }

    while(len > 4) {
        SET32(0xFFFFFFFF);
        len -= 4;
    }

    d8 = (uint8_t *)d32;
    while(len) {
        SET8(0xFF);
        len--;
    }
}

#endif /*LV_MEMCPY_MEMSET_STD*/

/**********************
 *   STATIC FUNCTIONS
 **********************/

#if LV_MEM_CUSTOM == 0
static void lv_mem_walker(void * ptr, size_t size, int used, void * user)
{
    LV_UNUSED(ptr);

    lv_mem_monitor_t * mon_p = user;
    if(used) {
        mon_p->used_cnt++;
    }
    else {
        mon_p->free_cnt++;
        mon_p->free_size += size;
        if(size > mon_p->free_biggest_size)
            mon_p->free_biggest_size = size;
    }
}
#endif