/*
 * Copyright (c) 2020 Intel Corporation
 *
 * SPDX-License-Identifier: Apache-2.0
 */
#include <zephyr/kernel.h>
#include <zephyr/ztest.h>
#include <zephyr/sys/sys_heap.h>

/* need to peek into some heap internals */
#include "../../../../lib/heap/heap.h"

#define HEAP_SZ 0x1000

uint8_t __aligned(CHUNK_UNIT) heapmem[HEAP_SZ];

/* Heap boundaries and metadata size */
uint8_t *heap_start, *heap_end;
size_t heap_chunk_header_size;

/*
 * The align argument may contain a "rewind" bit.
 * See comment in sys_heap_aligned_alloc().
 */
static bool alignment_ok(void *ptr, size_t align)
{
	uintptr_t addr = (uintptr_t)ptr;
	size_t rew;

	/* split rewind bit from alignment */
	rew = LSB_GET(align);
	rew = (rew == align) ? 0 : rew;
	align -= rew;

	/* undo the pointer rewind */
	addr += rew;

	/* validate pointer alignment */
	return (addr & (align - 1)) == 0;
}
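
/*
 * For example, align = 0x44 encodes "64-byte alignment, rewound by
 * 4 bytes": LSB_GET() extracts the rewind of 4, so the returned
 * pointer p must satisfy ((uintptr_t)p + 4) % 64 == 0. A plain
 * power of two (e.g. 0x40) carries no rewind component.
 */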

/* Note that this test is making whitebox assumptions about the
 * behavior of the heap in order to exercise coverage of the
 * underlying code: that chunk headers are 8 bytes, that heap chunks
 * are returned low-address to high, and that freed blocks are merged
 * immediately with adjacent free blocks.
 */
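
/*
 * check_heap_align() allocates an optional prefix block, two aligned
 * blocks, then exactly the remaining suffix, verifying that aligned
 * allocation hands all split-off memory back to the heap; it then
 * frees everything and checks that the heap coalesces back into a
 * single free block.
 */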
static void check_heap_align(struct sys_heap *h,
			     size_t prefix, size_t align, size_t size)
{
	void *p, *q, *r, *s;
	size_t suffix;

	p = sys_heap_alloc(h, prefix);
	zassert_true(prefix == 0 || p != NULL, "prefix allocation failed");

	q = sys_heap_aligned_alloc(h, align, size);
	zassert_true(q != NULL, "first aligned allocation failed");
	zassert_true(alignment_ok(q, align), "block not aligned");

	r = sys_heap_aligned_alloc(h, align, size);
	zassert_true(r != NULL, "second aligned allocation failed");
	zassert_true(alignment_ok(r, align), "block not aligned");

	/* Make sure ALL the split memory goes back into the heap and
	 * we can allocate the full remaining suffix: everything from
	 * the first chunk boundary past r's block up to heap_end,
	 * minus one chunk header.
	 */
	suffix = (heap_end - (uint8_t *)ROUND_UP((uintptr_t)r + size, CHUNK_UNIT))
		- heap_chunk_header_size;
	s = sys_heap_alloc(h, suffix);
	zassert_true(s != NULL, "suffix allocation failed (%zu/%zu/%zu)",
				prefix, align, size);
	zassert_true(sys_heap_validate(h), "heap invalid");

	sys_heap_free(h, p);
	sys_heap_free(h, q);
	sys_heap_free(h, r);
	sys_heap_free(h, s);

	/* Make sure it's still valid, and empty */
	zassert_true(sys_heap_validate(h), "heap invalid");
	p = sys_heap_alloc(h, heap_end - heap_start);
	zassert_true(p != NULL, "heap not empty");
	q = sys_heap_alloc(h, 1);
	zassert_true(q == NULL, "heap not full");
	sys_heap_free(h, p);
}

ZTEST(lib_heap_align, test_aligned_alloc)
{
	struct sys_heap heap = {};
	void *p, *q;

	sys_heap_init(&heap, heapmem, HEAP_SZ);

	p = sys_heap_alloc(&heap, 1);
	zassert_true(p != NULL, "initial alloc failed");
	sys_heap_free(&heap, p);

	/* Heap starts where that first chunk was, and ends one 8-byte
	 * chunk header before the end of its memory
	 */
	heap_start = p;
	heap_end = heapmem + heap.heap->end_chunk * CHUNK_UNIT;
	heap_chunk_header_size = chunk_header_bytes(heap.heap);

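	/* Sweep alignment, prefix and size combinations; the innermost
	 * loop also exercises rewind-bit encodings of the align
	 * argument (align | rew).
	 */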
	for (size_t align = 8; align < HEAP_SZ / 4; align *= 2) {
		for (size_t prefix = 0; prefix <= align; prefix += 8) {
			for (size_t size = 4; size <= align; size += 12) {
				check_heap_align(&heap, prefix, align, size);
				for (size_t rew = 4; rew < MIN(align, 32); rew *= 2) {
					check_heap_align(&heap, prefix,
							 align | rew, size);
				}
			}
		}
	}

	/* corner case on small heaps */
	p = sys_heap_aligned_alloc(&heap, 8, 12);
	zassert_true(p != NULL, "aligned allocation failed");
	memset(p, 0, 12);
	zassert_true(sys_heap_validate(&heap), "heap invalid");
	sys_heap_free(&heap, p);

	/* corner case with minimizing the overallocation before alignment */
	p = sys_heap_aligned_alloc(&heap, 16, 16);
	q = sys_heap_aligned_alloc(&heap, 16, 17);
	zassert_true(p != NULL && q != NULL, "aligned allocation failed");
	memset(p, 0, 16);
	memset(q, 0, 17);
	zassert_true(sys_heap_validate(&heap), "heap invalid");
	sys_heap_free(&heap, p);
	sys_heap_free(&heap, q);
}

ZTEST_SUITE(lib_heap_align, NULL, NULL, NULL, NULL, NULL);