1 /*
2  * Copyright (c) 2024 Nordic Semiconductor ASA
3  *
4  * SPDX-License-Identifier: Apache-2.0
5  */
6 
7 #include <errno.h>
8 #include <string.h>
9 #include <zephyr/cache.h>
10 #include <zephyr/kernel.h>
11 #include <zephyr/ztest.h>
12 
13 #include <dmm.h>
14 
/* Devicetree aliases selecting the devices under test: one whose memory
 * region is cacheable, one whose region is non-cacheable.
 */
#define DUT_CACHE   DT_ALIAS(dut_cache)
#define DUT_NOCACHE DT_ALIAS(dut_nocache)

/* Start address of the device's `memory-regions` region, or 0 when the
 * device node has no such property.
 */
#define DMM_TEST_GET_REG_START(node_id)				\
	COND_CODE_1(DT_NODE_HAS_PROP(node_id, memory_regions),	\
		    (DT_REG_ADDR(DT_PHANDLE(node_id, memory_regions))), (0))

/* Size in bytes of the device's `memory-regions` region, or 0 when the
 * device node has no such property.
 */
#define DMM_TEST_GET_REG_SIZE(node_id)				\
	COND_CODE_1(DT_NODE_HAS_PROP(node_id, memory_regions),	\
		    (DT_REG_SIZE(DT_PHANDLE(node_id, memory_regions))), (0))

#if CONFIG_DCACHE
/* Sanity-check DMM's alignment contract: cacheable regions align buffers to
 * the data-cache line size, non-cacheable regions need no alignment.
 */
BUILD_ASSERT(DMM_ALIGN_SIZE(DUT_CACHE) == CONFIG_DCACHE_LINE_SIZE);
BUILD_ASSERT(DMM_ALIGN_SIZE(DUT_NOCACHE) == 1);
#endif
30 
/* Descriptor of one DMM memory region exercised by the tests. */
struct dmm_test_region {
	void *mem_reg;   /* Opaque region handle from DMM_DEV_TO_REG() */
	uintptr_t start; /* Region start address (0 if device has no memory-regions property) */
	size_t size;     /* Region size in bytes (0 if device has no memory-regions property) */
};
36 
/* Indices into struct dmm_fixture::regions. */
enum {
	DMM_TEST_REGION_CACHE,   /* Cacheable region (dut_cache alias) */
	DMM_TEST_REGION_NOCACHE, /* Non-cacheable region (dut_nocache alias) */
	DMM_TEST_REGION_COUNT    /* Number of regions; must stay last */
};
42 
/* Suite fixture handed to every test; initialized in test_setup(). */
struct dmm_fixture {
	/* Working copy of the region descriptors for the two regions under test */
	struct dmm_test_region regions[DMM_TEST_REGION_COUNT];
	/* Fill-pattern seed, incremented by the helpers so each buffer gets a
	 * distinct pattern
	 */
	uint32_t fill_value;
};
47 
/* Pristine region descriptors derived from devicetree; copied into the
 * fixture before each suite run.
 */
static const struct dmm_test_region dmm_test_regions[DMM_TEST_REGION_COUNT] = {
	[DMM_TEST_REGION_CACHE] = {
		.mem_reg = DMM_DEV_TO_REG(DUT_CACHE),
		.start = DMM_TEST_GET_REG_START(DUT_CACHE),
		.size = DMM_TEST_GET_REG_SIZE(DUT_CACHE)
	},
	[DMM_TEST_REGION_NOCACHE] = {
		.mem_reg = DMM_DEV_TO_REG(DUT_NOCACHE),
		.start = DMM_TEST_GET_REG_START(DUT_NOCACHE),
		.size = DMM_TEST_GET_REG_SIZE(DUT_NOCACHE)
	},
};
60 
test_setup(void)61 static void *test_setup(void)
62 {
63 	static struct dmm_fixture fixture;
64 
65 	memcpy(fixture.regions, dmm_test_regions, sizeof(dmm_test_regions));
66 	fixture.fill_value = 0x1;
67 	return &fixture;
68 }
69 
/* Per-test teardown hook for the dmm suite; nothing to release.
 *
 * @param fixture Suite fixture pointer supplied by ztest (unused).
 *
 * Note: the original parameter was misleadingly named `argc`; it is the
 * fixture pointer. The explicit cast silences -Wunused-parameter.
 */
static void test_cleanup(void *fixture)
{
	(void)fixture;
}
73 
dmm_buffer_in_region_check(struct dmm_test_region * dtr,void * buf,size_t size)74 static bool dmm_buffer_in_region_check(struct dmm_test_region *dtr, void *buf, size_t size)
75 {
76 	uintptr_t start = (uintptr_t)buf;
77 
78 	return ((start >= dtr->start) && ((start + size) <= (dtr->start + dtr->size)));
79 }
80 
/*
 * Exercise the device-output (TX) path of the DMM API for one region.
 *
 * Fills @p data with a fresh pattern, maps it via dmm_buffer_out_prepare(),
 * checks where the resulting DMA buffer landed, verifies it holds the
 * pattern, then releases it.
 *
 * @param dtr          Region descriptor under test.
 * @param fill_value   Pattern seed; post-incremented so each call writes a
 *                     distinct pattern.
 * @param data         User buffer handed to the DMM API.
 * @param size         Size of @p data in bytes.
 * @param was_prealloc True when @p data was statically placed in the device's
 *                     memory section via DMM_MEMORY_SECTION(); DMM is then
 *                     expected to use it in place rather than allocate a copy.
 * @param is_cached    True for the cacheable region; the DMA buffer must then
 *                     be aligned to the data-cache line size.
 */
static void dmm_check_output_buffer(struct dmm_test_region *dtr, uint32_t *fill_value,
				    void *data, size_t size, bool was_prealloc, bool is_cached)
{
	void *buf;
	int retval;

	/* Write a unique fill pattern so stale copies from earlier calls
	 * cannot satisfy the comparison below.
	 */
	memset(data, (*fill_value)++, size);
	retval = dmm_buffer_out_prepare(dtr->mem_reg, data, size, &buf);
	zassert_ok(retval);
	if (IS_ENABLED(CONFIG_DCACHE) && is_cached) {
		zassert_true(IS_ALIGNED(buf, CONFIG_DCACHE_LINE_SIZE));
	}

	if (IS_ENABLED(CONFIG_HAS_NORDIC_DMM)) {
		/* Real DMM: preallocated buffers are used in place, others
		 * must be copied to a buffer inside the device's region.
		 */
		if (was_prealloc) {
			zassert_equal(data, buf);
		} else {
			zassert_not_equal(data, buf);
		}
		zassert_true(dmm_buffer_in_region_check(dtr, buf, size));
	} else {
		/* Without DMM support the API is a pass-through. */
		zassert_equal(data, buf);
	}
	/* Invalidate first so the comparison reads actual memory contents
	 * rather than possibly-stale cache lines.
	 */
	sys_cache_data_invd_range(buf, size);
	zassert_mem_equal(buf, data, size);

	retval = dmm_buffer_out_release(dtr->mem_reg, buf);
	zassert_ok(retval);
}
110 
/*
 * Exercise the device-input (RX) path of the DMM API for one region.
 *
 * Maps @p data via dmm_buffer_in_prepare(), simulates an external bus master
 * writing into the DMA buffer, deliberately pollutes the cache when the
 * region is cacheable, then releases the buffer and checks the simulated
 * device data arrived in @p data (i.e. the library performed the required
 * cache maintenance and copy-back).
 *
 * @param dtr          Region descriptor under test.
 * @param fill_value   Pattern seed; post-incremented per use so the DMA
 *                     pattern and the cache-pollution pattern differ.
 * @param data         User buffer handed to the DMM API (max 127 bytes, see
 *                     intermediate_buf below).
 * @param size         Size of @p data in bytes.
 * @param was_prealloc True when @p data was statically placed in the device's
 *                     memory section via DMM_MEMORY_SECTION(); DMM is then
 *                     expected to use it in place rather than allocate a copy.
 * @param is_cached    True for the cacheable region; the DMA buffer must then
 *                     be aligned to the data-cache line size.
 */
static void dmm_check_input_buffer(struct dmm_test_region *dtr, uint32_t *fill_value,
				   void *data, size_t size, bool was_prealloc, bool is_cached)
{
	void *buf;
	int retval;
	uint8_t intermediate_buf[128];

	zassert_true(size < sizeof(intermediate_buf));

	retval = dmm_buffer_in_prepare(dtr->mem_reg, data, size, &buf);
	zassert_ok(retval);
	if (IS_ENABLED(CONFIG_DCACHE) && is_cached) {
		zassert_true(IS_ALIGNED(buf, CONFIG_DCACHE_LINE_SIZE));
	}

	if (IS_ENABLED(CONFIG_HAS_NORDIC_DMM)) {
		/* Real DMM: preallocated buffers are used in place, others
		 * must come from inside the device's region.
		 */
		if (was_prealloc) {
			zassert_equal(data, buf);
		} else {
			zassert_not_equal(data, buf);
		}
		zassert_true(dmm_buffer_in_region_check(dtr, buf, size));
	} else {
		/* Without DMM support the API is a pass-through. */
		zassert_equal(data, buf);
	}

	/* Simulate external bus master writing to memory region */
	memset(buf, (*fill_value)++, size);
	sys_cache_data_flush_range(buf, size);
	/* Preserve actual memory region contents before polluting the cache */
	memcpy(intermediate_buf, buf, size);
	if (IS_ENABLED(CONFIG_DCACHE) && is_cached) {
		/* Purposefully pollute the cache to make sure library manages cache properly */
		memset(buf, (*fill_value)++, size);
	}

	retval = dmm_buffer_in_release(dtr->mem_reg, data, size, buf);
	zassert_ok(retval);

	/* The "device-written" pattern, not the cache pollution, must have
	 * reached the user buffer.
	 */
	zassert_mem_equal(data, intermediate_buf, size);
}
152 
/* RX path, cacheable region, stack buffer not preallocated in the region. */
ZTEST_USER_F(dmm, test_check_dev_cache_in_allocate)
{
	uint8_t user_data[16];

	dmm_check_input_buffer(&fixture->regions[DMM_TEST_REGION_CACHE], &fixture->fill_value,
			       user_data, sizeof(user_data), false, true);
}
160 
/* RX path, cacheable region, buffer preallocated in the device's memory section. */
ZTEST_USER_F(dmm, test_check_dev_cache_in_preallocate)
{
	static uint8_t user_data[16] DMM_MEMORY_SECTION(DUT_CACHE);

	dmm_check_input_buffer(&fixture->regions[DMM_TEST_REGION_CACHE], &fixture->fill_value,
			       user_data, sizeof(user_data), true, true);
}
168 
/* TX path, cacheable region, stack buffer not preallocated in the region. */
ZTEST_USER_F(dmm, test_check_dev_cache_out_allocate)
{
	uint8_t user_data[16];

	dmm_check_output_buffer(&fixture->regions[DMM_TEST_REGION_CACHE], &fixture->fill_value,
				user_data, sizeof(user_data), false, true);
}
176 
/* TX path, cacheable region, buffer preallocated in the device's memory section. */
ZTEST_USER_F(dmm, test_check_dev_cache_out_preallocate)
{
	static uint8_t user_data[16] DMM_MEMORY_SECTION(DUT_CACHE);

	dmm_check_output_buffer(&fixture->regions[DMM_TEST_REGION_CACHE], &fixture->fill_value,
				user_data, sizeof(user_data), true, true);
}
184 
/* RX path, non-cacheable region, stack buffer not preallocated in the region. */
ZTEST_USER_F(dmm, test_check_dev_nocache_in_allocate)
{
	uint8_t user_data[16];

	dmm_check_input_buffer(&fixture->regions[DMM_TEST_REGION_NOCACHE], &fixture->fill_value,
			       user_data, sizeof(user_data), false, false);
}
192 
/* RX path, non-cacheable region, buffer preallocated in the device's memory section. */
ZTEST_USER_F(dmm, test_check_dev_nocache_in_preallocate)
{
	static uint8_t user_data[16] DMM_MEMORY_SECTION(DUT_NOCACHE);

	dmm_check_input_buffer(&fixture->regions[DMM_TEST_REGION_NOCACHE], &fixture->fill_value,
			       user_data, sizeof(user_data), true, false);
}
200 
/* TX path, non-cacheable region, stack buffer not preallocated in the region. */
ZTEST_USER_F(dmm, test_check_dev_nocache_out_allocate)
{
	uint8_t user_data[16];

	dmm_check_output_buffer(&fixture->regions[DMM_TEST_REGION_NOCACHE], &fixture->fill_value,
				user_data, sizeof(user_data), false, false);
}
208 
/* TX path, non-cacheable region, buffer preallocated in the device's memory section. */
ZTEST_USER_F(dmm, test_check_dev_nocache_out_preallocate)
{
	static uint8_t user_data[16] DMM_MEMORY_SECTION(DUT_NOCACHE);

	dmm_check_output_buffer(&fixture->regions[DMM_TEST_REGION_NOCACHE], &fixture->fill_value,
				user_data, sizeof(user_data), true, false);
}
216 
217 ZTEST_SUITE(dmm, NULL, test_setup, NULL, test_cleanup, NULL);
218 
dmm_test_prepare(void)219 int dmm_test_prepare(void)
220 {
221 	const struct dmm_test_region *dtr;
222 
223 	for (size_t i = 0; i < ARRAY_SIZE(dmm_test_regions); i++) {
224 		dtr = &dmm_test_regions[i];
225 		memset((void *)dtr->start, 0x00, dtr->size);
226 	}
227 
228 	return 0;
229 }
230 
231 SYS_INIT(dmm_test_prepare, PRE_KERNEL_1, 0);
232