1 /*
2 * SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <string.h>
8 #include "sdkconfig.h"
9 #include "freertos/FreeRTOSConfig.h"
10 #include "soc/periph_defs.h"
11 #include "esp_intr_alloc.h"
12 #include "hal/memprot_ll.h"
13 #include "esp32s3/rom/ets_sys.h"
14 #include "esp_cpu.h"
15 #include "esp_fault.h"
16 #include "esp_attr.h"
17 #include "hal/memprot_types.h"
18 #include "esp_private/esp_memprot_internal.h"
19 #include "esp_memprot.h"
20
21 /*
22 * LD section boundaries
23 */
24 extern int _iram_text_start;
25 extern int _iram_text_end;
26 extern int _rtc_text_end;
27
28 /*
29 * Local holder of the Memprot config required by the last esp_mprot_set_prot() call.
30 * The structure is zeroed on creation => 'mem_type_mask == MEMPROT_TYPE_NONE' guarantees no interference before proper
31 * update by the API function
32 */
33 static esp_memp_config_t s_memp_cfg = ESP_MEMPROT_ZERO_CONFIG();
34
35
36 //////////////////////////////////////////////////////////////////////////////
37 // internal helpers
38
esp_mprot_cpuid_valid(const int core)39 static esp_err_t esp_mprot_cpuid_valid(const int core)
40 {
41 for (size_t x = 0; x < s_memp_cfg.target_cpu_count; x++) {
42 if (core == s_memp_cfg.target_cpu[x]) {
43 return ESP_OK;
44 }
45 }
46
47 return ESP_ERR_MEMPROT_CPUID_INVALID;
48 }
49
esp_memprot_iram0_get_def_split_addr(void)50 static void *esp_memprot_iram0_get_def_split_addr(void)
51 {
52 return (void *)(uint32_t)&_iram_text_end;
53 }
54
esp_memprot_dram0_get_def_split_addr(void)55 static void *esp_memprot_dram0_get_def_split_addr(void)
56 {
57 return (void *)MAP_IRAM_TO_DRAM((uint32_t)&_iram_text_end);
58 }
59
esp_memprot_rtcfast_get_min_split_addr(void)60 static void *esp_memprot_rtcfast_get_min_split_addr(void)
61 {
62 return (void *)(uint32_t)&_rtc_text_end;
63 }
64
65 //register MEMPROT-relevant interrupts in the global matrix (to deploy panic handling)
66 //mapped to ETS_MEMACCESS_ERR_INUM (shared with cache-access interrupts)
/**
 * Routes the PMS-violation interrupt source of the given memory type/core to
 * ETS_MEMACCESS_ERR_INUM in the interrupt matrix, so violations land in the
 * panic handler. The target interrupt line is disabled for the duration of the
 * re-routing and re-enabled afterwards.
 *
 * Returns ESP_OK on success, ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID for an
 * unknown mem_type, or the error propagated from the LL source-number lookup.
 * NOTE(review): on an LL lookup failure ESP_MEMPROT_ERR_CHECK returns early,
 * leaving ETS_MEMACCESS_ERR_INUM disabled — presumably acceptable on this
 * error path; confirm against callers.
 */
static esp_err_t esp_mprot_set_intr_matrix(const esp_mprot_mem_t mem_type, const int core)
{
    ESP_INTR_DISABLE(ETS_MEMACCESS_ERR_INUM);

    esp_err_t err;
    //initialized to an out-of-range sentinel; overwritten by the LL lookup below
    uint32_t intr_src_num = ETS_MAX_INTR_SOURCE;

    switch (mem_type) {
    case MEMPROT_TYPE_IRAM0_SRAM:
        ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_intr_source_num(core, &intr_src_num)))
        esp_rom_route_intr_matrix(core, intr_src_num, ETS_MEMACCESS_ERR_INUM);
        break;
    case MEMPROT_TYPE_DRAM0_SRAM:
        ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_intr_source_num(core, &intr_src_num)))
        esp_rom_route_intr_matrix(core, intr_src_num, ETS_MEMACCESS_ERR_INUM);
        break;
    case MEMPROT_TYPE_IRAM0_RTCFAST:
        ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_intr_source_num(core, &intr_src_num)))
        esp_rom_route_intr_matrix(core, intr_src_num, ETS_MEMACCESS_ERR_INUM);
        break;
    default:
        return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
    }

    ESP_INTR_ENABLE(ETS_MEMACCESS_ERR_INUM);

    return ESP_OK;
}
95
96 //////////////////////////////////////////////////////////////////////////////
97 // PMS configuration APIs (set/get/lock)
98 // IRAM/DRAM settings are shared by both CPUs, RTCFAST settings are separate for each core
99
esp_mprot_set_split_addr(const esp_mprot_mem_t mem_type,const esp_mprot_split_addr_t line_type,const void * line_addr,const int core)100 esp_err_t esp_mprot_set_split_addr(const esp_mprot_mem_t mem_type, const esp_mprot_split_addr_t line_type, const void *line_addr, const int core __attribute__((unused)))
101 {
102 switch (mem_type) {
103 case MEMPROT_TYPE_IRAM0_SRAM:
104 switch (line_type) {
105 case MEMPROT_SPLIT_ADDR_IRAM0_DRAM0:
106 return esp_mprot_ll_err_to_esp_err(memprot_ll_set_iram0_split_line_main_I_D(line_addr));
107 case MEMPROT_SPLIT_ADDR_IRAM0_LINE_0:
108 return esp_mprot_ll_err_to_esp_err(memprot_ll_set_iram0_split_line_I_0(line_addr));
109 case MEMPROT_SPLIT_ADDR_IRAM0_LINE_1:
110 return esp_mprot_ll_err_to_esp_err(memprot_ll_set_iram0_split_line_I_1(line_addr));
111 default:
112 return ESP_ERR_MEMPROT_SPLIT_ADDR_INVALID;
113 } break;
114 case MEMPROT_TYPE_DRAM0_SRAM:
115 switch (line_type) {
116 case MEMPROT_SPLIT_ADDR_DRAM0_DMA_LINE_0:
117 return esp_mprot_ll_err_to_esp_err(memprot_ll_set_dram0_split_line_D_0(line_addr));
118 case MEMPROT_SPLIT_ADDR_DRAM0_DMA_LINE_1:
119 return esp_mprot_ll_err_to_esp_err(memprot_ll_set_dram0_split_line_D_1(line_addr));
120 default:
121 return ESP_ERR_MEMPROT_SPLIT_ADDR_INVALID;
122 } break;
123 case MEMPROT_TYPE_IRAM0_RTCFAST:
124 if (line_type == MEMPROT_SPLIT_ADDR_MAIN) { /* so far only WORLD_0 is supported */
125 esp_err_t err;
126 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
127 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_set_rtcfast_split_line(core, line_addr, MEMP_HAL_WORLD_0)))
128 return ESP_OK;
129 } else {
130 return ESP_ERR_MEMPROT_SPLIT_ADDR_INVALID;
131 }
132 break;
133 default:
134 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
135 }
136 }
137
esp_mprot_get_split_addr(const esp_mprot_mem_t mem_type,const esp_mprot_split_addr_t line_type,void ** line_addr,const int core)138 esp_err_t esp_mprot_get_split_addr(const esp_mprot_mem_t mem_type, const esp_mprot_split_addr_t line_type, void **line_addr, const int core __attribute__((unused)))
139 {
140 if (line_addr == NULL) {
141 return ESP_ERR_INVALID_ARG;
142 }
143
144 switch (mem_type) {
145 case MEMPROT_TYPE_IRAM0_SRAM:
146 switch (line_type) {
147 case MEMPROT_SPLIT_ADDR_IRAM0_DRAM0:
148 *line_addr = memprot_ll_get_iram0_split_line_main_I_D();
149 break;
150 case MEMPROT_SPLIT_ADDR_IRAM0_LINE_0:
151 *line_addr = memprot_ll_get_iram0_split_line_I_0();
152 break;
153 case MEMPROT_SPLIT_ADDR_IRAM0_LINE_1:
154 *line_addr = memprot_ll_get_iram0_split_line_I_1();
155 break;
156 default:
157 return ESP_ERR_MEMPROT_SPLIT_ADDR_INVALID;
158 } break;
159 case MEMPROT_TYPE_DRAM0_SRAM:
160 switch (line_type) {
161 case MEMPROT_SPLIT_ADDR_DRAM0_DMA_LINE_0:
162 *line_addr = memprot_ll_get_dram0_split_line_D_0();
163 break;
164 case MEMPROT_SPLIT_ADDR_DRAM0_DMA_LINE_1:
165 *line_addr = memprot_ll_get_dram0_split_line_D_1();
166 break;
167 default:
168 return ESP_ERR_MEMPROT_SPLIT_ADDR_INVALID;
169 } break;
170 case MEMPROT_TYPE_IRAM0_RTCFAST:
171 if (line_type == MEMPROT_SPLIT_ADDR_MAIN) { /* so far only WORLD_0 is supported */
172 esp_err_t err;
173 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
174 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_get_rtcfast_split_line(core, MEMP_HAL_WORLD_0, *line_addr)))
175 } else {
176 return ESP_ERR_MEMPROT_SPLIT_ADDR_INVALID;
177 }
178 break;
179 default:
180 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
181 }
182
183 return ESP_OK;
184 }
185
esp_mprot_get_default_main_split_addr(const esp_mprot_mem_t mem_type,void ** def_split_addr)186 esp_err_t esp_mprot_get_default_main_split_addr(const esp_mprot_mem_t mem_type, void **def_split_addr)
187 {
188 if (def_split_addr == NULL) {
189 return ESP_ERR_INVALID_ARG;
190 }
191
192 switch (mem_type) {
193 case MEMPROT_TYPE_IRAM0_SRAM:
194 *def_split_addr = esp_memprot_iram0_get_def_split_addr();
195 break;
196 case MEMPROT_TYPE_DRAM0_SRAM:
197 *def_split_addr = esp_memprot_dram0_get_def_split_addr();
198 break;
199 case MEMPROT_TYPE_IRAM0_RTCFAST:
200 *def_split_addr = esp_memprot_rtcfast_get_min_split_addr();
201 break;
202 default:
203 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
204 }
205
206 return ESP_OK;
207 }
208
esp_mprot_set_split_addr_lock(const esp_mprot_mem_t mem_type,const int core)209 esp_err_t esp_mprot_set_split_addr_lock(const esp_mprot_mem_t mem_type, const int core __attribute__((unused)))
210 {
211 switch (mem_type) {
212 case MEMPROT_TYPE_IRAM0_SRAM:
213 case MEMPROT_TYPE_DRAM0_SRAM:
214 memprot_ll_set_iram0_dram0_split_line_lock();
215 break;
216 case MEMPROT_TYPE_IRAM0_RTCFAST: { //caution: RTCFAST shares the lock with other PIF PMS constraints!
217 esp_err_t err;
218 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
219 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_set_pif_constraint_lock(core)))
220 } break;
221 default:
222 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
223 }
224
225 return ESP_OK;
226 }
227
esp_mprot_get_split_addr_lock(const esp_mprot_mem_t mem_type,bool * locked,const int core)228 esp_err_t esp_mprot_get_split_addr_lock(const esp_mprot_mem_t mem_type, bool *locked, const int core __attribute__((unused)))
229 {
230 if (locked == NULL) {
231 return ESP_ERR_INVALID_ARG;
232 }
233
234 switch (mem_type) {
235 case MEMPROT_TYPE_IRAM0_SRAM:
236 case MEMPROT_TYPE_DRAM0_SRAM:
237 *locked = memprot_ll_get_iram0_dram0_split_line_lock();
238 break;
239 case MEMPROT_TYPE_IRAM0_RTCFAST: {
240 esp_err_t err;
241 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
242 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_get_pif_constraint_lock(core, locked)))
243 } break;
244 default:
245 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
246 }
247
248 return ESP_OK;
249 }
250
esp_mprot_set_pms_lock(const esp_mprot_mem_t mem_type,const int core)251 esp_err_t esp_mprot_set_pms_lock(const esp_mprot_mem_t mem_type, const int core __attribute__((unused)))
252 {
253 switch (mem_type) {
254 case MEMPROT_TYPE_IRAM0_SRAM:
255 memprot_ll_iram0_set_pms_lock();
256 break;
257 case MEMPROT_TYPE_DRAM0_SRAM:
258 memprot_ll_dram0_set_pms_lock();
259 break;
260 case MEMPROT_TYPE_IRAM0_RTCFAST: { //caution: RTCFAST shares the lock with other PIF PMS constraints!
261 esp_err_t err;
262 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
263 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_set_pif_constraint_lock(core)))
264 } break;
265 default:
266 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
267 }
268
269 return ESP_OK;
270 }
271
esp_mprot_get_pms_lock(const esp_mprot_mem_t mem_type,bool * locked,const int core)272 esp_err_t esp_mprot_get_pms_lock(const esp_mprot_mem_t mem_type, bool *locked, const int core __attribute__((unused)))
273 {
274 if (locked == NULL) {
275 return ESP_ERR_INVALID_ARG;
276 }
277
278 switch (mem_type) {
279 case MEMPROT_TYPE_IRAM0_SRAM:
280 *locked = memprot_ll_iram0_get_pms_lock();
281 break;
282 case MEMPROT_TYPE_DRAM0_SRAM:
283 *locked = memprot_ll_dram0_get_pms_lock();
284 break;
285 case MEMPROT_TYPE_IRAM0_RTCFAST: {
286 esp_err_t err;
287 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
288 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_get_pif_constraint_lock(core, locked)))
289 } break;
290 default:
291 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
292 }
293
294 return ESP_OK;
295 }
296
esp_mprot_set_pms_area(const esp_mprot_pms_area_t area_type,const uint32_t flags,const int core)297 esp_err_t esp_mprot_set_pms_area(const esp_mprot_pms_area_t area_type, const uint32_t flags, const int core __attribute__((unused)))
298 {
299 esp_err_t err;
300 bool r = flags & MEMPROT_OP_READ;
301 bool w = flags & MEMPROT_OP_WRITE;
302 bool x = flags & MEMPROT_OP_EXEC;
303
304 switch (area_type) {
305 case MEMPROT_PMS_AREA_IRAM0_0:
306 memprot_ll_iram0_set_pms_area_0(r, w, x);
307 break;
308 case MEMPROT_PMS_AREA_IRAM0_1:
309 memprot_ll_iram0_set_pms_area_1(r, w, x);
310 break;
311 case MEMPROT_PMS_AREA_IRAM0_2:
312 memprot_ll_iram0_set_pms_area_2(r, w, x);
313 break;
314 case MEMPROT_PMS_AREA_IRAM0_3:
315 memprot_ll_iram0_set_pms_area_3(r, w, x);
316 break;
317 case MEMPROT_PMS_AREA_DRAM0_0:
318 memprot_ll_dram0_set_pms_area_0(r, w);
319 break;
320 case MEMPROT_PMS_AREA_DRAM0_1:
321 memprot_ll_dram0_set_pms_area_1(r, w);
322 break;
323 case MEMPROT_PMS_AREA_DRAM0_2:
324 memprot_ll_dram0_set_pms_area_2(r, w);
325 break;
326 case MEMPROT_PMS_AREA_DRAM0_3:
327 memprot_ll_dram0_set_pms_area_3(r, w);
328 break;
329 case MEMPROT_PMS_AREA_IRAM0_RTCFAST_LO:
330 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
331 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_set_pms_area(core, r, w, x, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_LOW)))
332 break;
333 case MEMPROT_PMS_AREA_IRAM0_RTCFAST_HI:
334 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
335 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_set_pms_area(core, r, w, x, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_HIGH)))
336 break;
337 case MEMPROT_PMS_AREA_ICACHE_0:
338 memprot_ll_icache_set_pms_area_0(r, w, x);
339 break;
340 case MEMPROT_PMS_AREA_ICACHE_1:
341 memprot_ll_icache_set_pms_area_1(r, w, x);
342 break;
343 default:
344 return ESP_ERR_NOT_SUPPORTED;
345 }
346
347 return ESP_OK;
348 }
349
esp_mprot_get_pms_area(const esp_mprot_pms_area_t area_type,uint32_t * flags,const int core)350 esp_err_t esp_mprot_get_pms_area(const esp_mprot_pms_area_t area_type, uint32_t *flags, const int core __attribute__((unused)))
351 {
352 if (flags == NULL) {
353 return ESP_ERR_INVALID_ARG;
354 }
355
356 esp_err_t err;
357 bool r = false;
358 bool w = false;
359 bool x = false;
360
361 switch (area_type) {
362 case MEMPROT_PMS_AREA_IRAM0_0:
363 memprot_ll_iram0_get_pms_area_0(&r, &w, &x);
364 break;
365 case MEMPROT_PMS_AREA_IRAM0_1:
366 memprot_ll_iram0_get_pms_area_1(&r, &w, &x);
367 break;
368 case MEMPROT_PMS_AREA_IRAM0_2:
369 memprot_ll_iram0_get_pms_area_2(&r, &w, &x);
370 break;
371 case MEMPROT_PMS_AREA_IRAM0_3:
372 memprot_ll_iram0_get_pms_area_3(&r, &w, &x);
373 break;
374 case MEMPROT_PMS_AREA_DRAM0_0:
375 memprot_ll_dram0_get_pms_area_0(&r, &w);
376 break;
377 case MEMPROT_PMS_AREA_DRAM0_1:
378 memprot_ll_dram0_get_pms_area_1(&r, &w);
379 break;
380 case MEMPROT_PMS_AREA_DRAM0_2:
381 memprot_ll_dram0_get_pms_area_2(&r, &w);
382 break;
383 case MEMPROT_PMS_AREA_DRAM0_3:
384 memprot_ll_dram0_get_pms_area_3(&r, &w);
385 break;
386 case MEMPROT_PMS_AREA_IRAM0_RTCFAST_LO:
387 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
388 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_pms_area(core, &r, &w, &x, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_LOW)))
389 break;
390 case MEMPROT_PMS_AREA_IRAM0_RTCFAST_HI:
391 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
392 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_pms_area(core, &r, &w, &x, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_HIGH)))
393 break;
394 case MEMPROT_PMS_AREA_ICACHE_0:
395 memprot_ll_icache_get_pms_area_0(&r, &w, &x);
396 break;
397 case MEMPROT_PMS_AREA_ICACHE_1:
398 memprot_ll_icache_get_pms_area_1(&r, &w, &x);
399 break;
400 default:
401 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
402 }
403
404 *flags = 0;
405 if (r) {
406 *flags |= MEMPROT_OP_READ;
407 }
408 if (w) {
409 *flags |= MEMPROT_OP_WRITE;
410 }
411 if (x) {
412 *flags |= MEMPROT_OP_EXEC;
413 }
414
415 return ESP_OK;
416 }
417
esp_mprot_set_monitor_lock(const esp_mprot_mem_t mem_type,const int core)418 esp_err_t esp_mprot_set_monitor_lock(const esp_mprot_mem_t mem_type, const int core)
419 {
420 esp_err_t err;
421 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
422
423 switch (mem_type) {
424 case MEMPROT_TYPE_IRAM0_SRAM:
425 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_set_monitor_lock(core)))
426 break;
427 case MEMPROT_TYPE_DRAM0_SRAM:
428 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_set_monitor_lock(core)))
429 break;
430 case MEMPROT_TYPE_IRAM0_RTCFAST:
431 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_set_monitor_lock(core)))
432 break;
433 default:
434 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
435 }
436
437 return ESP_OK;
438 }
439
esp_mprot_get_monitor_lock(const esp_mprot_mem_t mem_type,bool * locked,const int core)440 esp_err_t esp_mprot_get_monitor_lock(const esp_mprot_mem_t mem_type, bool *locked, const int core)
441 {
442 if (locked == NULL) {
443 return ESP_ERR_INVALID_ARG;
444 }
445
446 esp_err_t err;
447 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
448
449 switch (mem_type) {
450 case MEMPROT_TYPE_IRAM0_SRAM:
451 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_lock(core, locked)))
452 break;
453 case MEMPROT_TYPE_DRAM0_SRAM:
454 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_lock(core, locked)))
455 break;
456 case MEMPROT_TYPE_IRAM0_RTCFAST:
457 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_lock(core, locked)))
458 break;
459 default:
460 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
461 }
462
463 return ESP_OK;
464 }
465
esp_mprot_set_monitor_en(const esp_mprot_mem_t mem_type,const bool enable,const int core)466 esp_err_t esp_mprot_set_monitor_en(const esp_mprot_mem_t mem_type, const bool enable, const int core)
467 {
468 esp_err_t err;
469 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
470
471 switch (mem_type) {
472 case MEMPROT_TYPE_IRAM0_SRAM:
473 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_set_monitor_en(core, enable)))
474 break;
475 case MEMPROT_TYPE_DRAM0_SRAM:
476 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_set_monitor_en(core, enable)))
477 break;
478 case MEMPROT_TYPE_IRAM0_RTCFAST:
479 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_set_monitor_en(core, enable)))
480 break;
481 default:
482 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
483 }
484
485 return ESP_OK;
486 }
487
esp_mprot_get_monitor_en(esp_mprot_mem_t mem_type,bool * enabled,const int core)488 esp_err_t esp_mprot_get_monitor_en(esp_mprot_mem_t mem_type, bool *enabled, const int core)
489 {
490 if (enabled == NULL) {
491 return ESP_ERR_INVALID_ARG;
492 }
493
494 esp_err_t err;
495 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
496
497 switch (mem_type) {
498 case MEMPROT_TYPE_IRAM0_SRAM:
499 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_en(core, enabled)))
500 break;
501 case MEMPROT_TYPE_DRAM0_SRAM:
502 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_en(core, enabled)))
503 break;
504 case MEMPROT_TYPE_IRAM0_RTCFAST:
505 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_en(core, enabled)))
506 break;
507 default:
508 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
509 }
510
511 return ESP_OK;
512 }
513
514 //////////////////////////////////////////////////////////////////////////////
515 // PMS-violation interrupt handling APIs (IRAM section - called from panic-handler)
516
/**
 * Finds the first memory region/core pair whose PMS violation interrupt is
 * currently pending and stores it into *active_memp_intr. Only regions enabled
 * in the active Memprot config (s_memp_cfg.mem_type_mask) are scanned, in the
 * order IRAM0 -> DRAM0 -> RTCFAST, PRO CPU before APP CPU. If none is pending,
 * mem_type is set to MEMPROT_TYPE_NONE and core to -1.
 *
 * The do/while(0) exists solely so 'break' can exit the scan as soon as a
 * pending interrupt is found, with 'mt'/'c' holding the match.
 *
 * @return ESP_OK, ESP_ERR_INVALID_ARG on NULL argument, or a propagated LL error.
 */
esp_err_t esp_mprot_get_active_intr(esp_memp_intr_source_t *active_memp_intr)
{
    if (active_memp_intr == NULL) {
        return ESP_ERR_INVALID_ARG;
    }

    esp_mprot_mem_t mt;
    int c;

    do {
        uint32_t intr_on = 0;
        esp_err_t err;

        //IRAM0
        if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_IRAM0_SRAM) {

            mt = MEMPROT_TYPE_IRAM0_SRAM;

            c = PRO_CPU_NUM;
            ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_status_intr(c, &intr_on)))
            if (intr_on) {
                break; //found: mt/c already describe the source
            }

            //2-core
            if (s_memp_cfg.target_cpu_count > 1) {
                c = APP_CPU_NUM;
                ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_status_intr(c, &intr_on)))
                if (intr_on) {
                    break;
                }
            }
        }

        //DRAM0
        if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_DRAM0_SRAM) {

            mt = MEMPROT_TYPE_DRAM0_SRAM;

            c = PRO_CPU_NUM;
            ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_status_intr(c, &intr_on)))
            if (intr_on) {
                break;
            }

            //2-core
            if (s_memp_cfg.target_cpu_count > 1) {
                c = APP_CPU_NUM;
                ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_status_intr(c, &intr_on)))
                if (intr_on) {
                    break;
                }
            }
        }

        //RTCFAST
        if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_IRAM0_RTCFAST) {

            mt = MEMPROT_TYPE_IRAM0_RTCFAST;

            c = PRO_CPU_NUM;
            ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_status_intr(c, &intr_on)))
            if (intr_on) {
                break;
            }

            //2-core
            if (s_memp_cfg.target_cpu_count > 1) {
                c = APP_CPU_NUM;
                ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_status_intr(c, &intr_on)))
                if (intr_on) {
                    break;
                }
            }
        }

        //nothing pending in any scanned region
        mt = MEMPROT_TYPE_NONE;
        c = -1;

    } while (0);

    active_memp_intr->mem_type = mt;
    active_memp_intr->core = c;

    return ESP_OK;
}
603
esp_mprot_monitor_clear_intr(esp_mprot_mem_t mem_type,const int core)604 esp_err_t esp_mprot_monitor_clear_intr(esp_mprot_mem_t mem_type, const int core)
605 {
606 esp_err_t err;
607 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
608
609 switch (mem_type) {
610 case MEMPROT_TYPE_IRAM0_SRAM:
611 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_set_monitor_intrclr(core)))
612 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_reset_monitor_intrclr(core)))
613 break;
614 case MEMPROT_TYPE_DRAM0_SRAM:
615 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_set_monitor_intrclr(core)))
616 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_reset_monitor_intrclr(core)))
617 break;
618 case MEMPROT_TYPE_IRAM0_RTCFAST:
619 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_set_monitor_intrclr(core)))
620 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_reset_monitor_intrclr(core)))
621 break;
622 default:
623 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
624 }
625
626 return ESP_OK;
627 }
628
esp_mprot_is_conf_locked_any(bool * locked)629 esp_err_t esp_mprot_is_conf_locked_any(bool *locked)
630 {
631 if (locked == NULL) {
632 return ESP_ERR_INVALID_ARG;
633 }
634
635 bool lock_on = false;
636 esp_err_t err;
637
638 //IRAM0
639 if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_IRAM0_SRAM) {
640
641 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_split_addr_lock(MEMPROT_TYPE_IRAM0_SRAM, &lock_on, DEFAULT_CPU_NUM))
642 *locked |= lock_on;
643
644 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_pms_lock(MEMPROT_TYPE_IRAM0_SRAM, &lock_on, DEFAULT_CPU_NUM))
645 *locked |= lock_on;
646
647 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_lock(MEMPROT_TYPE_IRAM0_SRAM, &lock_on, PRO_CPU_NUM))
648 *locked |= lock_on;
649
650 //2-core
651 if (s_memp_cfg.target_cpu_count > 1) {
652 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_lock(MEMPROT_TYPE_IRAM0_SRAM, &lock_on, APP_CPU_NUM))
653 *locked |= lock_on;
654 }
655 }
656
657 //DRAM0
658 if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_DRAM0_SRAM) {
659
660 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_split_addr_lock(MEMPROT_TYPE_DRAM0_SRAM, &lock_on, DEFAULT_CPU_NUM))
661 *locked |= lock_on;
662
663 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_pms_lock(MEMPROT_TYPE_DRAM0_SRAM, &lock_on, DEFAULT_CPU_NUM))
664 *locked |= lock_on;
665
666 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_lock(MEMPROT_TYPE_DRAM0_SRAM, &lock_on, PRO_CPU_NUM));
667 *locked |= lock_on;
668
669 //2-core
670 if (s_memp_cfg.target_cpu_count > 1) {
671 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_lock(MEMPROT_TYPE_DRAM0_SRAM, &lock_on, APP_CPU_NUM));
672 *locked |= lock_on;
673 }
674 }
675
676 //RTCFAST
677 if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_IRAM0_RTCFAST) {
678
679 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_pms_lock(MEMPROT_TYPE_IRAM0_RTCFAST, &lock_on, PRO_CPU_NUM))
680 *locked |= lock_on;
681
682 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_lock(MEMPROT_TYPE_IRAM0_RTCFAST, &lock_on, PRO_CPU_NUM));
683 *locked |= lock_on;
684
685 //2-core
686 if (s_memp_cfg.target_cpu_count > 1) {
687 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_pms_lock(MEMPROT_TYPE_IRAM0_RTCFAST, &lock_on, APP_CPU_NUM))
688 *locked |= lock_on;
689
690 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_lock(MEMPROT_TYPE_IRAM0_RTCFAST, &lock_on, APP_CPU_NUM));
691 *locked |= lock_on;
692 }
693 }
694
695 return ESP_OK;
696 }
697
esp_mprot_is_intr_ena_any(bool * enabled)698 esp_err_t esp_mprot_is_intr_ena_any(bool *enabled)
699 {
700 if (enabled == NULL) {
701 return ESP_ERR_INVALID_ARG;
702 }
703
704 bool ena_on = false;
705 esp_err_t err;
706
707 //IRAM0
708 if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_IRAM0_SRAM) {
709 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_en(MEMPROT_TYPE_IRAM0_SRAM, &ena_on, PRO_CPU_NUM))
710 *enabled |= ena_on;
711 //2-core
712 if (s_memp_cfg.target_cpu_count > 1) {
713 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_en(MEMPROT_TYPE_IRAM0_SRAM, &ena_on, APP_CPU_NUM))
714 *enabled |= ena_on;
715 }
716 }
717
718 //DRAM0
719 if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_DRAM0_SRAM) {
720 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_en(MEMPROT_TYPE_DRAM0_SRAM, &ena_on, PRO_CPU_NUM))
721 *enabled |= ena_on;
722 //2-core
723 if (s_memp_cfg.target_cpu_count > 1) {
724 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_en(MEMPROT_TYPE_DRAM0_SRAM, &ena_on, APP_CPU_NUM))
725 *enabled |= ena_on;
726 }
727 }
728
729 //RTCFAST
730 if (s_memp_cfg.mem_type_mask & MEMPROT_TYPE_IRAM0_RTCFAST) {
731 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_en(MEMPROT_TYPE_IRAM0_RTCFAST, &ena_on, PRO_CPU_NUM))
732 *enabled |= ena_on;
733 //2-core
734 if (s_memp_cfg.target_cpu_count > 1) {
735 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_get_monitor_en(MEMPROT_TYPE_IRAM0_RTCFAST, &ena_on, APP_CPU_NUM))
736 *enabled |= ena_on;
737 }
738 }
739
740 return ESP_OK;
741 }
742
esp_mprot_get_violate_addr(const esp_mprot_mem_t mem_type,void ** fault_addr,const int core)743 esp_err_t esp_mprot_get_violate_addr(const esp_mprot_mem_t mem_type, void **fault_addr, const int core)
744 {
745 if (fault_addr == NULL) {
746 return ESP_ERR_INVALID_ARG;
747 }
748
749 esp_err_t err;
750 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
751
752 void *fault_addr_temp = NULL;
753
754 switch (mem_type) {
755 case MEMPROT_TYPE_IRAM0_SRAM:
756 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_status_fault_addr(core, &fault_addr_temp)))
757 break;
758 case MEMPROT_TYPE_DRAM0_SRAM:
759 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_status_fault_addr(core, &fault_addr_temp)))
760 break;
761 case MEMPROT_TYPE_IRAM0_RTCFAST:
762 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_status_fault_addr(core, &fault_addr_temp)))
763 break;
764 default:
765 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
766 }
767
768 *fault_addr = fault_addr_temp;
769
770 return ESP_OK;
771 }
772
esp_mprot_get_violate_world(const esp_mprot_mem_t mem_type,esp_mprot_pms_world_t * world,const int core)773 esp_err_t esp_mprot_get_violate_world(const esp_mprot_mem_t mem_type, esp_mprot_pms_world_t *world, const int core)
774 {
775 if (world == NULL) {
776 return ESP_ERR_INVALID_ARG;
777 }
778
779 esp_err_t err;
780 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
781
782 uint32_t regval = MEMPROT_PMS_WORLD_INVALID;
783
784 switch (mem_type) {
785 case MEMPROT_TYPE_IRAM0_SRAM:
786 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_status_fault_world(core, ®val)))
787 break;
788 case MEMPROT_TYPE_DRAM0_SRAM:
789 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_status_fault_world(core, ®val)))
790 break;
791 case MEMPROT_TYPE_IRAM0_RTCFAST:
792 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_status_fault_world(core, ®val)))
793 break;
794 default:
795 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
796 }
797
798 if (regval == MEMPROT_PMS_WORLD_INVALID) {
799 return ESP_ERR_MEMPROT_WORLD_INVALID;
800 }
801
802 *world = esp_mprot_ll_world_to_hl_world(regval);
803
804 return ESP_OK;
805 }
806
esp_mprot_get_violate_operation(const esp_mprot_mem_t mem_type,uint32_t * oper,const int core)807 esp_err_t esp_mprot_get_violate_operation(const esp_mprot_mem_t mem_type, uint32_t *oper, const int core)
808 {
809 if (oper == NULL) {
810 return ESP_ERR_INVALID_ARG;
811 }
812
813 esp_err_t err;
814 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
815
816 uint32_t operation = MEMPROT_OP_NONE;
817 uint32_t regval = 0xFFFFFFFF;
818
819 //check 1: LoadStore, 2: R/W
820 switch (mem_type) {
821 case MEMPROT_TYPE_IRAM0_SRAM: {
822 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_status_fault_loadstore(core, ®val)))
823 if (regval == 0) {
824 operation = MEMPROT_OP_EXEC;
825 } else {
826 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_iram0_get_monitor_status_fault_wr(core, ®val)))
827 operation = regval == 0 ? MEMPROT_OP_READ : MEMPROT_OP_WRITE;
828 }
829 } break;
830 case MEMPROT_TYPE_IRAM0_RTCFAST: {
831 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_status_fault_loadstore(core, ®val)))
832 if (regval == 0) {
833 operation = MEMPROT_OP_EXEC;
834 } else {
835 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_monitor_status_fault_wr(core, ®val)))
836 operation = regval == 0 ? MEMPROT_OP_READ : MEMPROT_OP_WRITE;
837 }
838 } break;
839 case MEMPROT_TYPE_DRAM0_SRAM: {
840 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_status_fault_wr(core, ®val)))
841 operation = regval == 0 ? MEMPROT_OP_WRITE : MEMPROT_OP_READ;
842 } break;
843 default:
844 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
845 }
846
847 *oper = operation;
848
849 return ESP_OK;
850 }
851
esp_mprot_has_byte_enables(const esp_mprot_mem_t mem_type)852 bool esp_mprot_has_byte_enables(const esp_mprot_mem_t mem_type)
853 {
854 return mem_type == MEMPROT_TYPE_DRAM0_SRAM;
855 }
856
esp_mprot_get_violate_byte_enables(const esp_mprot_mem_t mem_type,uint32_t * byte_en,const int core)857 esp_err_t esp_mprot_get_violate_byte_enables(const esp_mprot_mem_t mem_type, uint32_t *byte_en, const int core)
858 {
859 if (byte_en == NULL) {
860 return ESP_ERR_INVALID_ARG;
861 }
862
863 esp_err_t err;
864 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_cpuid_valid(core))
865
866 uint32_t byteen = 0xFFFFFFFF;
867
868 //NOTE: more memory types coming in future updates, hence using switch
869 switch (mem_type) {
870 case MEMPROT_TYPE_DRAM0_SRAM:
871 ESP_MEMPROT_ERR_CHECK(err, esp_mprot_ll_err_to_esp_err(memprot_ll_dram0_get_monitor_status_fault_byte_en(core, &byteen)))
872 break;
873 default:
874 return ESP_ERR_MEMPROT_MEMORY_TYPE_INVALID;
875 }
876
877 *byte_en = byteen;
878
879 return ESP_OK;
880 }
881
882 //////////////////////////////////////////////////////////////////////////////
883 // convenient "public" APIs
/**
 * Applies the given memory protection configuration to the chip.
 *
 * Flow (the ordering is significant for security):
 *   1. skip entirely when a debugger is attached (checked twice against glitching)
 *   2. sanity-check the configuration structure
 *   3. disable the monitors of all requested memory types (reconfiguration prerequisite)
 *   4. optionally route violation interrupts to the panic handler
 *   5. set the split lines and the PMS area permissions
 *   6. clear any pending violation interrupts and re-enable the monitors
 *   7. optionally lock the configuration (locks hold until reset)
 *   8. read back critical registers and abort() on any mismatch (anti-tampering check)
 *
 * On full success the configuration is stored into s_memp_cfg for later queries.
 *
 * @param memp_config memory protection configuration (see esp_memp_config_t)
 * @return ESP_OK on success (or when a debugger is attached), error code otherwise
 */
esp_err_t esp_mprot_set_prot(const esp_memp_config_t *memp_config)
{
    //debugger connected:
    // 1.check the signal repeatedly to avoid possible glitching attempt
    // 2.leave the Memprot unset to allow debug operations

    if (esp_cpu_dbgr_is_attached()) {
        ESP_FAULT_ASSERT(esp_cpu_dbgr_is_attached());
        return ESP_OK;
    }

    //sanity checks
    if (memp_config == NULL) {
        return ESP_ERR_INVALID_ARG;
    }
    if (memp_config->mem_type_mask == MEMPROT_TYPE_NONE) {
        return ESP_ERR_NO_MEM;
    }
    if (memp_config->target_cpu_count < 1 || memp_config->target_cpu_count > portNUM_PROCESSORS) {
        return ESP_ERR_MEMPROT_CPUID_INVALID;
    }

    //each requested CPU ID must be known to the current runtime configuration
    esp_err_t ret = ESP_OK;
    size_t core_count = memp_config->target_cpu_count;
    for (size_t x = 0; x < core_count; x++) {
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_cpuid_valid(memp_config->target_cpu[x]))
    }

    bool use_iram0 = memp_config->mem_type_mask & MEMPROT_TYPE_IRAM0_SRAM;
    bool use_dram0 = memp_config->mem_type_mask & MEMPROT_TYPE_DRAM0_SRAM;
    bool use_rtcfast = memp_config->mem_type_mask & MEMPROT_TYPE_IRAM0_RTCFAST;

    //disable protection (must be unlocked)
    if (use_iram0) {
        for (size_t x = 0; x < core_count; x++) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_en(MEMPROT_TYPE_IRAM0_SRAM, false, memp_config->target_cpu[x]))
        }
    }
    if (use_dram0) {
        for (size_t x = 0; x < core_count; x++) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_en(MEMPROT_TYPE_DRAM0_SRAM, false, memp_config->target_cpu[x]))
        }
    }
    if (use_rtcfast) {
        for (size_t x = 0; x < core_count; x++) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_en(MEMPROT_TYPE_IRAM0_RTCFAST, false, memp_config->target_cpu[x]))
        }
    }

    //panic handling
    if (memp_config->invoke_panic_handler) {
        if (use_iram0) {
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_intr_matrix(MEMPROT_TYPE_IRAM0_SRAM, memp_config->target_cpu[x]))
            }
        }
        if (use_dram0) {
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_intr_matrix(MEMPROT_TYPE_DRAM0_SRAM, memp_config->target_cpu[x]))
            }
        }
        if (use_rtcfast) {
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_intr_matrix(MEMPROT_TYPE_IRAM0_RTCFAST, memp_config->target_cpu[x]))
            }
        }
    }

    //set split lines (must-have for all mem_types). This version sets only the main I/D which is then shared for all PMS regions
    //NOTE: split lines below are set with DEFAULT_CPU_NUM - per this code they are chip-wide,
    //while the monitors above/below are configured per-core
    void *line_addr __attribute__((unused)) = NULL;
    if (use_iram0 || use_dram0) {
        line_addr = memp_config->split_addr;
        if (line_addr == NULL) {
            //no explicit split address given - fall back to the default (end of IRAM .text)
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_default_main_split_addr(MEMPROT_TYPE_IRAM0_SRAM, &line_addr))
        }

        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr(MEMPROT_TYPE_IRAM0_SRAM, MEMPROT_SPLIT_ADDR_IRAM0_LINE_1, line_addr, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr(MEMPROT_TYPE_IRAM0_SRAM, MEMPROT_SPLIT_ADDR_IRAM0_LINE_0, line_addr, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr(MEMPROT_TYPE_IRAM0_SRAM, MEMPROT_SPLIT_ADDR_IRAM0_DRAM0, line_addr, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr(MEMPROT_TYPE_DRAM0_SRAM, MEMPROT_SPLIT_ADDR_DRAM0_DMA_LINE_0, (void *)(MAP_IRAM_TO_DRAM((uint32_t)line_addr)), DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr(MEMPROT_TYPE_DRAM0_SRAM, MEMPROT_SPLIT_ADDR_DRAM0_DMA_LINE_1, (void *)(MAP_IRAM_TO_DRAM((uint32_t)line_addr)), DEFAULT_CPU_NUM))
    }

    //set permissions
    if (use_iram0) {
        //NOTE(review): ret is already ESP_OK here; this reset looks redundant but is kept as-is
        ret = ESP_OK;
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_ICACHE_0, MEMPROT_OP_NONE, DEFAULT_CPU_NUM));
#if CONFIG_ESP32S3_INSTRUCTION_CACHE_16KB
        //with a 16KB instruction cache the second ICACHE area is used for code - allow R/X
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_ICACHE_1, MEMPROT_OP_READ | MEMPROT_OP_EXEC, DEFAULT_CPU_NUM));
#else
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_ICACHE_1, MEMPROT_OP_NONE, DEFAULT_CPU_NUM));
#endif
        //IRAM0 areas 0-2 (below the split line): read + execute; area 3 (above): no access
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_IRAM0_0, MEMPROT_OP_READ | MEMPROT_OP_EXEC, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_IRAM0_1, MEMPROT_OP_READ | MEMPROT_OP_EXEC, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_IRAM0_2, MEMPROT_OP_READ | MEMPROT_OP_EXEC, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_IRAM0_3, MEMPROT_OP_NONE, DEFAULT_CPU_NUM))
    }
    if (use_dram0) {
        //DRAM0 area 0 (code region alias): read-only; areas 1-3 (data): read + write
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_DRAM0_0, MEMPROT_OP_READ, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_DRAM0_1, MEMPROT_OP_READ | MEMPROT_OP_WRITE, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_DRAM0_2, MEMPROT_OP_READ | MEMPROT_OP_WRITE, DEFAULT_CPU_NUM))
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_DRAM0_3, MEMPROT_OP_READ | MEMPROT_OP_WRITE, DEFAULT_CPU_NUM))
    }

    void *rtc_fast_line __attribute__((unused)) = NULL;
    if (use_rtcfast) {
        //RTCFAST split-line cannot be set manually - always use default
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_default_main_split_addr(MEMPROT_TYPE_IRAM0_RTCFAST, &rtc_fast_line));
        for (size_t x = 0; x < core_count; x++) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr(MEMPROT_TYPE_IRAM0_RTCFAST, MEMPROT_SPLIT_ADDR_MAIN, rtc_fast_line, memp_config->target_cpu[x]))
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_IRAM0_RTCFAST_LO, MEMPROT_OP_READ | MEMPROT_OP_EXEC, memp_config->target_cpu[x]))
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_area(MEMPROT_PMS_AREA_IRAM0_RTCFAST_HI, MEMPROT_OP_READ | MEMPROT_OP_WRITE, memp_config->target_cpu[x]))
        }
    }

    //reenable the protection
    if (use_iram0) {
        for (size_t x = 0; x < core_count; x++) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_monitor_clear_intr(MEMPROT_TYPE_IRAM0_SRAM, memp_config->target_cpu[x]))
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_en(MEMPROT_TYPE_IRAM0_SRAM, true, memp_config->target_cpu[x]))
        }
    }

    if (use_dram0) {
        for (size_t x = 0; x < core_count; x++) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_monitor_clear_intr(MEMPROT_TYPE_DRAM0_SRAM, memp_config->target_cpu[x]))
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_en(MEMPROT_TYPE_DRAM0_SRAM, true, memp_config->target_cpu[x]))
        }
    }
    if (use_rtcfast) {
        for (size_t x = 0; x < core_count; x++) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_monitor_clear_intr(MEMPROT_TYPE_IRAM0_RTCFAST, memp_config->target_cpu[x]))
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_en(MEMPROT_TYPE_IRAM0_RTCFAST, true, memp_config->target_cpu[x]))
        }
    }

    //lock if required
    if (memp_config->lock_feature) {
        if (use_iram0) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr_lock(MEMPROT_TYPE_IRAM0_SRAM, DEFAULT_CPU_NUM))
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_lock(MEMPROT_TYPE_IRAM0_SRAM, DEFAULT_CPU_NUM))
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_lock(MEMPROT_TYPE_IRAM0_SRAM, memp_config->target_cpu[x]))
            }
        }
        if (use_dram0) {
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_split_addr_lock(MEMPROT_TYPE_DRAM0_SRAM, DEFAULT_CPU_NUM))
            ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_lock(MEMPROT_TYPE_DRAM0_SRAM, DEFAULT_CPU_NUM))
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_lock(MEMPROT_TYPE_DRAM0_SRAM, memp_config->target_cpu[x]))
            }
        }
        if (use_rtcfast) {
            //split address and area permissions are locked by the same PMS register
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_pms_lock(MEMPROT_TYPE_IRAM0_RTCFAST, memp_config->target_cpu[x]))
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_set_monitor_lock(MEMPROT_TYPE_IRAM0_RTCFAST, memp_config->target_cpu[x]))
            }
        }
    }

    //sanity check (RTC FAST tbd within IDF-5208)
    //read every critical register back and compare against the expected value -
    //any mismatch means the configuration was tampered with, so abort() immediately
    if (use_iram0 || use_dram0) {

        uint32_t check_val;

        //IRAM0 split lines
        memprot_ll_prepare_iram0_split_line_regval((const uint32_t) line_addr, &check_val);

        if (memprot_ll_get_iram0_split_line_main_I_D_regval() != check_val) {
            esp_rom_printf(
                "Fatal error: Main I/D split line configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                check_val, memprot_ll_get_iram0_split_line_main_I_D_regval());
            abort();
        }
        if (memprot_ll_get_iram0_split_line_main_I_0_regval() != check_val) {
            esp_rom_printf(
                "Fatal error: IRAM0 I_0 split line configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                check_val, memprot_ll_get_iram0_split_line_main_I_0_regval());
            abort();
        }
        if (memprot_ll_get_iram0_split_line_main_I_1_regval() != check_val) {
            esp_rom_printf(
                "Fatal error: IRAM0 I_1 split line configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                check_val, memprot_ll_get_iram0_split_line_main_I_1_regval());
            abort();
        }

        //DRAM0 split lines
        memprot_ll_prepare_dram0_split_line_regval(MAP_IRAM_TO_DRAM((const uint32_t) line_addr), &check_val);

        if (memprot_ll_get_dram0_split_line_main_D_0_regval() != check_val) {
            esp_rom_printf(
                "Fatal error: DRAM0 D_0 split line configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                check_val, memprot_ll_get_dram0_split_line_main_D_0_regval());
            abort();
        }
        if (memprot_ll_get_dram0_split_line_main_D_1_regval() != check_val) {
            esp_rom_printf(
                "Fatal error: DRAM0 D_1 split line configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                check_val, memprot_ll_get_dram0_split_line_main_D_1_regval());
            abort();
        }

        //IRAM0 perms
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_IRAM0_0, &check_val, DEFAULT_CPU_NUM))
        if (check_val != (MEMPROT_OP_READ | MEMPROT_OP_EXEC)) {
            esp_rom_printf("Fatal error: IRAM0 PMS AREA_0 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_READ | MEMPROT_OP_EXEC, check_val);
            abort();
        }

        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_IRAM0_1, &check_val, DEFAULT_CPU_NUM))
        if (check_val != (MEMPROT_OP_READ | MEMPROT_OP_EXEC)) {
            esp_rom_printf("Fatal error: IRAM0 PMS AREA_1 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_READ | MEMPROT_OP_EXEC, check_val);
            abort();
        }

        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_IRAM0_2, &check_val, DEFAULT_CPU_NUM))
        if (check_val != (MEMPROT_OP_READ | MEMPROT_OP_EXEC)) {
            esp_rom_printf("Fatal error: IRAM0 PMS AREA_2 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_READ | MEMPROT_OP_EXEC, check_val);
            abort();
        }

        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_IRAM0_3, &check_val, DEFAULT_CPU_NUM))
        if (check_val != MEMPROT_OP_NONE) {
            esp_rom_printf("Fatal error: IRAM0 PMS AREA_3 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_NONE, check_val);
            abort();
        }

        //DRAM0 perms
        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_DRAM0_0, &check_val, DEFAULT_CPU_NUM))
        if (check_val != MEMPROT_OP_READ) {
            esp_rom_printf("Fatal error: DRAM0 PMS AREA_0 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_READ, check_val);
            abort();
        }

        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_DRAM0_1, &check_val, DEFAULT_CPU_NUM))
        if (check_val != (MEMPROT_OP_READ | MEMPROT_OP_WRITE)) {
            esp_rom_printf("Fatal error: DRAM0 PMS AREA_1 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_READ | MEMPROT_OP_WRITE, check_val);
            abort();
        }

        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_DRAM0_2, &check_val, DEFAULT_CPU_NUM))
        if (check_val != (MEMPROT_OP_READ | MEMPROT_OP_WRITE)) {
            esp_rom_printf("Fatal error: DRAM0 PMS AREA_2 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_READ | MEMPROT_OP_WRITE, check_val);
            abort();
        }

        ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_area(MEMPROT_PMS_AREA_DRAM0_3, &check_val, DEFAULT_CPU_NUM))
        if (check_val != (MEMPROT_OP_READ | MEMPROT_OP_WRITE)) {
            esp_rom_printf("Fatal error: DRAM0 PMS AREA_3 configuration corrupted (expected 0x%08X, stored 0x%08X)\n",
                           MEMPROT_OP_READ | MEMPROT_OP_WRITE, check_val);
            abort();
        }

        //memory protection enabled
        bool enabled;
        if (use_iram0) {
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_monitor_en(MEMPROT_TYPE_IRAM0_SRAM, &enabled,
                                      memp_config->target_cpu[x]))
                if (!enabled) {
                    esp_rom_printf(
                        "Fatal error: IRAM0 PMS configuration corrupted (memory protection not enabled on core %d)\n",
                        memp_config->target_cpu[x]);
                    abort();
                }
            }
        }
        if (use_dram0) {
            for (size_t x = 0; x < core_count; x++) {
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_monitor_en(MEMPROT_TYPE_DRAM0_SRAM, &enabled,
                                      memp_config->target_cpu[x]))
                if (!enabled) {
                    esp_rom_printf(
                        "Fatal error: DRAM0 PMS configuration corrupted (memory protection not enabled on core %d)\n",
                        memp_config->target_cpu[x]);
                    abort();
                }
            }
        }

        //locks
        if (memp_config->lock_feature) {

            bool locked;

            if (use_iram0) {
                ESP_MEMPROT_ERR_CHECK(ret,
                                      esp_mprot_get_split_addr_lock(MEMPROT_TYPE_IRAM0_SRAM, &locked, DEFAULT_CPU_NUM))
                if (!locked) {
                    esp_rom_printf(
                        "Fatal error: IRAM0 PMS configuration corrupted (memory protection not locked - split address lock)\n");
                    abort();
                }
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_lock(MEMPROT_TYPE_IRAM0_SRAM, &locked, DEFAULT_CPU_NUM))
                if (!locked) {
                    esp_rom_printf(
                        "Fatal error: IRAM0 PMS configuration corrupted (memory protection not locked - global PMS lock)\n");
                    abort();
                }
                for (size_t x = 0; x < core_count; x++) {
                    ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_monitor_lock(MEMPROT_TYPE_IRAM0_SRAM, &locked,
                                          memp_config->target_cpu[x]))
                    if (!locked) {
                        esp_rom_printf(
                            "Fatal error: IRAM0 PMS configuration corrupted (memory protection not locked - monitor lock on core %d)\n",
                            memp_config->target_cpu[x]);
                        abort();
                    }
                }
            }

            if (use_dram0) {
                ESP_MEMPROT_ERR_CHECK(ret,
                                      esp_mprot_get_split_addr_lock(MEMPROT_TYPE_DRAM0_SRAM, &locked, DEFAULT_CPU_NUM))
                if (!locked) {
                    esp_rom_printf(
                        "Fatal error: DRAM0 PMS configuration corrupted (memory protection not locked - split address lock)\n");
                    abort();
                }
                ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_pms_lock(MEMPROT_TYPE_DRAM0_SRAM, &locked, DEFAULT_CPU_NUM))
                if (!locked) {
                    esp_rom_printf(
                        "Fatal error: DRAM0 PMS configuration corrupted (memory protection not locked - global PMS lock)\n");
                    abort();
                }
                for (size_t x = 0; x < core_count; x++) {
                    ESP_MEMPROT_ERR_CHECK(ret, esp_mprot_get_monitor_lock(MEMPROT_TYPE_DRAM0_SRAM, &locked,
                                          memp_config->target_cpu[x]))
                    if (!locked) {
                        esp_rom_printf(
                            "Fatal error: DRAM0 PMS configuration corrupted (memory protection not locked - monitor lock on core %d)\n",
                            memp_config->target_cpu[x]);
                        abort();
                    }
                }
            }
        }
    }

    //keep current configuration copy if all went well
    if (ret == ESP_OK) {
        s_memp_cfg = *memp_config;
    }

    return ret;
}
1239
esp_mprot_dump_configuration(char ** dump_info_string)1240 esp_err_t esp_mprot_dump_configuration(char **dump_info_string)
1241 {
1242 if (dump_info_string == NULL) {
1243 return ESP_ERR_INVALID_ARG;
1244 }
1245
1246 *dump_info_string = (char *)calloc(1, 2048);
1247
1248 if (*dump_info_string == NULL) {
1249 return ESP_ERR_NO_MEM;
1250 }
1251
1252 sprintf(*dump_info_string,
1253 "Memory sections:\n"
1254 " _iram_text_start: 0x%08X\n _iram_text_end: 0x%08X\n",
1255 (uint32_t)&_iram_text_start, (uint32_t)&_iram_text_end);
1256
1257 uint32_t offset = strlen(*dump_info_string);
1258
1259 bool line_lock = memprot_ll_get_iram0_dram0_split_line_lock();
1260 uint32_t line_ID = (uint32_t)memprot_ll_get_iram0_split_line_main_I_D();
1261 uint32_t line_I0 = (uint32_t)memprot_ll_get_iram0_split_line_I_0();
1262 uint32_t line_I1 = (uint32_t)memprot_ll_get_iram0_split_line_I_1();
1263 uint32_t line_D0 = (uint32_t)memprot_ll_get_dram0_split_line_D_0();
1264 uint32_t line_D1 = (uint32_t)memprot_ll_get_dram0_split_line_D_1();
1265 uint32_t line_ID_cat = (uint32_t)memprot_ll_get_iram0_split_line_main_I_D_cat();
1266 uint32_t line_I0_cat = (uint32_t)memprot_ll_get_iram0_split_line_I_0_cat();
1267 uint32_t line_I1_cat = (uint32_t)memprot_ll_get_iram0_split_line_I_1_cat();
1268 uint32_t line_D0_cat = (uint32_t)memprot_ll_get_dram0_split_line_D_0_cat();
1269 uint32_t line_D1_cat = (uint32_t)memprot_ll_get_dram0_split_line_D_1_cat();
1270
1271 sprintf((*dump_info_string + offset),
1272 "Split line settings (lock=%u):\n"
1273 " IRAM0:\n line ID (main): 0x%08X (cat=0x%08X)\n line I0: 0x%08X (cat=0x%08X)\n line I1: 0x%08X (cat=0x%08X)\n"
1274 " DRAM0:\n line D0: 0x%08X (cat=0x%08X)\n line D1: 0x%08X (cat=0x%08X)\n",
1275 line_lock, line_ID, line_ID_cat, line_I0, line_I0_cat, line_I1, line_I1_cat, line_D0, line_D0_cat, line_D1, line_D1_cat);
1276
1277 offset = strlen(*dump_info_string);
1278
1279 void *line_RTC = NULL;
1280 esp_err_t err = esp_mprot_ll_err_to_esp_err(memprot_ll_get_rtcfast_split_line(PRO_CPU_NUM, MEMP_HAL_WORLD_0, &line_RTC));
1281 if (err != ESP_OK) {
1282 sprintf((*dump_info_string + offset), " RTCFAST:\n line main: N/A (world=0) - %s\n", esp_err_to_name(err));
1283 } else {
1284 sprintf((*dump_info_string + offset), " RTCFAST:\n line main: 0x%08X (world=0)\n", (uint32_t)line_RTC);
1285 }
1286 offset = strlen(*dump_info_string);
1287
1288 bool ar0i, ar1i, ar2i, ar3i;
1289 bool aw0i, aw1i, aw2i, aw3i;
1290 bool ax0i, ax1i, ax2i, ax3i;
1291 bool ar0d, ar1d, ar2d, ar3d;
1292 bool aw0d, aw1d, aw2d, aw3d;
1293
1294 bool pms_lock_i = memprot_ll_iram0_get_pms_lock();
1295 memprot_ll_iram0_get_pms_area_0(&ar0i, &aw0i, &ax0i);
1296 memprot_ll_iram0_get_pms_area_1(&ar1i, &aw1i, &ax1i);
1297 memprot_ll_iram0_get_pms_area_2(&ar2i, &aw2i, &ax2i);
1298 memprot_ll_iram0_get_pms_area_3(&ar3i, &aw3i, &ax3i);
1299
1300 bool pms_lock_d = memprot_ll_dram0_get_pms_lock();
1301 memprot_ll_dram0_get_pms_area_0(&ar0d, &aw0d);
1302 memprot_ll_dram0_get_pms_area_1(&ar1d, &aw1d);
1303 memprot_ll_dram0_get_pms_area_2(&ar2d, &aw2d);
1304 memprot_ll_dram0_get_pms_area_3(&ar3d, &aw3d);
1305
1306 bool rtc_line_lock_0;
1307 memprot_ll_get_pif_constraint_lock(PRO_CPU_NUM, &rtc_line_lock_0);
1308
1309 sprintf((*dump_info_string + offset),
1310 "PMS area settings:\n"
1311 " IRAM0 (lock=%u):\n area 0: r=%u,w=%u,x=%u\n area 1: r=%u,w=%u,x=%u\n area 2: r=%u,w=%u,x=%u\n area 3: r=%u,w=%u,x=%u\n"
1312 " DRAM0 (lock=%u):\n area 0: r=%u,w=%u\n area 1: r=%u,w=%u\n area 2: r=%u,w=%u\n area 3: r=%u,w=%u\n"
1313 " RTCFAST (PRO_CPU, lock=%u):\n",
1314 pms_lock_i, ar0i, aw0i, ax0i, ar1i, aw1i, ax1i, ar2i, aw2i, ax2i, ar3i, aw3i, ax3i,
1315 pms_lock_d, ar0d, aw0d, ar1d, aw1d, ar2d, aw2d, ar3d, aw3d, rtc_line_lock_0);
1316
1317 offset = strlen(*dump_info_string);
1318
1319 bool arl0rtc, awl0rtc, axl0rtc;
1320 bool arh0rtc, awh0rtc, axh0rtc;
1321
1322 err = esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_pms_area(PRO_CPU_NUM, &arl0rtc, &awl0rtc, &axl0rtc, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_LOW));
1323 if (err != ESP_OK) {
1324 sprintf((*dump_info_string + offset), " area low: N/A - %s\n", esp_err_to_name(err));
1325 } else {
1326 sprintf((*dump_info_string + offset), " area low: r=%u,w=%u,x=%u\n", arl0rtc, awl0rtc, axl0rtc);
1327 }
1328
1329 offset = strlen(*dump_info_string);
1330
1331 err = esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_pms_area(PRO_CPU_NUM, &arh0rtc, &awh0rtc, &axh0rtc, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_HIGH));
1332 if (err != ESP_OK) {
1333 sprintf((*dump_info_string + offset), " area high: N/A - %s\n", esp_err_to_name(err));
1334 } else {
1335 sprintf((*dump_info_string + offset), " area high: r=%u,w=%u,x=%u\n", arh0rtc, awh0rtc, axh0rtc);
1336 }
1337
1338 offset = strlen(*dump_info_string);
1339
1340 //2-CPU setup
1341 if (s_memp_cfg.target_cpu_count > 1) {
1342
1343 bool rtc_line_lock_1;
1344 memprot_ll_get_pif_constraint_lock(APP_CPU_NUM, &rtc_line_lock_1);
1345 sprintf((*dump_info_string + offset), " RTCFAST (APP_CPU, lock=%u):\n", rtc_line_lock_1);
1346
1347 offset = strlen(*dump_info_string);
1348
1349 err = esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_pms_area(APP_CPU_NUM, &arl0rtc, &awl0rtc, &axl0rtc, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_LOW));
1350 if (err != ESP_OK) {
1351 sprintf((*dump_info_string + offset), " area low: N/A - %s\n", esp_err_to_name(err));
1352 } else {
1353 sprintf((*dump_info_string + offset), " area low: r=%u,w=%u,x=%u\n", arl0rtc, awl0rtc, axl0rtc);
1354 }
1355
1356 offset = strlen(*dump_info_string);
1357
1358 err = esp_mprot_ll_err_to_esp_err(memprot_ll_rtcfast_get_pms_area(APP_CPU_NUM, &arh0rtc, &awh0rtc, &axh0rtc, MEMP_HAL_WORLD_0, MEMP_HAL_AREA_HIGH));
1359 if (err != ESP_OK) {
1360 sprintf((*dump_info_string + offset), " area high: N/A - %s\n", esp_err_to_name(err));
1361 } else {
1362 sprintf((*dump_info_string + offset), " area high: r=%u,w=%u,x=%u\n", arh0rtc, awh0rtc, axh0rtc);
1363 }
1364
1365 offset = strlen(*dump_info_string);
1366 }
1367
1368 return ESP_OK;
1369 }
1370