1 /*
2  * Copyright (c) 2024 Nordic Semiconductor ASA
3  * SPDX-License-Identifier: Apache-2.0
4  */
5 
6 #include <zephyr/arch/cpu.h>
7 #include <zephyr/arch/common/pm_s2ram.h>
8 #include <zephyr/linker/sections.h>
9 #include <zephyr/sys/util.h>
10 #include <hal/nrf_resetinfo.h>
11 #include "pm_s2ram.h"
12 
13 #include <cmsis_core.h>
14 
/* Number of elements in an NVIC_Type register array member, computed from the
 * CMSIS type itself (no NVIC instance is dereferenced at run time).
 */
#define NVIC_MEMBER_SIZE(member) ARRAY_SIZE(((NVIC_Type *)0)->member)
16 
/* Currently dynamic regions are only used in case of userspace or stack guard and
 * stack guard is not used by default on Cortex-M33 because there is a dedicated
 * mechanism for stack overflow detection. Unless those condition change we don't
 * need to store MPU content, it can just be reinitialized on resuming.
 *
 * The expansion is parenthesized so the macro can be used inside larger
 * expressions: without the parentheses, `!MPU_USE_DYNAMIC_REGIONS` would parse
 * as `(!IS_ENABLED(CONFIG_USERSPACE)) || IS_ENABLED(CONFIG_MPU_STACK_GUARD)`,
 * giving the wrong answer when only the stack guard is enabled.
 */
#define MPU_USE_DYNAMIC_REGIONS                                                                    \
	(IS_ENABLED(CONFIG_USERSPACE) || IS_ENABLED(CONFIG_MPU_STACK_GUARD))
23 
24 /* TODO: The num-mpu-regions property should be used. Needs to be added to dts bindings. */
25 #define MPU_MAX_NUM_REGIONS 16
26 
/* NVIC state preserved across the suspend-to-RAM cycle. Array sizes are taken
 * directly from the CMSIS NVIC_Type members so the backup always matches the
 * core's NVIC layout.
 */
typedef struct {
	/* NVIC components stored into RAM. */
	uint32_t ISER[NVIC_MEMBER_SIZE(ISER)]; /* Interrupt Set-Enable registers. */
	uint32_t ISPR[NVIC_MEMBER_SIZE(ISPR)]; /* Interrupt Set-Pending registers. */
	uint8_t IPR[NVIC_MEMBER_SIZE(IPR)];    /* Interrupt Priority registers (byte-wide). */
} _nvic_context_t;
33 
/* MPU state preserved across the suspend-to-RAM cycle (only used when dynamic
 * MPU regions are in play; see MPU_USE_DYNAMIC_REGIONS).
 */
typedef struct {
	uint32_t RNR;                       /* Region number selected at suspend time. */
	uint32_t RBAR[MPU_MAX_NUM_REGIONS]; /* Per-region base address registers. */
	uint32_t RLAR[MPU_MAX_NUM_REGIONS]; /* Per-region limit address registers. */
	uint32_t MAIR0;                     /* Memory attribute indirection register 0. */
	uint32_t MAIR1;                     /* Memory attribute indirection register 1. */
	uint32_t CTRL;                      /* MPU control register (holds the enable bit). */
} _mpu_context_t;
42 
/* Aggregate of all core peripheral state saved to RAM for suspend-to-RAM. */
struct backup {
	_nvic_context_t nvic_context;
	_mpu_context_t mpu_context;
};
47 
/* Placed in a __noinit section so the saved state is not zeroed/initialized on
 * the (reset-like) resume path and thus survives the suspend-to-RAM cycle.
 */
static __noinit struct backup backup_data;

/* Provided by the Zephyr ARM architecture layer; used to rebuild a purely
 * static MPU configuration on resume instead of restoring saved registers.
 */
extern void z_arm_configure_static_mpu_regions(void);
extern int z_arm_mpu_init(void);
52 
53 /* MPU registers cannot be simply copied because content of RBARx RLARx registers
54  * depends on region which is selected by RNR register.
55  */
/**
 * @brief Save the MPU configuration into @p backup before suspend-to-RAM.
 *
 * RBAR/RLAR only expose the region currently selected by RNR, so every region
 * is visited through the RNR indirection. When no dynamic regions are in use,
 * the configuration is fully static and is rebuilt on resume instead, so
 * nothing is saved here.
 *
 * @param backup Storage for the saved MPU state.
 */
static void mpu_suspend(_mpu_context_t *backup)
{
	/* Parenthesized defensively: if the macro expands to an unparenthesized
	 * `a || b`, a bare `!MACRO` would parse as `(!a) || b`.
	 */
	if (!(MPU_USE_DYNAMIC_REGIONS)) {
		return;
	}

	backup->RNR = MPU->RNR;

	for (uint8_t i = 0; i < MPU_MAX_NUM_REGIONS; i++) {
		MPU->RNR = i;
		backup->RBAR[i] = MPU->RBAR;
		backup->RLAR[i] = MPU->RLAR;
	}

	backup->MAIR0 = MPU->MAIR0;
	backup->MAIR1 = MPU->MAIR1;
	backup->CTRL = MPU->CTRL;
}
73 
/**
 * @brief Restore the MPU configuration from @p backup after resume.
 *
 * Without dynamic regions the saved state is not needed: the MPU is
 * re-initialized and the static regions are reprogrammed from scratch.
 * Otherwise every region is rewritten through the RNR indirection; CTRL
 * (which contains the enable bit) is written last so the MPU is only
 * re-enabled once all regions and attributes are back in place.
 *
 * @param backup MPU state previously saved by mpu_suspend().
 */
static void mpu_resume(_mpu_context_t *backup)
{
	/* Parenthesized defensively: if the macro expands to an unparenthesized
	 * `a || b`, a bare `!MACRO` would parse as `(!a) || b`.
	 */
	if (!(MPU_USE_DYNAMIC_REGIONS)) {
		z_arm_mpu_init();
		z_arm_configure_static_mpu_regions();
		return;
	}

	uint32_t rnr = backup->RNR;

	for (uint8_t i = 0; i < MPU_MAX_NUM_REGIONS; i++) {
		MPU->RNR = i;
		MPU->RBAR = backup->RBAR[i];
		MPU->RLAR = backup->RLAR[i];
	}

	MPU->MAIR0 = backup->MAIR0;
	MPU->MAIR1 = backup->MAIR1;
	MPU->RNR = rnr;
	MPU->CTRL = backup->CTRL;
}
95 
/* Save the NVIC enable, pending and priority registers into RAM.
 *
 * NOTE(review): the casts drop the volatile qualifier from the NVIC register
 * arrays so they can be passed to memcpy(); the access widths memcpy() uses
 * are implementation-defined and are assumed to be acceptable for these
 * registers — confirm for the toolchains in use.
 */
static void nvic_suspend(_nvic_context_t *backup)
{
	memcpy(backup->ISER, (uint32_t *)NVIC->ISER, sizeof(NVIC->ISER));
	memcpy(backup->ISPR, (uint32_t *)NVIC->ISPR, sizeof(NVIC->ISPR));
	memcpy(backup->IPR, (uint32_t *)NVIC->IPR, sizeof(NVIC->IPR));
}
102 
/* Restore the NVIC registers saved by nvic_suspend().
 *
 * NOTE(review): assumes ISER/ISPR have write-1-to-set semantics, so writing
 * the raw saved words re-enables/re-pends the recorded interrupts and zero
 * bits are no-ops; same volatile/access-width caveat as in nvic_suspend().
 */
static void nvic_resume(_nvic_context_t *backup)
{
	memcpy((uint32_t *)NVIC->ISER, backup->ISER, sizeof(NVIC->ISER));
	memcpy((uint32_t *)NVIC->ISPR, backup->ISPR, sizeof(NVIC->ISPR));
	memcpy((uint32_t *)NVIC->IPR, backup->IPR, sizeof(NVIC->IPR));
}
109 
/**
 * @brief Enter suspend-to-RAM, restoring saved peripheral state on resume.
 *
 * Saves the NVIC and MPU state that is lost while powered off, then hands
 * control to the architecture layer to run @p system_off. On a successful
 * resume the saved state is written back; on failure the state is left
 * untouched and the error is propagated.
 *
 * @param system_off Function that powers the system down.
 *
 * @return Negative error code if the suspend attempt failed, otherwise the
 *	   (non-negative) value returned by arch_pm_s2ram_suspend().
 */
int soc_s2ram_suspend(pm_s2ram_system_off_fn_t system_off)
{
	int err;

	nvic_suspend(&backup_data.nvic_context);
	mpu_suspend(&backup_data.mpu_context);

	err = arch_pm_s2ram_suspend(system_off);
	if (err >= 0) {
		/* Resumed from RAM: bring the hardware state back. */
		mpu_resume(&backup_data.mpu_context);
		nvic_resume(&backup_data.nvic_context);
	}

	return err;
}
126 
/* Set the suspend-to-RAM marker.
 *
 * On this SoC the resume marker is read from the RESETINFO peripheral (see
 * pm_s2ram_mark_check_and_clear()), so there is nothing to store here; this
 * naked override simply returns.
 */
void __attribute__((naked)) pm_s2ram_mark_set(void)
{
	/* empty */
	__asm__ volatile("bx	lr\n");
}
132 
/* Check whether this boot is a resume from suspend-to-RAM and consume the
 * hardware markers so a later reset is not misread as a resume.
 *
 * Returns true only when RESETINFO.RESETREAS.LOCAL equals the "unretained"
 * wakeup mask AND RESETINFO.RESTOREVALID matches the restore-valid mask;
 * both registers are cleared as they are read (RESETREAS only when it
 * matched).
 *
 * Naked + hand-written asm: presumably runs on the early resume path before
 * the C environment is fully set up — TODO confirm against the caller.
 *
 * NOTE(review): GCC documents that only basic asm (without operands) is
 * supported in naked functions; this relies on the extended-asm inputs being
 * materialized without a prologue. Also "r1" and "r4" are listed as clobbers
 * but never written by the asm — verify whether they can be dropped, and
 * whether the non-local label "exit" should be a numeric local label.
 */
bool __attribute__((naked)) pm_s2ram_mark_check_and_clear(void)
{
	__asm__ volatile(
		/* Set return value to 0 */
		"mov	r0, #0\n"

		/* Load and check RESETREAS register */
		"ldr	r3, [%[resetinfo_addr], %[resetreas_offs]]\n"
		"cmp	r3, %[resetreas_unretained_mask]\n"

		/* Not an unretained wakeup: return false, RESETREAS untouched */
		"bne	exit\n"

		/* Clear RESETREAS register */
		"str	r0, [%[resetinfo_addr], %[resetreas_offs]]\n"

		/* Load RESTOREVALID register */
		"ldr	r3, [%[resetinfo_addr], %[restorevalid_offs]]\n"

		/* Clear RESTOREVALID */
		"str	r0, [%[resetinfo_addr], %[restorevalid_offs]]\n"

		/* Check RESTOREVALID register */
		"cmp	r3, %[restorevalid_present_mask]\n"
		"bne	exit\n"

		/* Set return value to 1 */
		"mov	r0, #1\n"

		"exit:\n"
		"bx	lr\n"
		:
		: [resetinfo_addr] "r"(NRF_RESETINFO),
		  [resetreas_offs] "r"(offsetof(NRF_RESETINFO_Type, RESETREAS.LOCAL)),
		  [resetreas_unretained_mask] "r"(NRF_RESETINFO_RESETREAS_LOCAL_UNRETAINED_MASK),
		  [restorevalid_offs] "r"(offsetof(NRF_RESETINFO_Type, RESTOREVALID)),
		  [restorevalid_present_mask] "r"(RESETINFO_RESTOREVALID_RESTOREVALID_Msk)

		: "r0", "r1", "r3", "r4", "memory");
}
172