1/*
2 * SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7#include "soc/soc.h"
8#include "riscv/rvsleep-frames.h"
9#include "soc/soc_caps.h"
10#include "sdkconfig.h"
11
12#if !CONFIG_IDF_TARGET_ESP32C6 && !CONFIG_IDF_TARGET_ESP32H2
13#include "soc/lp_aon_reg.h"
14#include "soc/extmem_reg.h"
15#endif
16
    /* A single word holding the pointer to the RvCoreCriticalSleepFrame
       object used by the save/restore routines below. It is placed in a
       writable data section and initialized to 0; a NULL value makes the
       restore routine skip restoration entirely.
       NOTE(review): presumably filled in by the C sleep code before
       rv_core_critical_regs_save is called — confirm against the caller. */
    .section    .data1,"aw"
    .global     rv_core_critical_regs_frame
    .type       rv_core_critical_regs_frame,@object
    .align      4
rv_core_critical_regs_frame:
    .word       0
23
24/*
25--------------------------------------------------------------------------------
26    This assembly subroutine is used to save the critical registers of the CPU
27    core to the internal RAM before sleep, and modify the PMU control flag to
28    indicate that the system needs to sleep. When the subroutine returns, it
29    will return the memory pointer that saves the context information of the CPU
30    critical registers.
31--------------------------------------------------------------------------------
32*/
33
    .section    .iram1,"ax"
    .global     rv_core_critical_regs_save
    .type       rv_core_critical_regs_save,@function
    .align      4

/*
 * RvCoreCriticalSleepFrame *rv_core_critical_regs_save(void)
 *
 * Saves the RISC-V core's general-purpose registers and key machine-mode
 * CSRs (mstatus, mtvec, mcause, mtval, mie, mip, mepc) into the frame
 * pointed to by rv_core_critical_regs_frame, marks the frame's PMUFUNC
 * field as "going to sleep", and returns the frame pointer in a0.
 * Must be entered inside a critical section. mscratch is used as
 * temporary storage; all GPRs used as scratch are restored before `ret`.
 */
rv_core_critical_regs_save:

    /* arrived here in critical section. we need:
       save riscv core critical registers to RvCoreCriticalSleepFrame
     */
    csrw    mscratch, t0        /* park t0 in mscratch so it can be used as the frame pointer */
    la      t0, rv_core_critical_regs_frame
    lw      t0, 0(t0)           /* t0 = pointer to RvCoreCriticalSleepFrame object */

    sw      ra, RV_SLP_CTX_RA(t0)
    sw      sp, RV_SLP_CTX_SP(t0)
    sw      gp, RV_SLP_CTX_GP(t0)
    sw      tp, RV_SLP_CTX_TP(t0)
    sw      t1, RV_SLP_CTX_T1(t0)
    sw      t2, RV_SLP_CTX_T2(t0)
    sw      s0, RV_SLP_CTX_S0(t0)
    sw      s1, RV_SLP_CTX_S1(t0)

    /* a0 is caller saved, so it does not need to be saved, but it should be the
       pointer value of RvCoreCriticalSleepFrame for return. Storing it here
       means the restore path leaves a0 = frame pointer as well.
     */
    mv      a0, t0
    sw      a0, RV_SLP_CTX_A0(t0)

    sw      a1, RV_SLP_CTX_A1(t0)
    sw      a2, RV_SLP_CTX_A2(t0)
    sw      a3, RV_SLP_CTX_A3(t0)
    sw      a4, RV_SLP_CTX_A4(t0)
    sw      a5, RV_SLP_CTX_A5(t0)
    sw      a6, RV_SLP_CTX_A6(t0)
    sw      a7, RV_SLP_CTX_A7(t0)
    sw      s2, RV_SLP_CTX_S2(t0)
    sw      s3, RV_SLP_CTX_S3(t0)
    sw      s4, RV_SLP_CTX_S4(t0)
    sw      s5, RV_SLP_CTX_S5(t0)
    sw      s6, RV_SLP_CTX_S6(t0)
    sw      s7, RV_SLP_CTX_S7(t0)
    sw      s8, RV_SLP_CTX_S8(t0)
    sw      s9, RV_SLP_CTX_S9(t0)
    sw      s10, RV_SLP_CTX_S10(t0)
    sw      s11, RV_SLP_CTX_S11(t0)
    sw      t3, RV_SLP_CTX_T3(t0)
    sw      t4, RV_SLP_CTX_T4(t0)
    sw      t5, RV_SLP_CTX_T5(t0)
    sw      t6, RV_SLP_CTX_T6(t0)

    /* Save machine-mode CSRs (t1/t2/t3 are already stored above, so they
       are free to use as scratch here). */
    csrr    t1, mstatus
    sw      t1, RV_SLP_CTX_MSTATUS(t0)
    csrr    t2, mtvec
    sw      t2, RV_SLP_CTX_MTVEC(t0)
    csrr    t3, mcause
    sw      t3, RV_SLP_CTX_MCAUSE(t0)

    csrr    t1, mtval
    sw      t1, RV_SLP_CTX_MTVAL(t0)
    csrr    t2, mie
    sw      t2, RV_SLP_CTX_MIE(t0)
    csrr    t3, mip
    sw      t3, RV_SLP_CTX_MIP(t0)
    csrr    t1, mepc
    sw      t1, RV_SLP_CTX_MEPC(t0)

    /*
    !!! Let IDF know it is going to sleep !!!

    RV_SLP_STK_PMUFUNC field is used to identify whether it is going to sleep or
    has just been awakened.  We use the lowest 2 bits as indication information,
    3 means being awakened, 1 means going to sleep.
    */
    li      t1, ~0x3                    /* mask to clear the 2 indication bits */
    lw      t2, RV_SLP_CTX_PMUFUNC(t0)
    and     t2, t1, t2
    ori     t2, t2, 0x1                 /* 1 = going to sleep */
    sw      t2, RV_SLP_CTX_PMUFUNC(t0)

    /* Keep the frame pointer in t3 (t3 is already saved to the frame) so
       the original t0 can be recovered from mscratch and stored too. */
    mv      t3, t0
    csrr    t0, mscratch
    sw      t0, RV_SLP_CTX_T0(t3)

#if !CONFIG_IDF_TARGET_ESP32C6 && !CONFIG_IDF_TARGET_ESP32H2
    /* writeback dcache is required here so the frame contents actually
       reach RAM before sleep!!! */
    la      t0, EXTMEM_CACHE_SYNC_MAP_REG
    li      t1, 0x10
    sw      t1, 0x0(t0)                     /* write 0x10 (only bit 4 set) to EXTMEM_CACHE_SYNC_MAP_REG;
                                               NOTE(review): full write, not read-modify-write — confirm
                                               intended sync-map selection against the TRM */
    la      t2, EXTMEM_CACHE_SYNC_ADDR_REG
    sw      zero, 0x0(t2)                   /* clear EXTMEM_CACHE_SYNC_ADDR_REG */
    la      t0, EXTMEM_CACHE_SYNC_SIZE_REG
    sw      zero, 0x0(t0)                   /* clear EXTMEM_CACHE_SYNC_SIZE_REG */

    la      t1, EXTMEM_CACHE_SYNC_CTRL_REG
    lw      t2, 0x0(t1)
    ori     t2, t2, 0x4                     /* set bit 2 in SYNC_CTRL to trigger the writeback */
    sw      t2, 0x0(t1)

    li      t0, 0x10                        /* SYNC_DONE bit */
wait_sync_done:
    lw      t2, 0x0(t1)
    and     t2, t0, t2
    beqz    t2, wait_sync_done              /* busy-poll until SYNC_DONE is set */
#endif

    /* Restore the scratch registers clobbered above from the frame.
       t3 holds the frame pointer, so it must be reloaded last. */
    lw      t0, RV_SLP_CTX_T0(t3)
    lw      t1, RV_SLP_CTX_T1(t3)
    lw      t2, RV_SLP_CTX_T2(t3)
    lw      t3, RV_SLP_CTX_T3(t3)

    ret                                     /* a0 still = frame pointer (return value) */

    .size   rv_core_critical_regs_save, . - rv_core_critical_regs_save
148
149
/* CSR addresses and bit definitions for the performance counter
   event/mode registers (custom user-level CSRs on this core).
   NOTE(review): none of these are referenced within this file — presumably
   kept for cycle-counting/debug code elsewhere; confirm before removing. */
#define CSR_PCER_U              0x800
#define CSR_PCMR_U              0x801
#define PCER_CYCLES             (1<<0)  /* count clock cycles */
#define PCMR_GLOBAL_EN          (1<<0)  /* enable count */
#define pcer                    CSR_PCER_U
#define pcmr                    CSR_PCMR_U
156
157/*
158--------------------------------------------------------------------------------
159    This assembly subroutine is used to restore the CPU core critical register
160    context before sleep after system wakes up, modify the PMU control
161    information, and return the critical register context memory object pointer.
162    After the subroutine returns, continue to restore other modules of the
163    system.
164--------------------------------------------------------------------------------
165*/
166
    .section    .iram1,"ax"
    .global     rv_core_critical_regs_restore
    .weak       rv_core_critical_regs_restore
    .type       rv_core_critical_regs_restore,@function
    .global     _rv_core_critical_regs_restore
    .type       _rv_core_critical_regs_restore,@function
    .align      4

/*
 * void rv_core_critical_regs_restore(void)   (weak; strong alias below)
 *
 * Restores the machine-mode CSRs and general-purpose registers from the
 * frame pointed to by rv_core_critical_regs_frame, after first marking
 * the frame's PMUFUNC field as "awakened". If the frame pointer is NULL,
 * restoration is skipped entirely. On return, a0 holds the frame pointer
 * (it was saved with that value by rv_core_critical_regs_save).
 */
_rv_core_critical_regs_restore: /* export a strong symbol to jump to here, used
                                 * for a static callback */
    nop

rv_core_critical_regs_restore:

    la      t0, rv_core_critical_regs_frame
    lw      t0, 0(t0)           /* t0 = pointer to RvCoreCriticalSleepFrame object */
    beqz    t0, .skip_restore   /* make sure we do not jump to zero address */

    /*
    !!! Let IDF know it is awake from sleep !!!

    RV_SLP_STK_PMUFUNC field is used to identify whether it is going to sleep or
    has just been awakened.  We use the lowest 2 bits as indication information,
    3 means being awakened, 1 means going to sleep.
    */
    lw      t1, RV_SLP_CTX_PMUFUNC(t0)
    ori     t1, t1, 0x3         /* set both bits: 3 = awakened */
    sw      t1, RV_SLP_CTX_PMUFUNC(t0)

    /* Restore CSRs first, while t1-t3 are still free as scratch. */
    lw      t2, RV_SLP_CTX_MEPC(t0)
    csrw    mepc, t2
    lw      t3, RV_SLP_CTX_MIP(t0)
    csrw    mip, t3
    lw      t1, RV_SLP_CTX_MIE(t0)
    csrw    mie, t1
    lw      t2, RV_SLP_CTX_MSTATUS(t0)
    csrw    mstatus, t2

    lw      t3, RV_SLP_CTX_MTVEC(t0)
    csrw    mtvec, t3
    lw      t1, RV_SLP_CTX_MCAUSE(t0)
    csrw    mcause, t1
    lw      t2, RV_SLP_CTX_MTVAL(t0)
    csrw    mtval, t2

    /* Restore GPRs in reverse of the save order; t0 is the frame base and
       therefore must be reloaded last. a0 comes back as the frame pointer. */
    lw      t6, RV_SLP_CTX_T6(t0)
    lw      t5, RV_SLP_CTX_T5(t0)
    lw      t4, RV_SLP_CTX_T4(t0)
    lw      t3, RV_SLP_CTX_T3(t0)
    lw      s11, RV_SLP_CTX_S11(t0)
    lw      s10, RV_SLP_CTX_S10(t0)
    lw      s9, RV_SLP_CTX_S9(t0)
    lw      s8, RV_SLP_CTX_S8(t0)
    lw      s7, RV_SLP_CTX_S7(t0)
    lw      s6, RV_SLP_CTX_S6(t0)
    lw      s5, RV_SLP_CTX_S5(t0)
    lw      s4, RV_SLP_CTX_S4(t0)
    lw      s3, RV_SLP_CTX_S3(t0)
    lw      s2, RV_SLP_CTX_S2(t0)
    lw      a7, RV_SLP_CTX_A7(t0)
    lw      a6, RV_SLP_CTX_A6(t0)
    lw      a5, RV_SLP_CTX_A5(t0)
    lw      a4, RV_SLP_CTX_A4(t0)
    lw      a3, RV_SLP_CTX_A3(t0)
    lw      a2, RV_SLP_CTX_A2(t0)
    lw      a1, RV_SLP_CTX_A1(t0)
    lw      a0, RV_SLP_CTX_A0(t0)
    lw      s1, RV_SLP_CTX_S1(t0)
    lw      s0, RV_SLP_CTX_S0(t0)
    lw      t2, RV_SLP_CTX_T2(t0)
    lw      t1, RV_SLP_CTX_T1(t0)
    lw      tp, RV_SLP_CTX_TP(t0)
    lw      gp, RV_SLP_CTX_GP(t0)
    lw      sp, RV_SLP_CTX_SP(t0)
    lw      ra, RV_SLP_CTX_RA(t0)
    lw      t0, RV_SLP_CTX_T0(t0)

.skip_restore:
    ret

    .size   rv_core_critical_regs_restore, . - rv_core_critical_regs_restore
248