/*
 * SPDX-FileCopyrightText: 2015-2021 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifdef __ZEPHYR__
#include <zephyr/kernel.h>
#endif

#include <stdint.h>
#include "esp_rom_sys.h"
#include "soc/rtc.h"
#include "soc/rtc_cntl_reg.h"
#include "soc/timer_group_reg.h"
#include "rtc_clk_common.h"

/* Calibration of RTC_SLOW_CLK is performed using a special feature of TIMG0.
 * This feature counts the number of XTAL clock cycles within a given number of
 * RTC_SLOW_CLK cycles.
 *
 * The slow clock calibration feature has two modes of operation: one-off and
 * cycling. In cycling mode (which is enabled by default on SoC reset), counting
 * of XTAL cycles within one RTC_SLOW_CLK cycle is done continuously. Cycling
 * mode is enabled using the TIMG_RTC_CALI_START_CYCLING bit. In one-off mode,
 * counting is performed once, and the TIMG_RTC_CALI_RDY bit is set when
 * counting is done. One-off mode is enabled using the TIMG_RTC_CALI_START bit.
 */
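
/* The raw calibration result is the number of XTAL cycles counted. The slow
 * clock period then follows as a fixed point value (illustrative, assuming
 * RTC_CLK_CAL_FRACT == 19, i.e. Q13.19):
 *
 *     period_us = (xtal_cycles << RTC_CLK_CAL_FRACT) / (xtal_freq_mhz * slowclk_cycles)
 *
 * e.g. counting ~1221 cycles of a 40 MHz XTAL over a single 32.768 kHz slow
 * clock cycle corresponds to a period of ~30.5 us.
 */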

/**
 * @brief One-off clock calibration function used by rtc_clk_cal_internal
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
static uint32_t rtc_clk_cal_internal_oneoff(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    /* Another calibration process may already be running when this function is
     * called, so wait for it to finish first.
     */
    if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING)) {
        /**
         * Set a small timeout threshold so that the timeout triggers quickly.
         * The internal circuit is reset when the timeout occurs, which does not affect the next calibration.
         */
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, 1);
        while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)
               && !GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT));
    }

    /* Prepare calibration */
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);
    /* Figure out how long to wait for calibration to finish:
     * set the timeout threshold and estimate the expected delay */
    uint32_t expected_freq;
    if (cal_clk == RTC_CAL_32K_XTAL) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_X32K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = RTC_SLOW_CLK_FREQ_32K;
    } else if (cal_clk == RTC_CAL_8MD256) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_8MD256_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = RTC_SLOW_CLK_FREQ_8MD256;
    } else {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_90K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = RTC_SLOW_CLK_FREQ_90K;
    }
    uint32_t us_time_estimate = (uint32_t) (((uint64_t) slowclk_cycles) * 1000000 / expected_freq);
    /* Start calibration */
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);

    /* Wait for calibration to finish up to another us_time_estimate */
#ifdef __ZEPHYR__
    k_busy_wait(us_time_estimate);
#else
    esp_rom_delay_us(us_time_estimate);
#endif
    uint32_t cal_val;
    while (true) {
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)) {
            cal_val = REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);
            break;
        }
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT)) {
            cal_val = 0;
            break;
        }
    }

    return cal_val;
}

/**
 * @brief Cycling clock calibration function used by rtc_clk_cal_internal
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
static uint32_t rtc_clk_cal_internal_cycling(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    /* Check which clock is currently being calibrated and the configured number of cycles */
    rtc_cal_sel_t in_calibration_clk;
    in_calibration_clk = REG_GET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL);
    uint32_t cali_slowclk_cycles = REG_GET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX);
    /* If no calibration is in progress, the calibration period is 0, or a different
     * clock is being calibrated, restart cycling calibration over slowclk_cycles cycles */
    if (cali_slowclk_cycles == 0 || !GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING) || in_calibration_clk != cal_clk) {
        CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
        REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
        REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);
        SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
        cali_slowclk_cycles = slowclk_cycles;
    }

    /* Wait for calibration to finish */
    while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_CYCLING_DATA_VLD));
    uint32_t cal_val = REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);

    return cal_val;
}

/**
 * @brief Slow clock period calculation function used by rtc_clk_cal and rtc_clk_cal_cycling
 * @param xtal_cycles number of XTAL cycles counted
 * @param slowclk_cycles number of slow clock cycles to count
 * @return slow clock period
 */
static uint32_t rtc_clk_xtal_to_slowclk(uint64_t xtal_cycles, uint32_t slowclk_cycles)
{
    rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
    uint64_t divider = ((uint64_t)xtal_freq) * slowclk_cycles;
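    /* Round the Q(RTC_CLK_CAL_FRACT) fixed point quotient to the nearest
     * integer; the "- 1" makes exact halves round down */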
    uint64_t period_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT) + divider / 2 - 1) / divider;
    uint32_t period = (uint32_t)(period_64 & UINT32_MAX);
    return period;
}

/**
 * @brief Clock calibration function used by rtc_clk_cal and rtc_clk_cal_ratio
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @param cal_mode calibration mode: RTC_TIME_CAL_ONEOFF_MODE or RTC_TIME_CAL_CYCLING_MODE
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles, uint32_t cal_mode)
{
    /* On the ESP32-S2, choosing RTC_CAL_RTC_MUX results in calibration of
     * the 90k RTC clock regardless of the currently selected SLOW_CLK.
     * On the ESP32, it calibrated whichever SLOW_CLK source was selected.
     * The following code emulates the ESP32 behavior:
     */
    if (cal_clk == RTC_CAL_RTC_MUX) {
        rtc_slow_freq_t slow_freq = rtc_clk_slow_freq_get();
        if (slow_freq == RTC_SLOW_FREQ_32K_XTAL) {
            cal_clk = RTC_CAL_32K_XTAL;
        } else if (slow_freq == RTC_SLOW_FREQ_8MD256) {
            cal_clk = RTC_CAL_8MD256;
        }
    } else if (cal_clk == RTC_CAL_INTERNAL_OSC) {
        cal_clk = RTC_CAL_RTC_MUX;
    }

    /* Enable requested clock (90k clock is always on) */
    int dig_32k_xtal_state = REG_GET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN);
    if (cal_clk == RTC_CAL_32K_XTAL && !dig_32k_xtal_state) {
        REG_SET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN, 1);
    }

    if (cal_clk == RTC_CAL_8MD256) {
        SET_PERI_REG_MASK(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_CLK8M_D256_EN);
    }

    uint32_t cal_val;
    if (cal_mode == RTC_TIME_CAL_ONEOFF_MODE) {
        cal_val = rtc_clk_cal_internal_oneoff(cal_clk, slowclk_cycles);
    } else {
        cal_val = rtc_clk_cal_internal_cycling(cal_clk, slowclk_cycles);
    }

    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);

    REG_SET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN, dig_32k_xtal_state);

    if (cal_clk == RTC_CAL_8MD256) {
        CLEAR_PERI_REG_MASK(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_CLK8M_D256_EN);
    }

    return cal_val;
}

uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_ONEOFF_MODE);
    uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
    uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
    return ratio;
}

uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_ONEOFF_MODE);
    uint32_t period = rtc_clk_xtal_to_slowclk(xtal_cycles, slowclk_cycles);
    return period;
}
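
/* Illustrative usage (a sketch, not part of this driver): measure the
 * currently selected slow clock over e.g. 1024 of its cycles, then convert a
 * wall-clock duration into slow clock ticks:
 *
 *     uint32_t period = rtc_clk_cal(RTC_CAL_RTC_MUX, 1024);      // Q13.19 us per cycle
 *     uint64_t ticks  = rtc_time_us_to_slowclk(1000000, period); // ticks in ~1 s
 */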

uint32_t rtc_clk_cal_cycling(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles, RTC_TIME_CAL_CYCLING_MODE);
    uint32_t period = rtc_clk_xtal_to_slowclk(xtal_cycles, slowclk_cycles);
    return period;
}

uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
    /* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
     * TODO: fix overflow.
     */
    return (time_in_us << RTC_CLK_CAL_FRACT) / period;
}

uint64_t rtc_time_slowclk_to_us(uint64_t rtc_cycles, uint32_t period)
{
    return (rtc_cycles * period) >> RTC_CLK_CAL_FRACT;
}
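
/* Worked example (assuming RTC_CLK_CAL_FRACT == 19): with period = 16000000
 * (~30.5 us per cycle in Q13.19), rtc_time_us_to_slowclk(1000000, period)
 * gives 32768 cycles, and rtc_time_slowclk_to_us(32768, period) gives back
 * 1000000 us.
 */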

uint64_t rtc_time_get(void)
{
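    /* Trigger an update so that the current value of the 48-bit RTC counter
     * is latched into the TIME0 (low 32 bits) / TIME1 (high 16 bits) pair */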
    SET_PERI_REG_MASK(RTC_CNTL_TIME_UPDATE_REG, RTC_CNTL_TIME_UPDATE);
    uint64_t t = READ_PERI_REG(RTC_CNTL_TIME0_REG);
    t |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME1_REG)) << 32;
    return t;
}

uint64_t rtc_light_slp_time_get(void)
{
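    /* TIME_LOW0/HIGH0 hold the counter value latched at wakeup,
     * TIME_LOW1/HIGH1 the value latched at sleep entry */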
    uint64_t t_wake = READ_PERI_REG(RTC_CNTL_TIME_LOW0_REG);
    t_wake |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME_HIGH0_REG)) << 32;
    uint64_t t_slp = READ_PERI_REG(RTC_CNTL_TIME_LOW1_REG);
    t_slp |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME_HIGH1_REG)) << 32;
    return (t_wake - t_slp);
}

uint64_t rtc_deep_slp_time_get(void)
{
    uint64_t t_slp = READ_PERI_REG(RTC_CNTL_TIME_LOW1_REG);
    t_slp |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME_HIGH1_REG)) << 32;
    uint64_t t_wake = rtc_time_get();
    return (t_wake - t_slp);
}

void rtc_clk_wait_for_slow_cycle(void) // This function may not be useful any more
{
    SET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE);
    while (GET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE)) {
        esp_rom_delay_us(1);
    }
}

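/* Worked example for the conversion below (assuming RTC_CLK_CAL_FRACT == 19):
 * cal_val = 16000000, i.e. ~30.5 us per slow clock cycle in Q13.19, yields
 * 1000000 * 2^19 / 16000000 = 32768 Hz.
 */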
uint32_t rtc_clk_freq_cal(uint32_t cal_val)
{
    if (cal_val == 0) {
        return 0;   // cal_val is used as the denominator, so return 0 to indicate failure
    }
    return 1000000ULL * (1 << RTC_CLK_CAL_FRACT) / cal_val;
}