/*
 * SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <stdint.h>
#include <assert.h>
#include "esp32c2/rom/ets_sys.h"
#include "soc/rtc.h"
#include "soc/rtc_cntl_reg.h"
#include "hal/clk_tree_ll.h"
#include "hal/rtc_cntl_ll.h"
#include "soc/timer_group_reg.h"
#include "esp_rom_sys.h"
/* Calibration of RTC_SLOW_CLK is performed using a special feature of TIMG0.
 * This feature counts the number of XTAL clock cycles within a given number of
 * RTC_SLOW_CLK cycles.
 *
 * The slow clock calibration feature has two modes of operation: one-off and cycling.
 * In cycling mode (which is enabled by default on SoC reset), counting of XTAL
 * cycles within an RTC_SLOW_CLK cycle is done continuously. Cycling mode is enabled
 * using the TIMG_RTC_CALI_START_CYCLING bit. In one-off mode, counting is performed
 * once, and the TIMG_RTC_CALI_RDY bit is set when counting is done. One-off mode is
 * enabled using the TIMG_RTC_CALI_START bit.
 */
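
/* For reference, a minimal usage sketch (not part of this driver, shown only as a comment;
 * it assumes the fixed-point format defined by RTC_CLK_CAL_FRACT used throughout this file):
 *
 *   uint32_t period = rtc_clk_cal(RTC_CAL_RTC_MUX, 1024); // slow clock period, in us * 2^RTC_CLK_CAL_FRACT
 *   if (period != 0) {                                    // 0 means calibration failed or timed out
 *       uint32_t freq_hz = rtc_clk_freq_cal(period);      // approximate slow clock frequency in Hz
 *   }
 */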

/**
 * @brief Clock calibration function used by rtc_clk_cal and rtc_clk_cal_ratio
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    /* On ESP32-C2, choosing RTC_CAL_RTC_MUX results in calibration of
     * the 150k RTC clock regardless of the currently selected SLOW_CLK.
     * On the ESP32, it used the currently selected SLOW_CLK.
     * The following code emulates ESP32 behavior:
     */
    if (cal_clk == RTC_CAL_RTC_MUX) {
        soc_rtc_slow_clk_src_t slow_clk_src = rtc_clk_slow_src_get();
        if (slow_clk_src == SOC_RTC_SLOW_CLK_SRC_OSC_SLOW) {
            cal_clk = RTC_CAL_32K_OSC_SLOW;
        } else if (slow_clk_src == SOC_RTC_SLOW_CLK_SRC_RC_FAST_D256) {
            cal_clk = RTC_CAL_8MD256;
        }
    }
    /* Enable requested clock (150k clock is always on) */
    bool dig_ext_clk_enabled = clk_ll_xtal32k_digi_is_enabled();
    if (cal_clk == RTC_CAL_32K_OSC_SLOW && !dig_ext_clk_enabled) {
        clk_ll_xtal32k_digi_enable();
    }

    bool rc_fast_enabled = clk_ll_rc_fast_is_enabled();
    bool rc_fast_d256_enabled = clk_ll_rc_fast_d256_is_enabled();
    if (cal_clk == RTC_CAL_8MD256) {
        rtc_clk_8m_enable(true, true);
        clk_ll_rc_fast_d256_digi_enable();
    }
    /* Another calibration process may already be running when this function is called,
     * so wait until the previous process is done.
     */
    if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING)) {
        /**
         * Set a small timeout threshold to accelerate the generation of the timeout.
         * The internal circuit will be reset when the timeout occurs and will not affect the next calibration.
         */
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, 1);
        while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)
               && !GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT));
    }

    /* Prepare calibration */
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);
    /* Figure out how long to wait for calibration to finish */

    /* Set the timeout register and the expected time delay */
    uint32_t expected_freq;
    if (cal_clk == RTC_CAL_32K_OSC_SLOW) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_X32K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = SOC_CLK_OSC_SLOW_FREQ_APPROX;
    } else if (cal_clk == RTC_CAL_8MD256) {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_8MD256_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = SOC_CLK_RC_FAST_D256_FREQ_APPROX;
    } else {
        REG_SET_FIELD(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT_THRES, RTC_SLOW_CLK_150K_CAL_TIMEOUT_THRES(slowclk_cycles));
        expected_freq = SOC_CLK_RC_SLOW_FREQ_APPROX;
    }
    uint32_t us_time_estimate = (uint32_t) (((uint64_t) slowclk_cycles) * MHZ / expected_freq);
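    /* Illustrative arithmetic (hypothetical numbers, not chip-specific): calibrating over
     * 3000 slow clock cycles at an expected frequency of 100 kHz gives an estimated wait of
     * 3000 * 1000000 / 100000 = 30000 us. */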
    /* Start calibration */
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);

    /* Wait for calibration to finish, up to another us_time_estimate */
    esp_rom_delay_us(us_time_estimate);
    uint32_t cal_val;
    while (true) {
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)) {
            cal_val = REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);
            break;
        }
        if (GET_PERI_REG_MASK(TIMG_RTCCALICFG2_REG(0), TIMG_RTC_CALI_TIMEOUT)) {
            cal_val = 0; // timed out: return 0 to indicate failure
            break;
        }
    }
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);

    /* If the digital XTAL32K clock was originally off and was only enabled for calibration,
     * set it back to the off state */
    if (cal_clk == RTC_CAL_32K_OSC_SLOW && !dig_ext_clk_enabled) {
        clk_ll_xtal32k_digi_disable();
    }

    if (cal_clk == RTC_CAL_8MD256) {
        clk_ll_rc_fast_d256_digi_disable();
        rtc_clk_8m_enable(rc_fast_enabled, rc_fast_d256_enabled);
    }

    return cal_val;
}

uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    assert(slowclk_cycles);
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
    uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
    uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
    return ratio;
}
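
/* Illustrative example (hypothetical numbers, assuming RTC_CLK_CAL_FRACT == 19): with a
 * 40 MHz XTAL and an ideal 32.768 kHz slow clock, the result is
 * (40000000 / 32768) * 2^19 = 1220.703125 * 524288 = 640000000, i.e. the XTAL-to-slow-clock
 * frequency ratio in Q13.19 fixed point. */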

static inline bool rtc_clk_cal_32k_valid(rtc_xtal_freq_t xtal_freq, uint32_t slowclk_cycles, uint64_t actual_xtal_cycles)
{
    uint64_t expected_xtal_cycles = (xtal_freq * 1000000ULL * slowclk_cycles) >> 15; // xtal_freq(hz) * slowclk_cycles / 32768
    uint64_t delta = expected_xtal_cycles / 2000; // allow a +/- 0.05% (5/10000) deviation
    return (actual_xtal_cycles >= (expected_xtal_cycles - delta)) && (actual_xtal_cycles <= (expected_xtal_cycles + delta));
}
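
/* Worked example (hypothetical numbers): with a 40 MHz XTAL and slowclk_cycles = 1024,
 * expected_xtal_cycles = 40000000 * 1024 / 32768 = 1250000 and delta = 1250000 / 2000 = 625,
 * so the measurement is accepted only if it falls within [1249375, 1250625] XTAL cycles. */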

uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    assert(slowclk_cycles);
    rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);

    if ((cal_clk == RTC_CAL_32K_OSC_SLOW) && !rtc_clk_cal_32k_valid(xtal_freq, slowclk_cycles, xtal_cycles)) {
        return 0;
    }

    uint64_t divider = ((uint64_t)xtal_freq) * slowclk_cycles;
    uint64_t period_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT) + divider / 2 - 1) / divider;
    uint32_t period = (uint32_t)(period_64 & UINT32_MAX);
    return period;
}
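
/* Worked example (hypothetical numbers, assuming RTC_CLK_CAL_FRACT == 19): with a 40 MHz
 * XTAL, slowclk_cycles = 1024 and a measured xtal_cycles = 1250000 (an ideal 32.768 kHz
 * slow clock), divider = 40 * 1024 = 40960 and period = 1250000 * 2^19 / 40960 = 16000000,
 * i.e. a slow clock period of ~30.52 us expressed in us * 2^19. */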

uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
    assert(period);
    /* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
     * TODO: fix overflow.
     */
    return (time_in_us << RTC_CLK_CAL_FRACT) / period;
}

uint64_t rtc_time_slowclk_to_us(uint64_t rtc_cycles, uint32_t period)
{
    return (rtc_cycles * period) >> RTC_CLK_CAL_FRACT;
}
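
/* Example round trip (hypothetical numbers, assuming RTC_CLK_CAL_FRACT == 19): with
 * period = 16000000 (an ideal 32.768 kHz slow clock), rtc_time_us_to_slowclk(1000000, period)
 * = (1000000 << 19) / 16000000 = 32768 cycles, and rtc_time_slowclk_to_us(32768, period)
 * = (32768 * 16000000) >> 19 = 1000000 us. */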

uint64_t rtc_time_get(void)
{
    return rtc_cntl_ll_get_rtc_time();
}

void rtc_clk_wait_for_slow_cycle(void) // This function may no longer be useful
{
    SET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE);
    while (GET_PERI_REG_MASK(RTC_CNTL_SLOW_CLK_CONF_REG, RTC_CNTL_SLOW_CLK_NEXT_EDGE)) {
        esp_rom_delay_us(1);
    }
}

uint32_t rtc_clk_freq_cal(uint32_t cal_val)
{
    if (cal_val == 0) {
        return 0; // cal_val is the denominator below; return 0 to signal failure
    }
    return 1000000ULL * (1 << RTC_CLK_CAL_FRACT) / cal_val;
}
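
/* Example (hypothetical numbers, assuming RTC_CLK_CAL_FRACT == 19): for cal_val = 16000000
 * (a slow clock period of ~30.52 us), the result is 1000000 * 2^19 / 16000000 = 32768 Hz. */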