// Copyright 2015-2017 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <stdint.h>
#include <assert.h>
#include "esp_rom_sys.h"
#include "soc/rtc.h"
#include "soc/timer_periph.h"
#include "soc_log.h"

#define MHZ (1000000)

static const char* TAG = "rtc_time";

/* Calibration of RTC_SLOW_CLK is performed using a special feature of TIMG0.
 * This feature counts the number of XTAL clock cycles within a given number of
 * RTC_SLOW_CLK cycles.
 *
 * The slow clock calibration feature has two modes of operation: one-off and cycling.
 * In cycling mode (which is enabled by default on SoC reset), counting of XTAL
 * cycles within one RTC_SLOW_CLK cycle is done continuously. Cycling mode is enabled
 * using the TIMG_RTC_CALI_START_CYCLING bit. In one-off mode counting is performed
 * once, and the TIMG_RTC_CALI_RDY bit is set when counting is done. One-off mode is
 * enabled using the TIMG_RTC_CALI_START bit.
 */

/**
 * @brief Clock calibration function used by rtc_clk_cal and rtc_clk_cal_ratio
 * @param cal_clk which clock to calibrate
 * @param slowclk_cycles number of slow clock cycles to count. Max value is 32766.
 * @return number of XTAL clock cycles within the given number of slow clock cycles
 */
static uint32_t rtc_clk_cal_internal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    assert(slowclk_cycles < 32767);
    /* Enable requested clock (150k clock is always on) */
    int dig_32k_xtal_state = REG_GET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN);
    if (cal_clk == RTC_CAL_32K_XTAL && !dig_32k_xtal_state) {
        REG_SET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN, 1);
    }

    if (cal_clk == RTC_CAL_8MD256) {
        SET_PERI_REG_MASK(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_CLK8M_D256_EN);
    }
    /* Prepare calibration */
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, cal_clk);
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING);
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, slowclk_cycles);
    /* Figure out how long to wait for calibration to finish */
    uint32_t expected_freq;
    rtc_slow_freq_t slow_freq = REG_GET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_ANA_CLK_RTC_SEL);
    if (cal_clk == RTC_CAL_32K_XTAL ||
            (cal_clk == RTC_CAL_RTC_MUX && slow_freq == RTC_SLOW_FREQ_32K_XTAL)) {
        expected_freq = 32768; /* standard 32k XTAL */
    } else if (cal_clk == RTC_CAL_8MD256 ||
            (cal_clk == RTC_CAL_RTC_MUX && slow_freq == RTC_SLOW_FREQ_8MD256)) {
        expected_freq = RTC_FAST_CLK_FREQ_APPROX / 256;
    } else {
        expected_freq = 150000; /* 150k internal oscillator */
    }
    uint32_t us_time_estimate = (uint32_t) (((uint64_t) slowclk_cycles) * MHZ / expected_freq);
    /* Check if the required number of slowclk_cycles may result in an overflow of TIMG_RTC_CALI_VALUE */
    rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
    if (xtal_freq == RTC_XTAL_FREQ_AUTO) {
        /* XTAL frequency is not known yet; assume worst case (40 MHz) */
        xtal_freq = RTC_XTAL_FREQ_40M;
    }
    const uint32_t us_timer_max = TIMG_RTC_CALI_VALUE / (uint32_t) xtal_freq;
    if (us_time_estimate >= us_timer_max) {
        SOC_LOGE(TAG, "slowclk_cycles value too large, possible overflow");
        return 0;
    }
    /* Start calibration */
    CLEAR_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    SET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    /* Wait the expected time calibration should take.
     * TODO: if running under RTOS, and us_time_estimate > RTOS tick, use the
     * RTOS delay function.
     */
    esp_rom_delay_us(us_time_estimate);
    /* Wait for calibration to finish up to another us_time_estimate */
    int timeout_us = us_time_estimate;
    while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY) &&
            timeout_us > 0) {
        timeout_us--;
        esp_rom_delay_us(1);
    }

    REG_SET_FIELD(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_XTAL32K_EN, dig_32k_xtal_state);

    if (cal_clk == RTC_CAL_8MD256) {
        CLEAR_PERI_REG_MASK(RTC_CNTL_CLK_CONF_REG, RTC_CNTL_DIG_CLK8M_D256_EN);
    }
    if (timeout_us == 0) {
        /* timed out waiting for calibration */
        return 0;
    }

    return REG_GET_FIELD(TIMG_RTCCALICFG1_REG(0), TIMG_RTC_CALI_VALUE);
}

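/* Measure the ratio between the XTAL frequency and the frequency of the clock
 * selected by cal_clk, expressed as a fixed point value with RTC_CLK_CAL_FRACT
 * fractional bits. Returns 0 if the calibration timed out.
 */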
uint32_t rtc_clk_cal_ratio(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
    uint64_t ratio_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT)) / slowclk_cycles;
    uint32_t ratio = (uint32_t)(ratio_64 & UINT32_MAX);
    return ratio;
}

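/* Measure the period of the clock selected by cal_clk, in microseconds, expressed
 * as a fixed point value with RTC_CLK_CAL_FRACT fractional bits. Returns 0 if the
 * calibration timed out.
 *
 * Illustrative sketch of a typical calling sequence (not part of this file; the
 * cycle count of 1024 and the choice of RTC_CAL_RTC_MUX are arbitrary assumptions):
 *
 *     uint32_t period = rtc_clk_cal(RTC_CAL_RTC_MUX, 1024);           // us per slow clock cycle, fixed point
 *     if (period != 0) {
 *         uint64_t cycles = rtc_time_us_to_slowclk(1000000, period);  // slow clock cycles in 1 second
 *         uint64_t us     = rtc_time_slowclk_to_us(cycles, period);   // convert back to microseconds
 *         uint32_t hz     = rtc_clk_freq_cal(period);                 // approximate frequency in Hz
 *     }
 */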
uint32_t rtc_clk_cal(rtc_cal_sel_t cal_clk, uint32_t slowclk_cycles)
{
    rtc_xtal_freq_t xtal_freq = rtc_clk_xtal_freq_get();
    uint64_t xtal_cycles = rtc_clk_cal_internal(cal_clk, slowclk_cycles);
    uint64_t divider = ((uint64_t)xtal_freq) * slowclk_cycles;
    uint64_t period_64 = ((xtal_cycles << RTC_CLK_CAL_FRACT) + divider / 2 - 1) / divider;
    uint32_t period = (uint32_t)(period_64 & UINT32_MAX);
    return period;
}

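/* Convert a time interval from microseconds to RTC slow clock cycles, using a
 * period value (fixed point, RTC_CLK_CAL_FRACT fractional bits) obtained from
 * rtc_clk_cal.
 */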
uint64_t rtc_time_us_to_slowclk(uint64_t time_in_us, uint32_t period)
{
    /* Overflow will happen in this function if time_in_us >= 2^45, which is about 400 days.
     * TODO: fix overflow.
     */
    return (time_in_us << RTC_CLK_CAL_FRACT) / period;
}

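/* Convert a number of RTC slow clock cycles to microseconds, using a period value
 * (fixed point, RTC_CLK_CAL_FRACT fractional bits) obtained from rtc_clk_cal.
 */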
uint64_t rtc_time_slowclk_to_us(uint64_t rtc_cycles, uint32_t period)
{
    return (rtc_cycles * period) >> RTC_CLK_CAL_FRACT;
}

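/* Read the current value of the RTC counter: request an update of the counter
 * registers, wait for RTC_CNTL_TIME_VALID to be set, then assemble the 64-bit
 * value from the two 32-bit halves.
 */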
uint64_t rtc_time_get(void)
{
    SET_PERI_REG_MASK(RTC_CNTL_TIME_UPDATE_REG, RTC_CNTL_TIME_UPDATE);
    while (GET_PERI_REG_MASK(RTC_CNTL_TIME_UPDATE_REG, RTC_CNTL_TIME_VALID) == 0) {
        esp_rom_delay_us(1); // might take 1 RTC slowclk period, don't flood RTC bus
    }
    SET_PERI_REG_MASK(RTC_CNTL_INT_CLR_REG, RTC_CNTL_TIME_VALID_INT_CLR);
    uint64_t t = READ_PERI_REG(RTC_CNTL_TIME0_REG);
    t |= ((uint64_t) READ_PERI_REG(RTC_CNTL_TIME1_REG)) << 32;
    return t;
}

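/* Busy-wait until the next RTC_SLOW_CLK cycle, by starting a one-off calibration
 * for zero slow clock cycles and polling the RDY bit.
 */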
void rtc_clk_wait_for_slow_cycle(void)
{
    REG_CLR_BIT(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START_CYCLING | TIMG_RTC_CALI_START);
    REG_CLR_BIT(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY);
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_CLK_SEL, RTC_CAL_RTC_MUX);
    /* Request to run calibration for 0 slow clock cycles.
     * RDY bit will be set on the nearest slow clock cycle.
     */
    REG_SET_FIELD(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_MAX, 0);
    REG_SET_BIT(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_START);
    esp_rom_delay_us(1); /* RDY needs some time to go low */
    while (!GET_PERI_REG_MASK(TIMG_RTCCALICFG_REG(0), TIMG_RTC_CALI_RDY)) {
        esp_rom_delay_us(1);
    }
}

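/* Convert a calibration value from rtc_clk_cal (slow clock period in microseconds,
 * fixed point with RTC_CLK_CAL_FRACT fractional bits) into an approximate frequency
 * in Hz. Returns 0 if cal_val is 0.
 */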
uint32_t rtc_clk_freq_cal(uint32_t cal_val)
{
    if (cal_val == 0) {
        return 0; // cal_val is used as the divisor below, so return 0 to indicate failure
    }
    return 1000000ULL * (1 << RTC_CLK_CAL_FRACT) / cal_val;
}