1 /*
2  * Copyright 2022 NXP
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include "board.h"
8 #include "fsl_cache.h"
9 #if defined(SDK_I2C_BASED_COMPONENT_USED) && SDK_I2C_BASED_COMPONENT_USED
10 #include "fsl_lpi2c.h"
11 #endif
12 
13 /*******************************************************************************
14  * Definitions
15  ******************************************************************************/
/* DRAM timing configuration used by the retention exit path
 * (BOARD_DRAMExitRetention) to reprogram the DDR PHY and controller.
 * Expected to be set by the application before entering retention. */
struct dram_timing_info *timing_info;
17 
18 /*******************************************************************************
19  * Variables
20  ******************************************************************************/
#if defined(BOARD_USE_DDR_RETENTION) && BOARD_USE_DDR_RETENTION
/*
 * DDR PHY APB address remap lookup table.
 *
 * BOARD_DDRPHY_Addr_Remap() searches this table (backwards) for the entry
 * equal to bits [22:13] of the unqualified APB address and uses the matching
 * table INDEX as bits [19:13] of the remapped PHY address. Index 0 doubles
 * as the not-found default.
 */
static const uint16_t BOARD_DDRPHY_Addr_Remap_table[DDRPHY_NB_ADDR_REMAP] = {
    0x000, // 0x00
    0x001, // 0x01
    0x002, // 0x02
    0x003, // 0x03
    0x004, // 0x04
    0x005, // 0x05
    0x006, // 0x06
    0x007, // 0x07
    0x008, // 0x08
    0x009, // 0x09
    0x00a, // 0x0a
    0x00b, // 0x0b
    0x100, // 0x0c
    0x101, // 0x0d
    0x102, // 0x0e
    0x103, // 0x0f
    0x104, // 0x10
    0x105, // 0x11
    0x106, // 0x12
    0x107, // 0x13
    0x108, // 0x14
    0x109, // 0x15
    0x10a, // 0x16
    0x10b, // 0x17
    0x200, // 0x18
    0x201, // 0x19
    0x202, // 0x1a
    0x203, // 0x1b
    0x204, // 0x1c
    0x205, // 0x1d
    0x206, // 0x1e
    0x207, // 0x1f
    0x208, // 0x20
    0x209, // 0x21
    0x20a, // 0x22
    0x20b, // 0x23
    0x300, // 0x24
    0x301, // 0x25
    0x302, // 0x26
    0x303, // 0x27
    0x304, // 0x28
    0x305, // 0x29
    0x306, // 0x2a
    0x307, // 0x2b
    0x308, // 0x2c
    0x309, // 0x2d
    0x30a, // 0x2e
    0x30b, // 0x2f
    0x010, // 0x30
    0x011, // 0x31
    0x012, // 0x32
    0x013, // 0x33
    0x014, // 0x34
    0x015, // 0x35
    0x016, // 0x36
    0x017, // 0x37
    0x018, // 0x38
    0x019, // 0x39
    0x110, // 0x3a
    0x111, // 0x3b
    0x112, // 0x3c
    0x113, // 0x3d
    0x114, // 0x3e
    0x115, // 0x3f
    0x116, // 0x40
    0x117, // 0x41
    0x118, // 0x42
    0x119, // 0x43
    0x210, // 0x44
    0x211, // 0x45
    0x212, // 0x46
    0x213, // 0x47
    0x214, // 0x48
    0x215, // 0x49
    0x216, // 0x4a
    0x217, // 0x4b
    0x218, // 0x4c
    0x219, // 0x4d
    0x310, // 0x4e
    0x311, // 0x4f
    0x312, // 0x50
    0x313, // 0x51
    0x314, // 0x52
    0x315, // 0x53
    0x316, // 0x54
    0x317, // 0x55
    0x318, // 0x56
    0x319, // 0x57
    0x020, // 0x58
    0x120, // 0x59
    0x220, // 0x5a
    0x320, // 0x5b
    0x040, // 0x5c
    0x140, // 0x5d
    0x240, // 0x5e
    0x340, // 0x5f
    0x050, // 0x60
    0x051, // 0x61
    0x052, // 0x62
    0x053, // 0x63
    0x054, // 0x64
    0x055, // 0x65
    0x056, // 0x66
    0x057, // 0x67
    0x070, // 0x68
    0x090, // 0x69
    0x190, // 0x6a
    0x290, // 0x6b
    0x390, // 0x6c
    0x0c0, // 0x6d
    0x0d0, // 0x6e
};
#endif
136 
137 /*******************************************************************************
138  * Code
139  ******************************************************************************/
140 
141 /* Initialize debug console. */
BOARD_InitDebugConsole(void)142 void BOARD_InitDebugConsole(void)
143 {
144     /* clang-format off */
145     const clock_root_config_t uartClkCfg = {
146         .clockOff = false,
147 	.mux = 0, // 24MHz oscillator source
148 	.div = 1
149     };
150     /* clang-format on */
151 
152     CLOCK_SetRootClock(BOARD_DEBUG_UART_CLOCK_ROOT, &uartClkCfg);
153     CLOCK_EnableClock(BOARD_DEBUG_UART_CLOCK_GATE);
154     DbgConsole_Init(BOARD_DEBUG_UART_INSTANCE, BOARD_DEBUG_UART_BAUDRATE, BOARD_DEBUG_UART_TYPE,
155                     BOARD_DEBUG_UART_CLK_FREQ);
156 }
157 
158 #if defined(SDK_I2C_BASED_COMPONENT_USED) && SDK_I2C_BASED_COMPONENT_USED
/*
 * Initialize an LPI2C instance as a bus master using the SDK driver
 * defaults (100 kHz baud, 2-pin open drain, no glitch filtering).
 *
 * base      - LPI2C peripheral to initialize.
 * clkSrc_Hz - frequency of the functional clock feeding the peripheral.
 */
void BOARD_LPI2C_Init(LPI2C_Type *base, uint32_t clkSrc_Hz)
{
    lpi2c_master_config_t masterConfig = {0};

    LPI2C_MasterGetDefaultConfig(&masterConfig);
    LPI2C_MasterInit(base, &masterConfig, clkSrc_Hz);
}
176 
BOARD_LPI2C_Send(LPI2C_Type * base,uint8_t deviceAddress,uint32_t subAddress,uint8_t subAddressSize,uint8_t * txBuff,uint8_t txBuffSize,uint32_t flags)177 status_t BOARD_LPI2C_Send(LPI2C_Type *base,
178                           uint8_t deviceAddress,
179                           uint32_t subAddress,
180                           uint8_t subAddressSize,
181                           uint8_t *txBuff,
182                           uint8_t txBuffSize,
183                           uint32_t flags)
184 {
185     lpi2c_master_transfer_t xfer;
186 
187     xfer.flags          = flags;
188     xfer.slaveAddress   = deviceAddress;
189     xfer.direction      = kLPI2C_Write;
190     xfer.subaddress     = subAddress;
191     xfer.subaddressSize = subAddressSize;
192     xfer.data           = txBuff;
193     xfer.dataSize       = txBuffSize;
194 
195     return LPI2C_MasterTransferBlocking(base, &xfer);
196 }
197 
BOARD_LPI2C_Receive(LPI2C_Type * base,uint8_t deviceAddress,uint32_t subAddress,uint8_t subAddressSize,uint8_t * rxBuff,uint8_t rxBuffSize,uint32_t flags)198 status_t BOARD_LPI2C_Receive(LPI2C_Type *base,
199                              uint8_t deviceAddress,
200                              uint32_t subAddress,
201                              uint8_t subAddressSize,
202                              uint8_t *rxBuff,
203                              uint8_t rxBuffSize,
204                              uint32_t flags)
205 {
206     lpi2c_master_transfer_t xfer;
207 
208     xfer.flags          = flags;
209     xfer.slaveAddress   = deviceAddress;
210     xfer.direction      = kLPI2C_Read;
211     xfer.subaddress     = subAddress;
212     xfer.subaddressSize = subAddressSize;
213     xfer.data           = rxBuff;
214     xfer.dataSize       = rxBuffSize;
215 
216     return LPI2C_MasterTransferBlocking(base, &xfer);
217 }
218 
219 #if defined(BOARD_USE_PCAL6524) && BOARD_USE_PCAL6524
/* Initialize the LPI2C instance wired to the PCAL6524 I/O expander. */
void BOARD_PCAL6524_I2C_Init(void)
{
    BOARD_LPI2C_Init(BOARD_PCAL6524_I2C, BOARD_PCAL6524_I2C_CLOCK_FREQ);
}
224 
BOARD_PCAL6524_I2C_Send(uint8_t deviceAddress,uint32_t subAddress,uint8_t subAddressSize,const uint8_t * txBuff,uint8_t txBuffSize,uint32_t flags)225 status_t BOARD_PCAL6524_I2C_Send(uint8_t deviceAddress,
226                                  uint32_t subAddress,
227                                  uint8_t subAddressSize,
228                                  const uint8_t *txBuff,
229                                  uint8_t txBuffSize,
230                                  uint32_t flags)
231 {
232     return BOARD_LPI2C_Send(BOARD_PCAL6524_I2C, deviceAddress, subAddress, subAddressSize, (uint8_t *)txBuff,
233                             txBuffSize, flags);
234 }
235 
BOARD_PCAL6524_I2C_Receive(uint8_t deviceAddress,uint32_t subAddress,uint8_t subAddressSize,uint8_t * rxBuff,uint8_t rxBuffSize,uint32_t flags)236 status_t BOARD_PCAL6524_I2C_Receive(uint8_t deviceAddress,
237                                     uint32_t subAddress,
238                                     uint8_t subAddressSize,
239                                     uint8_t *rxBuff,
240                                     uint8_t rxBuffSize,
241                                     uint32_t flags)
242 {
243     return BOARD_LPI2C_Receive(BOARD_PCAL6524_I2C, deviceAddress, subAddress, subAddressSize, rxBuff, rxBuffSize,
244                                flags);
245 }
246 
/*
 * Bring up the PCAL6524 I/O expander: initialize its I2C bus, then hand
 * the driver its address and the board's I2C send/receive callbacks.
 */
void BOARD_InitPCAL6524(pcal6524_handle_t *handle)
{
    static const pcal6524_config_t expanderConfig = {
        .i2cAddr         = BOARD_PCAL6524_I2C_ADDR,
        .I2C_SendFunc    = BOARD_PCAL6524_I2C_Send,
        .I2C_ReceiveFunc = BOARD_PCAL6524_I2C_Receive,
    };

    BOARD_PCAL6524_I2C_Init();
    PCAL6524_Init(handle, &expanderConfig);
}
259 
260 #endif /* BOARD_USE_PCAL6524. */
261 
262 #endif
263 
/*
 * Put the M33 core into SUSPEND mode via WFI.
 *
 * Configures the OSCPLL low-power-mode settings and the GPC CPU mode
 * target, then disables SysTick and executes WFI so the system enters
 * its sleep flow. SysTick is restored after wakeup.
 */
void BOARD_McoreSUSPEND(void)
{
    uint32_t value = 0U;
    /* Config OSCPLL LPM setting for M33 SUSPEND */
    for (unsigned int i = OSCPLL_LPM_START; i <= OSCPLL_LPM_END; i++)
    {
        CCM_CTRL->OSCPLL[i].LPM0 |= CCM_OSCPLL_LPM0_LPM_SETTING_D2_MASK;
    }

    /* Config GPC_CTRL_CM33 to set Mcore as SUSPEND mode */
    GPC_CTRL_CM33->CM_MODE_CTRL |= GPC_CPU_CTRL_CM_MODE_CTRL_CPU_MODE_TARGET_MASK;

    /* Stop SysTick and enter WFI
     * System will goes into system sleep flow
     * The STBY LED on board will light with red color
     */
    /* Save old value of SYST_CSR */
    value            = SYSTICK_CTRL_REG;
    SYSTICK_CTRL_REG = SYSTICK_CLOSE;
    __WFI();
    /* Enable systick (restore the saved SYST_CSR value) */
    SYSTICK_CTRL_REG = value;
}
287 
/*
 * Configure the Cortex-M33 MPU and caches.
 *
 * Sequence: disable both caches, disable the MPU, program four memory
 * attributes (device, non-cacheable, write-through, write-back), map the
 * DRAM region [0x80000000, 0xDFFFFFFF] with the policy selected by the
 * DDR_* macros below, then re-enable the MPU (with the default map for
 * unmapped regions) and the caches. The statement order is required:
 * attributes and regions must be programmed while the MPU is disabled.
 */
void BOARD_ConfigMPU(void)
{
    uint8_t attr;

    /* Disable code cache(ICache) and system cache(DCache) */
    XCACHE_DisableCache(LPCAC_PC);
    XCACHE_DisableCache(LPCAC_PS);

    /* NOTE: All TCRAM is non-cacheable regardless of MPU setting. */

    /*
     * default cache policy(default memory access behavior) after enable mpu on cortex-m33(according to RM of
     * cortex-m33): 0x00000000-0x1FFFFFFF Normal memory, Non-shareable, Write-Through, not Write Allocate
     * 0x20000000-0x3FFFFFFF Normal memory, Non-shareable, Write-Back, Write Allocate
     * 0x40000000-0x5FFFFFFF Device, Shareable
     * 0x60000000-0x7FFFFFFF Normal memory, Non-shareable, Write-Back, Write Allocate
     * 0x80000000-0x9FFFFFFF Normal memory, Non-shareable, Write-Through, not Write Allocate
     * 0xA0000000-0xDFFFFFFF Device, Shareable
     * 0xE0000000-0xE003FFFF Device, Shareable
     * 0xE0040000-0xE0043FFF Device, Shareable
     * 0xE0044000-0xE00EFFFF Device, Shareable
     * 0xF0000000-0xFFFFFFFF Device, Shareable
     */
    /* Disable MPU */
    ARM_MPU_Disable();

    /* Attr0: Device-nGnRnE */
    ARM_MPU_SetMemAttr(0U, ARM_MPU_ATTR(ARM_MPU_ATTR_DEVICE, ARM_MPU_ATTR_DEVICE));

    /* Attr1: Normal memory, Outer non-cacheable, Inner non-cacheable */
    ARM_MPU_SetMemAttr(1U, ARM_MPU_ATTR(ARM_MPU_ATTR_NON_CACHEABLE, ARM_MPU_ATTR_NON_CACHEABLE));

    /* Attr2: Normal memory, Inner write-through transient, read allocate. Inner write-through transient, read allocate
     */
    attr = ARM_MPU_ATTR_MEMORY_(0U, 0U, 1U, 0U);
    ARM_MPU_SetMemAttr(2U, ARM_MPU_ATTR(attr, attr));

    /* Attr3: Normal memory, Outer write-back transient, read/write allocate. Inner write-back transient, read/write
     * allocate */
    attr = ARM_MPU_ATTR_MEMORY_(0U, 1U, 1U, 1U);
    ARM_MPU_SetMemAttr(3U, ARM_MPU_ATTR(attr, attr));

    /*
     * Change macro definitions as follows when choose cache policy as non-cacheable:
     * #define DDR_NONCACHEABLE (1U)
     * #define DDR_WRITE_THROUGH (0U)
     * #define DDR_WRITE_BACK (0U)
     *
     *
     * Change macro definitions as follows when choose cache policy as Write-Through:
     * #define DDR_NONCACHEABLE (0U)
     * #define DDR_WRITE_THROUGH (1U)
     * #define DDR_WRITE_BACK (0U)
     *
     *
     * Change macro definitions as follows when choose cache policy as Write-Back:
     * #define DDR_NONCACHEABLE (0U)
     * #define DDR_WRITE_THROUGH (0U)
     * #define DDR_WRITE_BACK (1U)
     */
#define DDR_NONCACHEABLE  (1U)
#define DDR_WRITE_THROUGH (0U)
#define DDR_WRITE_BACK    (0U)
#if DDR_NONCACHEABLE
    /* NOTE: DDR is used as shared memory for A/M core communication, set it to non-cacheable. */
    /* Region 0: [0x80000000, 0xDFFFFFFF](DRAM), outer shareable, read/write, any privileged, executable. Attr 1
     * (non-cacheable). */
    ARM_MPU_SetRegion(0U, ARM_MPU_RBAR(0x80000000, ARM_MPU_SH_OUTER, 0U, 1U, 0U), ARM_MPU_RLAR(0xDFFFFFFF, 1U));
#elif DDR_WRITE_THROUGH
    /* Region 0: [0x80000000, 0xDFFFFFFF](DRAM), outer shareable, read/write, any privileged, executable. Attr 2
     * (Normal memory, Inner write-through transient, read allocate. Inner write-through transient, read allocate). */
    ARM_MPU_SetRegion(0U, ARM_MPU_RBAR(0x80000000, ARM_MPU_SH_OUTER, 0U, 1U, 0U), ARM_MPU_RLAR(0xDFFFFFFF, 2U));
#elif DDR_WRITE_BACK
    /* Region 0: [0x80000000, 0xDFFFFFFF](DRAM), outer shareable, read/write, any privileged, executable. Attr 3
     * (Normal memory, Outer write-back transient, read/write allocate. Inner write-back transient, read/write
     * allocate). */
    ARM_MPU_SetRegion(0U, ARM_MPU_RBAR(0x80000000, ARM_MPU_SH_OUTER, 0U, 1U, 0U), ARM_MPU_RLAR(0xDFFFFFFF, 3U));
#endif

    /* Enable MPU(use default memory map when access the memory within region) */
    ARM_MPU_Enable(MPU_CTRL_PRIVDEFENA_Msk);

    /* Enable ICache and DCache */
    XCACHE_EnableCache(LPCAC_PC);
    XCACHE_EnableCache(LPCAC_PS);
    /* flush pipeline */
    __DSB();
    __ISB();
}
377 
378 #if defined(BOARD_USE_DDR_RETENTION) && BOARD_USE_DDR_RETENTION
BOARD_DDRPHY_Addr_Remap(uint32_t paddr_apb_from_ctlr)379 static unsigned long BOARD_DDRPHY_Addr_Remap(uint32_t paddr_apb_from_ctlr)
380 {
381     uint32_t paddr_apb_qual;
382     uint32_t paddr_apb_unqual_dec_22_13;
383     uint32_t paddr_apb_unqual_dec_19_13;
384     uint32_t paddr_apb_unqual_dec_12_1;
385     uint32_t paddr_apb_unqual;
386     uint32_t paddr_apb_phy;
387 
388     paddr_apb_qual             = (paddr_apb_from_ctlr << 1);
389     paddr_apb_unqual_dec_22_13 = ((paddr_apb_qual & 0x7fe000) >> 13);
390     paddr_apb_unqual_dec_12_1  = ((paddr_apb_qual & 0x1ffe) >> 1);
391 
392     /* seacrching backward, so the last tested addr (0) is also the default value in case not found */
393     paddr_apb_unqual_dec_19_13 = DDRPHY_NB_ADDR_REMAP - 1;
394     while ((paddr_apb_unqual_dec_19_13 > 0) &&
395            (BOARD_DDRPHY_Addr_Remap_table[paddr_apb_unqual_dec_19_13] != (uint16_t)paddr_apb_unqual_dec_22_13))
396     {
397         paddr_apb_unqual_dec_19_13--;
398     }
399 
400     paddr_apb_unqual = ((paddr_apb_unqual_dec_19_13 << 13) | (paddr_apb_unqual_dec_12_1 << 1));
401     paddr_apb_phy    = (paddr_apb_unqual << 1);
402     return paddr_apb_phy;
403 }
404 
BOARD_Check_DDRC_Idle(int waitus,uint32_t flag)405 int BOARD_Check_DDRC_Idle(int waitus, uint32_t flag)
406 {
407     uint32_t ddrdsr2_val;
408     int waitforever = 0;
409     int waitedus    = 0;
410     if (waitus == 0)
411     {
412         waitforever = 1;
413     }
414 
415     do
416     {
417         ddrdsr2_val = DDR_CTRL->DDRDSR_2;
418         if ((ddrdsr2_val & flag) == flag)
419         {
420             /* Memory controller is idle */
421             break;
422         }
423         SDK_DelayAtLeastUs(1, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);
424         waitus--;
425         waitedus++;
426     } while (waitus | waitforever);
427 
428     if ((waitus == 0) & !waitforever)
429     {
430         return -1;
431     }
432     return waitedus;
433 }
434 
BOARD_Ipg_Stop_Ack_Wait(uint32_t expect_value)435 static void BOARD_Ipg_Stop_Ack_Wait(uint32_t expect_value)
436 {
437     uint32_t read_data;
438     read_data = BLK_CTRL_DDRMIX->DDRC_STOP_CTRL;
439 
440     if (expect_value == 0x1)
441     {
442         while ((read_data & BIT(1)) == 0x0)
443         {
444             /* DDR Controller ipg_stop_ack is a 0 */
445             read_data = BLK_CTRL_DDRMIX->DDRC_STOP_CTRL;
446         }
447         /* DDR Controller ipg_stop_ack is a 1 */
448     }
449     else if (expect_value == 0x0)
450     {
451         while ((read_data & BIT(1)) != 0x0)
452         {
453             /* DDR Controller ipg_stop_ack is a 1 */
454             read_data = BLK_CTRL_DDRMIX->DDRC_STOP_CTRL;
455         }
456         /* DDR Controller ipg_stop_ack is a 0 */
457     }
458 }
459 
460 /* Check whether PHY initialization is complete */
BOARD_Check_Dfi_Init_Complete(void)461 void BOARD_Check_Dfi_Init_Complete(void)
462 {
463     uint32_t ddr_phy_status = 0;
464 
465     do
466     {
467         ddr_phy_status = DDR_CTRL->DDRDSR_2;
468         if (ddr_phy_status & DDRC_DDRDSR_2_PHY_INIT_CMPLT_MASK)
469         {
470             /* PHY initialization is complete */
471             break;
472         }
473     } while (1);
474 
475     DDR_CTRL->DDRDSR_2 |= DDRC_DDRDSR_2_PHY_INIT_CMPLT_MASK;
476 }
477 
/* Force the DDR controller into self-refresh mode. */
void BOARD_DDRCEnterSelfRefresh(void)
{
    /* Set FRC_SR bit, put DDRC into self-refresh mode */
    DDR_CTRL->DDR_SDRAM_CFG_2 |= DDRC_DDR_SDRAM_CFG_2_FRC_SR_MASK;
}
483 
/*
 * Cold-reset the DDR PHY by cycling its three reset controls in the
 * required order (de-assert, assert, de-assert with delays between steps).
 * NOTE: the function name's "ClodRest" is a typo for "ColdReset"; it is
 * kept unchanged for API compatibility.
 */
void BOARD_DDRPHY_ClodRest(void)
{
    /*
     * dramphy_apb_n default 1 , assert -> 0, de_assert -> 1
     * dramphy_reset_n default 0 , assert -> 0, de_assert -> 1
     * dramphy_PwrOKIn default 0 , assert -> 1, de_assert -> 0
     */
    /* src_gen_dphy_apb_sw_rst_de_assert */
    SRC_DPHY_SLICE->SLICE_SW_CTRL &= ~SRC_MIX_SLICE_SLICE_SW_CTRL_RST_CTRL_SOFT_MASK;
    /* src_gen_dphy_sw_rst_de_assert */
    SRC_DPHY_SLICE->SINGLE_RESET_SW_CTRL &= ~SRC_MIX_SLICE_SINGLE_RESET_SW_CTRL_RST_CTRL_SOFT_1_MASK;
    /* src_gen_dphy_PwrOKIn_sw_rst_de_assert() */
    SRC_DPHY_SLICE->SINGLE_RESET_SW_CTRL |= SRC_MIX_SLICE_SINGLE_RESET_SW_CTRL_RST_CTRL_SOFT_0_MASK;
    SDK_DelayAtLeastUs(10, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);

    /* src_gen_dphy_apb_sw_rst_assert */
    SRC_DPHY_SLICE->SLICE_SW_CTRL |= SRC_MIX_SLICE_SLICE_SW_CTRL_RST_CTRL_SOFT_MASK;
    /* src_gen_dphy_sw_rst_assert */
    SRC_DPHY_SLICE->SINGLE_RESET_SW_CTRL |= SRC_MIX_SLICE_SINGLE_RESET_SW_CTRL_RST_CTRL_SOFT_1_MASK;
    SDK_DelayAtLeastUs(10, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);

    /* src_gen_dphy_PwrOKIn_sw_rst_assert */
    SRC_DPHY_SLICE->SINGLE_RESET_SW_CTRL &= ~SRC_MIX_SLICE_SINGLE_RESET_SW_CTRL_RST_CTRL_SOFT_0_MASK;
    SDK_DelayAtLeastUs(10, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);

    /* src_gen_dphy_apb_sw_rst_de_assert */
    SRC_DPHY_SLICE->SLICE_SW_CTRL &= ~SRC_MIX_SLICE_SLICE_SW_CTRL_RST_CTRL_SOFT_MASK;
    /* src_gen_dphy_sw_rst_de_assert() */
    SRC_DPHY_SLICE->SINGLE_RESET_SW_CTRL &= ~SRC_MIX_SLICE_SINGLE_RESET_SW_CTRL_RST_CTRL_SOFT_1_MASK;
}
514 
/*
 * Power up the DRAM PLL, busy-wait for lock, then enable its output clock.
 */
void BOARD_DRAM_PLL_Init(void)
{
    /* Powerup and Enable DRAM PLL block*/
    DRAMPLL->CTRL.SET |= PLL_CTRL_POWERUP_MASK;

    /* Wait lock */
    while (!(R32(DRAMPLL->PLL_STATUS) & PLL_PLL_STATUS_PLL_LOCK_MASK))
    {
        ;
    }

    /* Enable PLL output clock */
    DRAMPLL->CTRL.SET |= PLL_CTRL_CLKMUX_EN_MASK;
}
529 
/*
 * Switch the DRAM clock from the CCM bypass path back to the DRAM PLL:
 * set the DRAM APB root to 133MHz, wait for the root update to settle,
 * then clear the PLL bypass bit in the shared CCM GPR.
 */
void BOARD_DDR_Disable_Bypass(void)
{
    /* Set DRAM APB to 133Mhz */
    const clock_root_config_t dramapbClkCfg = {.clockOff = false, .mux = 2, .div = 3};
    CLOCK_SetRootClock(kCLOCK_Root_DramApb, &dramapbClkCfg);

    /* wait for the new setting update done
     * (NOTE(review): bit 28 of the root status register at offset 0x20 is
     * presumably the CHANGING/busy flag — confirm in the CCM chapter) */
    while (R32(DRAM_APB_CLK + 0x20) & BIT(28))
    {
        ;
    }

    /* Switch from DRAM  clock root from CCM to PLL */
    CCM_CTRL->GPR_SHARED2.CLR |= CCM_GPR_SHARED2_DRAM_PLL_BYPASS_MASK;
}
545 
546 /* Restore the dram PHY config */
BOARD_DRAM_PHY_Restore(struct dram_timing_info * timing)547 void BOARD_DRAM_PHY_Restore(struct dram_timing_info *timing)
548 {
549     struct dram_cfg_param *cfg = timing->ddrphy_cfg;
550     uint32_t i;
551 
552     /* Restore the PHY init config */
553     cfg = timing->ddrphy_cfg;
554     for (i = 0U; i < timing->ddrphy_cfg_num; i++)
555     {
556         dwc_ddrphy_apb_wr(cfg->reg, cfg->val);
557         cfg++;
558     }
559 
560     dwc_ddrphy_apb_wr(0xd0000, 0x0);
561     dwc_ddrphy_apb_wr(0xc0080, 0x3);
562 
563     /* Restore the DDR PHY CSRs */
564     cfg = timing->ddrphy_trained_csr;
565     for (i = 0U; i < timing->ddrphy_trained_csr_num; i++)
566     {
567         dwc_ddrphy_apb_wr(cfg->reg, cfg->val);
568         cfg++;
569     }
570 
571     dwc_ddrphy_apb_wr(0xc0080, 0x2);
572     dwc_ddrphy_apb_wr(0xd0000, 0x1);
573 
574     /* Load the PIE image */
575     cfg = timing->ddrphy_pie;
576     for (i = 0U; i < timing->ddrphy_pie_num; i++)
577     {
578         dwc_ddrphy_apb_wr(cfg->reg, cfg->val);
579         cfg++;
580     }
581 }
582 
BOARD_DDRC_Restore(struct dram_timing_info * timing)583 void BOARD_DDRC_Restore(struct dram_timing_info *timing)
584 {
585     struct dram_cfg_param *ddrc_cfg = timing->ddrc_cfg;
586     uint32_t i;
587     uint32_t waitus = 0;
588 
589     for (i = 0; i < timing->ddrc_cfg_num; i++)
590     {
591         /* skip the dram init as we resume from retention */
592         if (ddrc_cfg->reg == DDR_SDRAM_CFG_ADDR)
593         {
594             W32(ddrc_cfg->reg, ddrc_cfg->val & ~BIT(4));
595         }
596         else
597         {
598             W32(ddrc_cfg->reg, ddrc_cfg->val);
599         }
600         ddrc_cfg++;
601     }
602 
603     if (timing->fsp_cfg != NULL)
604     {
605         ddrc_cfg = timing->fsp_cfg[0].ddrc_cfg;
606         while (ddrc_cfg->reg != 0)
607         {
608             W32(ddrc_cfg->reg, ddrc_cfg->val);
609             ddrc_cfg++;
610         }
611     }
612 
613     /* Check whether PHY initialization is complete */
614     BOARD_Check_Dfi_Init_Complete();
615 
616     /* Enable DDRC */
617     DDR_CTRL->DDR_SDRAM_CFG |= DDRC_DDR_SDRAM_CFG_MEM_EN_MASK;
618 
619     /* Waiting for DDRC idel status all the times */
620     waitus = BOARD_Check_DDRC_Idle(DDRC_DDRDSR_2_Check_Idel_time, DDRC_DDRDSR_2_IDLE_MASK);
621 
622     if (waitus == -1)
623     {
624         PRINTF("Check DDRC Idel status fail\r\n");
625     }
626 }
627 
BOARD_DRAMEnterRetention(void)628 void BOARD_DRAMEnterRetention(void)
629 {
630     uint32_t waitus = 0;
631 
632     /* Waiting for DDRC idel status all the times */
633     waitus = BOARD_Check_DDRC_Idle(DDRC_DDRDSR_2_Check_Idel_time, DDRC_DDRDSR_2_IDLE_MASK);
634 
635     if (waitus == -1)
636     {
637         PRINTF("Check DDRC Idel status fail\r\n");
638     }
639 
640     /* Set MEM_HALT bit, halt any new transactions for DDR SDRAM */
641     DDR_CTRL->DDR_SDRAM_CFG |= DDRC_DDR_SDRAM_CFG_MEM_HALT_MASK;
642 
643     /* Set DDR_SDRAM_CFG_3[SR_FAST_WK_EN], select fast wakeup method */
644     DDR_CTRL->DDR_SDRAM_CFG_3 |= DDRC_DDR_SDRAM_CFG_3_SR_FAST_WK_EN_MASK;
645 
646     /*
647      * Temporarily DDR_SDRAM_CFG_4 info unavailable on 93 RM.
648      * Program DFI Frequency to max value, DDR_SDRAM_CFG_4[DFI_FREQ] to 2b'1111.
649      */
650     DDR_CTRL->DDR_SDRAM_CFG_4 |= 0x1f000;
651 
652     /* Clear DDR_ZQ_CNTL register */
653     DDR_CTRL->DDR_ZQ_CNTL = 0x0;
654 
655     /* Set DEBUG_26[DIS_CTRLUPD_REQ */
656     SETBIT32(DDR_CTRL_BASE + DDR_DEBUG_26, (0x1f << 12));
657 
658     /* Force the DDRC to enter self-refresh */
659     BOARD_DDRCEnterSelfRefresh();
660 
661     /* Set BLK_CTRL_DDRMIX AUTO_CG_CTRL[HWFFC_ACG_FORCE_B], enable ipg_stop_reg while auto cg */
662     BLK_CTRL_DDRMIX->AUTO_CG_CTRL |= DDRMIX_BLK_CTRL_AUTO_CG_CTRL_HWFFC_ACG_FORCE_B_MASK;
663     /* Set DDR_SDRAM_CFG[SREN] bit, enable DDR self-refresh function during sleep */
664     DDR_CTRL->DDR_SDRAM_CFG |= DDRC_DDR_SDRAM_CFG_SREN_MASK;
665     /* Set DDR_SDRAM_CFG_3[DRAIN_FOR_SR], drain DDRC main command for self refresh */
666     DDR_CTRL->DDR_SDRAM_CFG_3 |= DDRC_DDR_SDRAM_CFG_3_DRAIN_FOR_SR_MASK;
667 
668     /* Set stop and poll stop ack */
669     BOARD_Ipg_Stop_Ack_Wait(DDRC_STOP_ACK_SET_MASK);
670     BLK_CTRL_DDRMIX->DDRC_STOP_CTRL |= DDRMIX_BLK_CTRL_DDRC_STOP_CTRL_DDRC_STOP_MASK;
671 
672     BOARD_Ipg_Stop_Ack_Wait(DDRC_STOP_ACK_POLL_MASK);
673 
674     /* Clear DDR_INTERVAL(this disables refreshes) */
675     DDR_CTRL->DDR_SDRAM_INTERVAL = 0x0;
676 
677     /* Set DDR_SDRAM_MD_CNTL[CKE_CNTL] to 0x00100000(this forces CKE to remain low) */
678     DDR_CTRL->DDR_SDRAM_MD_CNTL = 0x00100000;
679 
680     /* Remove Stop request via DDRMIX register */
681     BLK_CTRL_DDRMIX->DDRC_STOP_CTRL &= ~DDRMIX_BLK_CTRL_DDRC_STOP_CTRL_DDRC_STOP_MASK;
682 
683     BOARD_Check_Dfi_Init_Complete();
684 
685     /* Clear DDR_SDRAM_CFG[SREN] */
686     DDR_CTRL->DDR_SDRAM_CFG &= ~DDRC_DDR_SDRAM_CFG_SREN_MASK;
687 
688     /* Clear DDR_SDRAM_CFG_3[SR_FAST_WK_EN] */
689     DDR_CTRL->DDR_SDRAM_CFG_3 &= ~DDRC_DDR_SDRAM_CFG_3_SR_FAST_WK_EN_MASK;
690 
691     /* Set stop request and poll again via DDRMIX register */
692     BOARD_Ipg_Stop_Ack_Wait(DDRC_STOP_ACK_SET_MASK);
693     BLK_CTRL_DDRMIX->DDRC_STOP_CTRL |= DDRMIX_BLK_CTRL_DDRC_STOP_CTRL_DDRC_STOP_MASK;
694 
695     BOARD_Ipg_Stop_Ack_Wait(DDRC_STOP_ACK_POLL_MASK);
696 
697     /* Set STOP request again via DDRMIX register */
698     BLK_CTRL_DDRMIX->DDRC_STOP_CTRL &= ~DDRMIX_BLK_CTRL_DDRC_STOP_CTRL_DDRC_STOP_MASK;
699 
700     /* Should check PhyInLP3 pub reg */
701     dwc_ddrphy_apb_wr(0xd0000, 0x0);
702     if (!(dwc_ddrphy_apb_rd(0x90028) & 0x1))
703     {
704         PRINTF("PhyInLP3 = 1\r\n");
705     }
706     dwc_ddrphy_apb_wr(0xd0000, 0x1);
707 
708     /* Clear PwrOkIn via DDRMIX regsiter */
709     SRC_DPHY_SLICE->SINGLE_RESET_SW_CTRL |= SRC_MIX_SLICE_SINGLE_RESET_SW_CTRL_RST_CTRL_SOFT_0_MASK;
710     /* Power off the DDRMIX */
711     SDK_DelayAtLeastUs(10, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);
712     SRC_DDR_SLICE->SLICE_SW_CTRL |= SRC_MIX_SLICE_SLICE_SW_CTRL_PDN_SOFT_MASK;
713     SDK_DelayAtLeastUs(10, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);
714 }
715 
/*
 * Bring the DRAM back out of retention: power up the DDRMIX, pulse its
 * reset, cold-reset the DDR PHY, re-lock the DRAM PLL and leave bypass,
 * then restore the PHY and controller from the saved timing_info.
 */
void BOARD_DRAMExitRetention(void)
{
    /* Power up the DDRMIX */
    SRC_DDR_SLICE->SLICE_SW_CTRL &= ~SRC_MIX_SLICE_SLICE_SW_CTRL_PDN_SOFT_MASK;

    /* additional step to make sure DDR exit retenton works */
    SRC_DDR_SLICE->SLICE_SW_CTRL |= SRC_MIX_SLICE_SLICE_SW_CTRL_RST_CTRL_SOFT_MASK;
    SDK_DelayAtLeastUs(10, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);

    SRC_DDR_SLICE->SLICE_SW_CTRL &= ~SRC_MIX_SLICE_SLICE_SW_CTRL_RST_CTRL_SOFT_MASK;
    SDK_DelayAtLeastUs(10, SDK_DEVICE_MAXIMUM_CPU_CLOCK_FREQUENCY);

    /* Cold reset the DDRPHY */
    BOARD_DDRPHY_ClodRest();

    /* Config the DRAM PLL to FSP0 */
    BOARD_DRAM_PLL_Init();
    BOARD_DDR_Disable_Bypass();

    /* Reload the DDRPHY config */
    BOARD_DRAM_PHY_Restore(timing_info);

    /* Reload the ddrc config */
    BOARD_DDRC_Restore(timing_info);
}
741 #endif
742