1 /*******************************************************************************
2  * Copyright 2019-2021 Microchip FPGA Embedded Systems Solutions.
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  * MPFS HAL Embedded Software
7  *
8  */
9 
10 /*******************************************************************************
11  * @file mss_ddr.h
12  * @author Microchip-FPGA Embedded Systems Solutions
13  * @brief DDR related code
14  *
15  */
16 #include <string.h>
17 #include <stdio.h>
18 #include "mpfs_hal/mss_hal.h"
19 #ifdef DDR_SUPPORT
20 #include "mss_ddr_debug.h"
21 #include "simulation.h"
22 #ifdef FABRIC_NOISE_TEST
23 #include "drivers/mss_gpio/mss_gpio.h"
24 #endif
25 
26 #define MANUAL_ADDCMD_TRAINIG
27 
28 /*******************************************************************************
29  * Local Defines
30  */
31 /* This string is updated whenever there is a change to the DDR driver */
32 #define DDR_DRIVER_VERSION_STRING   "0.3.011"
33 /* Version     |  Comment                                                     */
34 /* 0.3.011     |  Update to DDR4 ADD CMD sweep @800 <0,7,1> to <7,0>          */
35 /* 0.3.010     |  Update to LPDDR4 ADD CMD sweep values <5,4,6,3> to <1,5,1,5>*/
36 /* 0.3.009     |  Corrected refclk_offset used for lower frequencies          */
37 /*             |  See function: ddr_manual_addcmd_refclk_offset()             */
38 /* 0.3.008     |  Removed weak rand() function, which continually returned 0  */
39 /* 0.3.007     |  Updated DDR3 add cmd offsets                                */
40 /*             |  Updated DDR4 add cmd offsets                                */
41 /* 0.3.006     |  modified debug printing after failure                       */
42 /* 0.3.005     |  modified addcmd offsets DDR3/DDR3L @ 1333 = 0,1             */
43 /*             |  DDR3/DDR3L to 0,1                                           */
44 /*             |  Also some ADD CMD training improvements from Jaswanth       */
45 /* 0.3.004     |  Removed dq setting before calibration for DDR3/4 and lpddr3 */
46 /*             |  Some tidy up                                                */
47 /* 0.3.003     |  Modified latency sweep from 0-8 to 0-3. Sped up MCC test    */
48 /*             |  on failure                                                  */
49 /* 0.3.002     |  Move refclk offset outside manual training loop             */
50 /* 0.3.001     |  wip - adding in manual add cmd training                     */
51 /* 0.3.000     |  wip - adding in manual add cmd training                     */
52 /* 0.2.003     |  Updated SEG setup to match Libero 12.7, Removed warnings,   */
53 /*             |  shortened timeout in mtc_test                               */
54 /* 0.2.002     |  MTC_test() update -added more tests                         */
55 /* 0.2.001     |  Reverted ADDCMD training command                            */
56 /* 0.2.000     |  RPC166 now does short retrain by default                    */
57 /* 0.1.009     |  Removed AXI overrides. Agreed better placed in              */
58 /*             |  mss_sw_config.h until corrected in configurator v3.0        */
59 /* 0.1.008     |  Added manual addcmd training for all variants               */
60 /* 0.1.007     |  Added some updates from SVG and DCT. Also overrides AXI     */
61 /*             |  ranges if incorrectly set (Libero v12.5 and Libero v12.6)   */
62 /* 0.1.006     |  Added tuning for rpc166, read lane FIFO alignment           */
63 /* 0.1.005     |  Added parameter to modify rpc166, lane out of sync on read  */
64 /* 0.1.004     |  Corrected default RPC220 setting so dq/dqs window centred   */
65 /* 0.1.003     |  refclk_phase correctly masked during bclk sclk sw training  */
66 /* 0.1.002     |  Reset modified- corrects softreset on retry issue  (1.8.x)  */
67 /* 0.1.001     |  Reset modified- corrects softreset on retry issue  (1.7.2)  */
68 /* 0.0.016     |  Added #define DDR_FULL_32BIT_NC_CHECK_EN to mss_ddr.h       */
69 /* 0.0.016     |  Updated mss_ddr_debug.c with addition of 32-bit write test  */
70 /* 0.0.015     |  DDR3L - Use Software Bclk Sclk training                     */
71 /* 0.0.014     |  DDR3 and DDR update to sync with SVG proven golden version  */
72 /* 0.0.013     |  Added code to turn off DM if DDR4 and using ECC             */
73 /* 0.0.012     |  Added support for turning off unused I/O from Libero        */
74 
75 /*
76  * Calibration data records calculated write calibration values during training
77  */
78 mss_ddr_calibration calib_data;
79 
80 /* rx lane FIFO used for tuning  */
81 #if (TUNE_RPC_166_VALUE == 1)
82 static uint32_t rpc_166_fifo_offset;
83 #endif
84 
85 /*
86  * This string is used as a quick sanity check of write/read to DDR.
87  * The memory test core is used for more comprehensive testing during and
88  * post calibration
89  */
90 #ifdef DDR_SANITY_CHECKS_EN
91 static const uint32_t test_string[] = {
92         0x12345678,0x23211234,0x35675678,0x4456789,0x56789123,0x65432198,\
93         0x45673214,0xABCD1234,0x99999999,0xaaaaaaaa,0xbbbbbbbb,0xcccccccc,\
94         0xdddddddd,0xeeeeeeee,0x12121212,0x12345678};
95 #endif
96 
97 /*******************************************************************************
98  * external functions
99  */
100 #ifdef DEBUG_DDR_INIT
101 extern mss_uart_instance_t *g_debug_uart;
102 extern uint32_t tip_register_status (mss_uart_instance_t *g_mss_uart_debug_pt);
103 #endif
104 
105 /* Used to record instances of errors during calibration */
106 static uint32_t ddr_error_count;
107 #ifdef SWEEP_ENABLED
108 uint8_t sweep_results[MAX_NUMBER_DPC_VS_GEN_SWEEPS]\
109                      [MAX_NUMBER_DPC_H_GEN_SWEEPS]\
110                      [MAX_NUMBER_DPC_V_GEN_SWEEPS]\
111                      [MAX_NUMBER__BCLK_SCLK_OFFSET_SWEEPS]\
112                      [MAX_NUMBER_ADDR_CMD_OFFSET_SWEEPS];
113 #define TOTAL_SWEEPS (MAX_NUMBER_DPC_VS_GEN_SWEEPS*MAX_NUMBER_DPC_H_GEN_SWEEPS*\
114         MAX_NUMBER_DPC_V_GEN_SWEEPS*MAX_NUMBER__BCLK_SCLK_OFFSET_SWEEPS*\
115         MAX_NUMBER_ADDR_CMD_OFFSET_SWEEPS)
116 #endif
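
/*
 * Illustrative sketch only (not driver code): sweep_results is indexed in the
 * same order as its dimensions above, and TOTAL_SWEEPS is simply the product
 * of those dimensions. The index names below are made up for illustration.
 *
 *   sweep_results[vs_idx][h_idx][v_idx][bclk_sclk_idx][cmd_idx] = last_sweep_status;
 *
 *   TOTAL_SWEEPS == MAX_NUMBER_DPC_VS_GEN_SWEEPS * MAX_NUMBER_DPC_H_GEN_SWEEPS *
 *                   MAX_NUMBER_DPC_V_GEN_SWEEPS *
 *                   MAX_NUMBER__BCLK_SCLK_OFFSET_SWEEPS *
 *                   MAX_NUMBER_ADDR_CMD_OFFSET_SWEEPS
 */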
117 
118 /*******************************************************************************
119  * Local function declarations
120  */
121 static uint32_t ddr_setup(void);
122 static void init_ddrc(void);
123 static uint8_t write_calibration_using_mtc(uint8_t num_of_lanes_to_calibrate);
124 /*static uint8_t mode_register_write(uint32_t MR_ADDR, uint32_t MR_DATA);*/
125 static uint8_t MTC_test(uint8_t mask, uint64_t start_address, uint32_t size, MTC_PATTERN pattern, MTC_ADD_PATTERN add_pattern, uint32_t *error);
126 #ifdef VREFDQ_CALIB
127 static uint8_t FPGA_VREFDQ_calibration_using_mtc(void);
128 static uint8_t VREFDQ_calibration_using_mtc(void);
129 #endif
130 #ifdef DDR_SANITY_CHECKS_EN
131 static uint8_t rw_sanity_chk(uint64_t * address, uint32_t count);
132 static uint8_t mtc_sanity_check(uint64_t start_address);
133 #endif
134 #ifdef SET_VREF_LPDDR4_MODE_REGS
135 static uint8_t mode_register_write(uint32_t MR_ADDR, uint32_t MR_DATA);
136 #endif
137 #ifdef DDR_SANITY_CHECKS_EN
138 static uint8_t memory_tests(void);
139 #endif
140 static void ddr_off_mode(void);
141 static void set_ddr_mode_reg_and_vs_bits(uint32_t dpc_bits);
142 static void set_ddr_rpc_regs(DDR_TYPE ddr_type);
143 static uint8_t get_num_lanes(void);
144 static void load_dq(uint8_t lane);
145 static uint8_t use_software_bclk_sclk_training(DDR_TYPE ddr_type);
146 static void config_ddr_io_pull_up_downs_rpc_bits(DDR_TYPE ddr_type);
147 #ifdef SWEEP_ENABLED
148 static uint8_t get_best_sweep(sweep_index *good_index);
149 #endif
150 #ifdef MANUAL_ADDCMD_TRAINIG
151 static uint8_t ddr_manual_addcmd_refclk_offset(DDR_TYPE ddr_type, uint8_t * refclk_sweep_index);
152 #endif
153 
154 /*******************************************************************************
155  * External function declarations
156  */
157 extern void delay(uint32_t n);
158 
159 #ifdef DEBUG_DDR_INIT
160 extern mss_uart_instance_t *g_debug_uart;
161 #ifdef DEBUG_DDR_DDRCFG
162 void debug_read_ddrcfg(void);
163 #endif
164 #endif
165 
166 #ifdef FABRIC_NOISE_TEST
167 uint32_t fabric_noise_en = 1;
168 uint32_t fabric_noise_en_log = 1;
169 uint32_t num_of_noise_blocks_en = 3; /* do not set less than 1 */
170 uint32_t noise_ena = 0x0;
171 #endif
172 
173 /*******************************************************************************
174  * Instance definitions
175  */
176 
177 /*******************************************************************************
178  * Public Functions - API
179  ******************************************************************************/
180 
181 
182 /***************************************************************************//**
183  * ddr_state_machine(DDR_SS_COMMAND command)
184  * Top-level DDR setup state machine. Pass DDR_SS__INIT once to initialise,
185  * then call repeatedly until training completes or fails.
186  * @param command state machine command, e.g. DDR_SS__INIT
187  */
188 uint32_t  ddr_state_machine(DDR_SS_COMMAND command)
189 {
190     static DDR_SM_STATES ddr_state;
191     static uint32_t return_status;
192     if (command == DDR_SS__INIT)
193     {
194         ddr_state = DDR_STATE_INIT;
195     }
196     SIM_FEEDBACK0(100U + ddr_state);
197     SIM_FEEDBACK1(ddr_state);
198     switch (ddr_state)
199     {
200         default:
201         case DDR_STATE_INIT:
202             ddr_state = DDR_STATE_TRAINING;
203             return_status = 0U;
204             break;
205 
206         case DDR_STATE_TRAINING:
207             /*
208              * We stay in this state until finished training/fail training
209              */
210             return_status = ddr_setup();
211             break;
212 
213         case DDR_STATE_MONITOR:
214             /*
215              * 1. Periodically check DDR access
216              * 2. Run any tests, as directed
217              */
218 //            return_status = ddr_monitor();
219             break;
220     }
221     SIM_FEEDBACK1(0xFF000000UL + return_status);
222     return (return_status);
223 }
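
/*
 * Minimal usage sketch (comment only, not part of the driver): pass
 * DDR_SS__INIT once, then keep calling until the returned status reports
 * completion. The polling command value and the exact completion/failure
 * flags are assumptions based on ddr_setup() below, not a definitive
 * description of the API.
 *
 *   uint32_t ddr_status = ddr_state_machine(DDR_SS__INIT);
 *   while ((ddr_status & DDR_SETUP_DONE) != DDR_SETUP_DONE)
 *   {
 *       // "command" stands for any value other than DDR_SS__INIT,
 *       // which keeps the state machine running
 *       ddr_status = ddr_state_machine(command);
 *   }
 */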
224 
225 
226 /***************************************************************************//**
227  * ddr_setup(void)
228  * Carries out the DDR training sequence. Called from the DDR_STATE_TRAINING
229  * state of ddr_state_machine() until training completes or fails.
230  * @return training status
231  */
232 static uint32_t ddr_setup(void)
233 {
234     static DDR_TRAINING_SM ddr_training_state = DDR_TRAINING_INIT;
235     static uint32_t error;
236     static uint32_t timeout;
237 #if defined(DEBUG_DDR_INIT) || defined(SWEEP_ENABLED) /* used by sweep and debug code */
238     static uint32_t addr_cmd_value;
239     static uint32_t bclk_sclk_offset_value;
240     static uint32_t dpc_vrgen_v_value;
241     static uint32_t dpc_vrgen_h_value;
242     static uint32_t dpc_vrgen_vs_value;
243 #endif
244 #ifdef SWEEP_ENABLED
245     static SWEEP_STATES sweep_state = INIT_SWEEP;
246 #endif
247     static uint32_t retry_count;
248     static uint32_t write_latency;
249     static uint32_t tip_cfg_params;
250     static uint32_t dpc_bits;
251     static uint8_t last_sweep_status;
252 #if (TUNE_RPC_166_VALUE == 1)
253     static uint8_t num_rpc_166_retires = 0U;
254 #endif
255 #ifdef MANUAL_ADDCMD_TRAINIG
256     static uint8_t refclk_offset;
257     static  uint8_t refclk_sweep_index =0xFU;
258 #endif
259     static uint32_t bclk_answer = 0U;
260     DDR_TYPE ddr_type;
261     uint32_t ret_status = 0U;
262     uint8_t number_of_lanes_to_calibrate;
263 
264     ddr_type = LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_MASK;
265 
266     SIM_FEEDBACK0(200U + ddr_training_state);
267     SIM_FEEDBACK1(0U);
268 
269     switch (ddr_training_state)
270     {
271         case DDR_TRAINING_INIT:
272             tip_cfg_params = LIBERO_SETTING_TIP_CFG_PARAMS;
273             dpc_bits = LIBERO_SETTING_DPC_BITS ;
274             write_latency = LIBERO_SETTING_CFG_WRITE_LATENCY_SET;
275 #if (TUNE_RPC_166_VALUE == 1)
276             rpc_166_fifo_offset = DEFAULT_RPC_166_VALUE;
277 #endif
278 #ifdef MANUAL_ADDCMD_TRAINIG
279             refclk_offset = LIBERO_SETTING_MAX_MANUAL_REF_CLK_PHASE_OFFSET + 1U;
280 #endif
281 #ifdef SWEEP_ENABLED
282             sweep_state = INIT_SWEEP;
283 #endif
284             ddr_error_count = 0U;
285             error = 0U;
286             memfill((uint8_t *)&calib_data,0U,sizeof(calib_data));
287             retry_count = 0U;
288 #ifdef DEBUG_DDR_INIT
289             (void)uprint32(g_debug_uart, "\n\r Start training. TIP_CFG_PARAMS:"\
290                     , LIBERO_SETTING_TIP_CFG_PARAMS);
291 #endif
292 #ifdef SWEEP_ENABLED
293             addr_cmd_value = LIBERO_SETTING_TIP_CFG_PARAMS\
294                                          & ADDRESS_CMD_OFFSETT_MASK;
295             bclk_sclk_offset_value = (LIBERO_SETTING_TIP_CFG_PARAMS\
296                    & BCLK_SCLK_OFFSET_MASK)>>BCLK_SCLK_OFFSET_SHIFT;
297             dpc_vrgen_v_value = (LIBERO_SETTING_DPC_BITS & \
298                     BCLK_DPC_VRGEN_V_MASK)>>BCLK_DPC_VRGEN_V_SHIFT;
299             dpc_vrgen_h_value = (LIBERO_SETTING_DPC_BITS & \
300                     BCLK_DPC_VRGEN_H_MASK)>>BCLK_DPC_VRGEN_H_SHIFT;
301             dpc_vrgen_vs_value = (LIBERO_SETTING_DPC_BITS & \
302                     BCLK_DPC_VRGEN_VS_MASK)>>BCLK_DPC_VRGEN_VS_SHIFT;
303 #endif
304             ddr_training_state = DDR_TRAINING_CHECK_FOR_OFFMODE;
305             break;
306         case DDR_TRAINING_FAIL_SM2_VERIFY:
307 #ifdef DEBUG_DDR_INIT
308             (void)uprint32(g_debug_uart, "\n\r SM2_VERIFY: ",addr_cmd_value);
309 #endif
310             ddr_training_state = DDR_TRAINING_FAIL;
311             break;
312         case DDR_TRAINING_FAIL_SM_VERIFY:
313 #ifdef DEBUG_DDR_INIT
314             (void)uprint32(g_debug_uart, "\n\r SM_VERIFY: ",addr_cmd_value);
315 #endif
316             ddr_training_state = DDR_TRAINING_FAIL;
317             break;
318         case DDR_TRAINING_FAIL_SM_DQ_DQS:
319 #ifdef DEBUG_DDR_INIT
320             (void)uprint32(g_debug_uart, "\n\r SM_DQ_DQS: ",addr_cmd_value);
321 #endif
322             ddr_training_state = DDR_TRAINING_FAIL;
323             break;
324         case DDR_TRAINING_FAIL_SM_RDGATE:
325 #ifdef DEBUG_DDR_INIT
326             (void)uprint32(g_debug_uart, "\n\r SM_RDGATE: ",addr_cmd_value);
327 #endif
328             ddr_training_state = DDR_TRAINING_FAIL;
329             break;
330         case DDR_TRAINING_FAIL_SM_WRLVL:
331 #ifdef DEBUG_DDR_INIT
332             (void)uprint32(g_debug_uart, "\n\r SM_WRLVL: ",addr_cmd_value);
333 #endif
334             ddr_training_state = DDR_TRAINING_FAIL;
335             break;
336         case DDR_TRAINING_FAIL_SM_ADDCMD:
337 #ifdef DEBUG_DDR_INIT
338             (void)uprint32(g_debug_uart, "\n\r SM_ADDCMD: ",addr_cmd_value);
339 #endif
340             ddr_training_state = DDR_TRAINING_FAIL;
341             break;
342         case DDR_TRAINING_FAIL_SM_BCLKSCLK:
343 #ifdef DEBUG_DDR_INIT
344             (void)uprint32(g_debug_uart, "\n\r SM_BCLKSCLK: ",addr_cmd_value);
345 #endif
346             ddr_training_state = DDR_TRAINING_FAIL;
347             break;
348         case DDR_TRAINING_FAIL_BCLKSCLK_SW:
349 #ifdef DEBUG_DDR_INIT
350             (void)uprint32(g_debug_uart, "\n\r BCLKSCLK_SW: ",addr_cmd_value);
351 #endif
352             ddr_training_state = DDR_TRAINING_FAIL;
353             break;
354         case DDR_TRAINING_FAIL_FULL_32BIT_NC_CHECK:
355 #ifdef DEBUG_DDR_INIT
356             (void)uprint32(g_debug_uart, "\n\r 32BIT_NC_CHECK: ",addr_cmd_value);
357 #endif
358             ddr_training_state = DDR_TRAINING_FAIL;
359             break;
360         case DDR_TRAINING_FAIL_32BIT_CACHE_CHECK:
361 #ifdef DEBUG_DDR_INIT
362             (void)uprint32(g_debug_uart, "\n\r 32BIT_CACHE_CHECK: ",addr_cmd_value);
363 #endif
364             ddr_training_state = DDR_TRAINING_FAIL;
365             break;
366         case DDR_TRAINING_FAIL_MIN_LATENCY:
367 #ifdef DEBUG_DDR_INIT
368             (void)uprint32(g_debug_uart, "\n\r MIN_LATENCY: ",addr_cmd_value);
369 #endif
370             ddr_training_state = DDR_TRAINING_FAIL;
371             break;
372         case DDR_TRAINING_FAIL_START_CHECK:
373 #ifdef DEBUG_DDR_INIT
374             (void)uprint32(g_debug_uart, "\n\r START_CHECK: ",addr_cmd_value);
375 #endif
376             ddr_training_state = DDR_TRAINING_FAIL;
377             break;
378         case DDR_TRAINING_FAIL_PLL_LOCK:
379 #ifdef DEBUG_DDR_INIT
380             (void)uprint32(g_debug_uart, "\n\r PLL LOCK FAIL: ",addr_cmd_value);
381 #endif
382             ddr_training_state = DDR_TRAINING_FAIL;
383             break;
384         case DDR_TRAINING_FAIL_DDR_SANITY_CHECKS:
385 #ifdef DEBUG_DDR_INIT
386             (void)uprint32(g_debug_uart, "\n\r DDR_SANITY_CHECKS FAIL: ",\
387                                                                 addr_cmd_value);
388 #endif
389             ddr_training_state = DDR_TRAINING_FAIL;
390             break;
391         case DDR_SWEEP_AGAIN:
392             retry_count++;
393             last_sweep_status = CALIBRATION_PASSED;
394     #ifdef DEBUG_DDR_INIT
395             (void)uprint32(g_debug_uart, "\n\r\n\r DDR_SWEEP_AGAIN: ",\
396                                         ddr_training_state);
397     #endif
398             ddr_training_state = DDR_CHECK_TRAINING_SWEEP;
399             break;
400         case DDR_TRAINING_FAIL:
401 #ifdef DEBUG_DDR_INIT
402             {
403                 tip_register_status (g_debug_uart);
404                 (void)uprint32(g_debug_uart, "\n\r ****************************************************", 0U);
405 
406             }
407 #endif
408             retry_count++;
409             if(last_sweep_status != CALIBRATION_SUCCESS)
410             {
411                 last_sweep_status = CALIBRATION_FAILED;
412             }
413     #ifdef DEBUG_DDR_INIT
414             (void)uprint32(g_debug_uart, "\n\r\n\r DDR_TRAINING_FAIL: ",\
415                         ddr_training_state);
416             (void)uprint32(g_debug_uart, "\n\r Retry Count: ", retry_count);
417     #endif
418             ddr_training_state = DDR_CHECK_TRAINING_SWEEP;
419             break;
420 
421         case DDR_CHECK_TRAINING_SWEEP:
422             {
423 #ifdef SWEEP_ENABLED
424                 /* first check if we are finished */
425                 if(last_sweep_status == CALIBRATION_SUCCESS)
426                 {
427                     /*
428                      * Try again with calculated values
429                      */
430                     ddr_training_state = DDR_TRAINING_CHECK_FOR_OFFMODE;
431                 }
432                 else if(retry_count == TOTAL_SWEEPS)
433                 {
434                     sweep_index index;
435 #ifdef DEBUG_DDR_INIT
436                     sweep_status(g_debug_uart);
437 #endif
438                     /*
439                      * Choose the best index
440                      */
441                     if (get_best_sweep(&index) == 0U)
442                     {
443 #ifdef DEBUG_DDR_INIT
444                         (void)uprint32(g_debug_uart, "\n\r sweep success: ",\
445                                                                 tip_cfg_params);
446 #endif
447                         last_sweep_status = CALIBRATION_SUCCESS;
448                         /*
449                          * Use obtained settings
450                          */
451                         addr_cmd_value = index.cmd_index +\
452                                 LIBERO_SETTING_MIN_ADDRESS_CMD_OFFSET;
453                         bclk_sclk_offset_value = index.bclk_sclk_index +\
454                                 LIBERO_SETTING_MIN_ADDRESS_BCLK_SCLK_OFFSET;
455                         dpc_vrgen_v_value = index.dpc_vgen_index +\
456                                 LIBERO_SETTING_MIN_DPC_V_GEN;
457                         dpc_vrgen_h_value = index.dpc_vgen_h_index +\
458                                 LIBERO_SETTING_MIN_DPC_H_GEN;
459                         dpc_vrgen_vs_value = index.dpc_vgen_vs_index +\
460                                 LIBERO_SETTING_MIN_DPC_VS_GEN;
461 
462                         tip_cfg_params = ((tip_cfg_params &\
463                             (~BCLK_SCLK_OFFSET_MASK))|\
464                              (bclk_sclk_offset_value<<BCLK_SCLK_OFFSET_SHIFT));
465                         tip_cfg_params = ((tip_cfg_params &\
466                                  (~ADDRESS_CMD_OFFSETT_MASK))|(addr_cmd_value));
467                         dpc_bits = ((dpc_bits &\
468                                  (~BCLK_DPC_VRGEN_V_MASK))|\
469                                    (dpc_vrgen_v_value<<BCLK_DPC_VRGEN_V_SHIFT));
470                         dpc_bits = ((dpc_bits &\
471                                  (~BCLK_DPC_VRGEN_H_MASK))|\
472                                    (dpc_vrgen_h_value<<BCLK_DPC_VRGEN_H_SHIFT));
473                         dpc_bits = ((dpc_bits &\
474                                  (~BCLK_DPC_VRGEN_VS_MASK))|\
475                                  (dpc_vrgen_vs_value<<BCLK_DPC_VRGEN_VS_SHIFT));
476                         ddr_training_state = DDR_TRAINING_CHECK_FOR_OFFMODE;
477                     }
478                     else
479                     {
480 #ifdef SWEEP_ENABLED
481                         sweep_state = INIT_SWEEP;
482 #endif
483                         retry_count = 0U;
484                         ddr_training_state = DDR_TRAINING_SWEEP;
485                     }
486                 }
487                 else
488                 {
489                     ddr_training_state = DDR_TRAINING_SWEEP;
490                 }
491                 ddr_error_count = 0U;
492                 error = 0U;
493                 memfill((uint8_t *)&calib_data,0U,sizeof(calib_data));
494                 DDRCFG->DFI.PHY_DFI_INIT_START.PHY_DFI_INIT_START   = 0x0U;
495                 /* reset controller */
496                 DDRCFG->MC_BASE2.CTRLR_INIT.CTRLR_INIT = 0x0U;
497                 CFG_DDR_SGMII_PHY->training_start.training_start = 0x0U;
498             }
499 #else       /* we are not SWEEP_ENABLED */
500             ddr_error_count = 0U;
501             error = 0U;
502             memfill((uint8_t *)&calib_data,0U,sizeof(calib_data));
503             DDRCFG->DFI.PHY_DFI_INIT_START.PHY_DFI_INIT_START   = 0x0U;
504             /* reset controller */
505             DDRCFG->MC_BASE2.CTRLR_INIT.CTRLR_INIT = 0x0U;
506             CFG_DDR_SGMII_PHY->training_start.training_start = 0x0U;
507 
508             ddr_training_state = DDR_TRAINING_CHECK_FOR_OFFMODE;
509             }
510 #endif
511             break;
512 
513         case DDR_TRAINING_SWEEP:
514 #ifdef SWEEP_ENABLED
515             {
516                 static uint32_t sweep_count_cmd_offset;
517                 static uint32_t sweep_count_bck_sclk;
518                 static uint32_t sweep_count_dpc_v_bits;
519                 static uint32_t sweep_count_dpc_h_bits;
520                 static uint32_t sweep_count_dpc_vs_bits;
521 
522                 switch(sweep_state)
523                 {
524                     case INIT_SWEEP:
525                         /*
526                          * Parameter values
527                          */
528                         addr_cmd_value = LIBERO_SETTING_MIN_ADDRESS_CMD_OFFSET;
529                         bclk_sclk_offset_value =\
530                                 LIBERO_SETTING_MIN_ADDRESS_BCLK_SCLK_OFFSET;
531                         dpc_vrgen_v_value = LIBERO_SETTING_MIN_DPC_V_GEN;
532                         dpc_vrgen_h_value = LIBERO_SETTING_MIN_DPC_H_GEN;
533                         dpc_vrgen_vs_value = LIBERO_SETTING_MIN_DPC_VS_GEN;
534                         /*
535                          * state counts
536                          */
537                         sweep_count_cmd_offset = 0U;
538                         sweep_count_bck_sclk = 0U;
539                         sweep_count_dpc_v_bits = 0U;
540                         sweep_count_dpc_h_bits = 0U;
541                         sweep_count_dpc_vs_bits = 0U;
542                         sweep_state = ADDR_CMD_OFFSET_SWEEP;
543                         __attribute__((fallthrough)); /* deliberately fall through */
544                     case ADDR_CMD_OFFSET_SWEEP:
545                         /*
546                          * Record sweep result
547                          */
548                         sweep_results[sweep_count_dpc_vs_bits][sweep_count_dpc_h_bits][sweep_count_dpc_v_bits]\
549                             [sweep_count_bck_sclk]\
550                                 [sweep_count_cmd_offset] = last_sweep_status;
551                         /*
552                          * sweep:  ADDR_CMD OFFSET
553                          */
554                         addr_cmd_value++;
555                         if (addr_cmd_value > \
556                                 LIBERO_SETTING_MAX_ADDRESS_CMD_OFFSET)
557                         {
558                             addr_cmd_value = \
559                                     LIBERO_SETTING_MIN_ADDRESS_CMD_OFFSET;
560                         }
561 
562                         tip_cfg_params = ((tip_cfg_params &\
563                                  (~ADDRESS_CMD_OFFSETT_MASK))|(addr_cmd_value));
564                         sweep_count_cmd_offset++;
565                         if(sweep_count_cmd_offset > MAX_NUMBER_ADDR_CMD_OFFSET_SWEEPS)
566                         {
567                             sweep_count_cmd_offset = 0U;
568                             sweep_state = BCLK_SCLK_OFFSET_SWEEP;
569                         }
570                         else
571                         {
572                             /*
573                              * Now do a sweep
574                              */
575                             ddr_error_count = 0U;
576                             error = 0U;
577                             memfill((uint8_t *)&calib_data,0U,sizeof(calib_data));
578                             DDRCFG->DFI.PHY_DFI_INIT_START.PHY_DFI_INIT_START   = 0x00000000U;
579                             /* reset controller */
580                             DDRCFG->MC_BASE2.CTRLR_INIT.CTRLR_INIT = 0x00000000U;
581                             CFG_DDR_SGMII_PHY->training_start.training_start    = 0x00000000U;
582                             ddr_training_state = DDR_TRAINING_CHECK_FOR_OFFMODE;
583                         }
584                         break;
585                     case BCLK_SCLK_OFFSET_SWEEP:
586                         /*
587                          * sweep:  BCLK_SCLK
588                          */
589                         bclk_sclk_offset_value++;
590                         if (bclk_sclk_offset_value > \
591                                 LIBERO_SETTING_MAX_ADDRESS_BCLK_SCLK_OFFSET)
592                         {
593                             bclk_sclk_offset_value = \
594                                     LIBERO_SETTING_MIN_ADDRESS_BCLK_SCLK_OFFSET;
595                         }
596                         tip_cfg_params = ((tip_cfg_params &\
597                              (~BCLK_SCLK_OFFSET_MASK))|\
598                                  (bclk_sclk_offset_value<<BCLK_SCLK_OFFSET_SHIFT));
599                         sweep_count_bck_sclk++;
600                         if(sweep_count_bck_sclk > MAX_NUMBER__BCLK_SCLK_OFFSET_SWEEPS)
601                         {
602                             sweep_count_bck_sclk = 0U;
603                             sweep_state = DPC_VRGEN_V_SWEEP;
604                         }
605                         else
606                         {
607                             sweep_state = ADDR_CMD_OFFSET_SWEEP;
608                         }
609                         break;
610                     case DPC_VRGEN_V_SWEEP:
611                         /*
612                          * sweep:  DPC_VRGEN_V [4:6]
613                          * LIBERO_SETTING_DPC_BITS
614                          */
615                         dpc_vrgen_v_value++;
616                         if (dpc_vrgen_v_value > \
617                                     LIBERO_SETTING_MAX_DPC_V_GEN)
618                         {
619                             dpc_vrgen_v_value = \
620                                     LIBERO_SETTING_MIN_DPC_V_GEN;
621                         }
622                         dpc_bits = ((dpc_bits &\
623                              (~BCLK_DPC_VRGEN_V_MASK))|\
624                                  (dpc_vrgen_v_value<<BCLK_DPC_VRGEN_V_SHIFT));
625                         sweep_count_dpc_v_bits++;
626                         if(sweep_count_dpc_v_bits > MAX_NUMBER_DPC_V_GEN_SWEEPS)
627                         {
628                             sweep_count_dpc_v_bits = 0U;
629                             sweep_state = DPC_VRGEN_H_SWEEP;
630                         }
631                         else
632                         {
633                             sweep_state = BCLK_SCLK_OFFSET_SWEEP;
634                         }
635                         break;
636                     case DPC_VRGEN_H_SWEEP:
637                         /*
638                          * sweep:  DPC_VRGEN_H
639                          * LIBERO_SETTING_DPC_BITS
640                          */
641                         dpc_vrgen_h_value++;
642                         if (dpc_vrgen_h_value > \
643                                     LIBERO_SETTING_MAX_DPC_H_GEN)
644                         {
645                             dpc_vrgen_h_value = \
646                                     LIBERO_SETTING_MIN_DPC_H_GEN;
647                         }
648                         dpc_bits = ((dpc_bits &\
649                              (~BCLK_DPC_VRGEN_H_MASK))|\
650                                  (dpc_vrgen_h_value<<BCLK_DPC_VRGEN_H_SHIFT));
651                         sweep_count_dpc_h_bits++;
652                         if(sweep_count_dpc_h_bits > MAX_NUMBER_DPC_H_GEN_SWEEPS)
653                         {
654                             sweep_count_dpc_h_bits = 0U;
655                             sweep_state = DPC_VRGEN_VS_SWEEP;
656                         }
657                         else
658                         {
659                             sweep_state = DPC_VRGEN_V_SWEEP;
660                         }
661                         break;
662                     case DPC_VRGEN_VS_SWEEP:
663                         /*
664                          * sweep:  DPC_VRGEN_VS
665                          * LIBERO_SETTING_DPC_BITS
666                          */
667                         dpc_vrgen_vs_value++;
668                         if (dpc_vrgen_vs_value > \
669                                     LIBERO_SETTING_MAX_DPC_VS_GEN)
670                         {
671                             dpc_vrgen_vs_value = \
672                                     LIBERO_SETTING_MIN_DPC_VS_GEN;
673                         }
674                         dpc_bits = ((dpc_bits &\
675                              (~BCLK_DPC_VRGEN_VS_MASK))|\
676                                  (dpc_vrgen_vs_value<<BCLK_DPC_VRGEN_VS_SHIFT));
677                         sweep_count_dpc_vs_bits++;
678                         if(sweep_count_dpc_vs_bits > MAX_NUMBER_DPC_VS_GEN_SWEEPS)
679                         {
680                             sweep_count_dpc_vs_bits = 0U;
681                         }
682                         sweep_state = DPC_VRGEN_H_SWEEP;
683                         break;
684                     case FINISHED_SWEEP:
685                         break;
686                     default:
687                         break;
688                 }
689             }
690 #endif /* SWEEP_ENABLED */
691             break;
692 
693         case DDR_TRAINING_CHECK_FOR_OFFMODE:
694             /*
695              * check if we are in off mode
696              */
697             if (ddr_type == DDR_OFF_MODE)
698             {
699                 ddr_off_mode();
700                 ret_status |= DDR_SETUP_DONE;
701                 return (ret_status);
702             }
703             else
704             {
705                 /*
706                  * set initial conditions
707                  */
708                 /* enable fabric noise */
709 #ifdef FABRIC_NOISE_TEST
710                if(fabric_noise_en)
711                {
712                     SYSREG->SOFT_RESET_CR &= 0x00U;
713                     SYSREG->SUBBLK_CLOCK_CR = 0xffffffffUL;
714                     SYSREG->GPIO_INTERRUPT_FAB_CR = 0x00000000UL;
715                     PLIC_init();
716                     PLIC_SetPriority_Threshold(0);
717                     __enable_irq();
718                     /* bit0-bit15 used to enable noise logic in steps of 5%
719                       bit 16 noise logic reset
720                       bit 17 clkmux sel
721                       bit 18 pll powerdown
722                       bit 19 external io enable for GCLKINT */
723                     PLIC_SetPriority(GPIO0_BIT0_or_GPIO2_BIT0_PLIC_0, 4U);
724                     PLIC_SetPriority(GPIO0_BIT1_or_GPIO2_BIT1_PLIC_1, 4U);
725                     PLIC_SetPriority(GPIO0_BIT2_or_GPIO2_BIT2_PLIC_2, 4U);
726                     PLIC_SetPriority(GPIO0_BIT3_or_GPIO2_BIT3_PLIC_3, 4U);
727                     PLIC_SetPriority(GPIO0_BIT4_or_GPIO2_BIT4_PLIC_4, 4U);
728                     PLIC_SetPriority(GPIO0_BIT5_or_GPIO2_BIT5_PLIC_5, 4U);
729                     PLIC_SetPriority(GPIO0_BIT6_or_GPIO2_BIT6_PLIC_6, 4U);
730                     PLIC_SetPriority(GPIO0_BIT7_or_GPIO2_BIT7_PLIC_7, 4U);
731                     PLIC_SetPriority(GPIO0_BIT8_or_GPIO2_BIT8_PLIC_8, 4U);
732                     PLIC_SetPriority(GPIO0_BIT9_or_GPIO2_BIT9_PLIC_9, 4U);
733                     PLIC_SetPriority(GPIO0_BIT10_or_GPIO2_BIT10_PLIC_10, 4U);
734                     PLIC_SetPriority(GPIO0_BIT11_or_GPIO2_BIT11_PLIC_11, 4U);
735                     PLIC_SetPriority(GPIO0_BIT12_or_GPIO2_BIT12_PLIC_12, 4U);
736                     PLIC_SetPriority(GPIO0_BIT13_or_GPIO2_BIT13_PLIC_13, 4U);
737                     PLIC_SetPriority(GPIO1_BIT0_or_GPIO2_BIT14_PLIC_14, 4U);
738                     PLIC_SetPriority(GPIO1_BIT1_or_GPIO2_BIT15_PLIC_15, 4U);
739                     PLIC_SetPriority(GPIO1_BIT2_or_GPIO2_BIT16_PLIC_16, 4U);
740                     PLIC_SetPriority(GPIO1_BIT3_or_GPIO2_BIT17_PLIC_17, 4U);
741                     PLIC_SetPriority(GPIO1_BIT4_or_GPIO2_BIT18_PLIC_18, 4U);
742                     PLIC_SetPriority(GPIO1_BIT5_or_GPIO2_BIT19_PLIC_19, 4U);
743 
744                     MSS_GPIO_init(GPIO2_LO);
745                     MSS_GPIO_config_all(GPIO2_LO, MSS_GPIO_OUTPUT_MODE);
746                     MSS_GPIO_set_outputs(GPIO2_LO, 0x00000UL);      /* bits 15:0 - 0, noise logic  disabled */
747                     delay(100);
748                     /*MSS_GPIO_set_outputs(GPIO2_LO, 0x00FFFUL);*/    /* bits 12:0 - 1,  56% enabled */
749                     noise_ena = (1 << num_of_noise_blocks_en) - 1;
750                     MSS_GPIO_set_outputs(GPIO2_LO, noise_ena);      /* num_of_noise_blocks_en * 4.72% */
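                    /*
                     * Worked example, assuming the ~4.72% per-block figure
                     * noted above: num_of_noise_blocks_en = 3 gives
                     * noise_ena = (1 << 3) - 1 = 0x7, i.e. three noise blocks
                     * enabled, roughly 3 * 4.72% = ~14% of the noise logic.
                     */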
751                     fabric_noise_en = 0;
752                 }
753 #endif /* FABRIC_NOISE_TEST */
754                 write_latency = MIN_LATENCY;
755                 ddr_training_state = DDR_TRAINING_SET_MODE_VS_BITS;
756             }
757             break;
758 
759         case DDR_TRAINING_SET_MODE_VS_BITS:
760 #ifdef DEBUG_DDR_INIT
761                 (void)uprint32(g_debug_uart, "\n\r dpc_bits: ",\
762                                                                       dpc_bits);
763 #endif
764             /*
765              * Set the training mode
766              */
767             set_ddr_mode_reg_and_vs_bits(dpc_bits);
768             ddr_training_state = DDR_TRAINING_FLASH_REGS;
769             break;
770 
771         case DDR_TRAINING_FLASH_REGS:
772             /*
773              * flash registers with RPC values
774              *   Enable DDR IO decoders
775              *   Note :
776              *      rpc sequence:
777              *          power-up -> mss_boot -> re-flash nv_map -> override
778              *      any changes (to fix issues)
779              *
780              *   SOFT_RESET_  bit 0 == periph soft reset, auto cleared
781              */
782             CFG_DDR_SGMII_PHY->SOFT_RESET_DECODER_DRIVER.SOFT_RESET_DECODER_DRIVER = 1U;
783             CFG_DDR_SGMII_PHY->SOFT_RESET_DECODER_ODT.SOFT_RESET_DECODER_ODT=1U;
784             CFG_DDR_SGMII_PHY->SOFT_RESET_DECODER_IO.SOFT_RESET_DECODER_IO = 1U;
785             ddr_training_state = DDR_TRAINING_CORRECT_RPC;
786             break;
787 
788        case DDR_TRAINING_CORRECT_RPC:
789             /*
790              * correct some rpc registers, which were incorrectly set in mode
791              * setting
792              */
793             set_ddr_rpc_regs(ddr_type);
794             ddr_training_state = DDR_TRAINING_SOFT_RESET;
795             break;
796         case DDR_TRAINING_SOFT_RESET:
797             /*
798              * Set soft reset on IP to load RPC to SCB regs (dynamic mode)
799              * Bring the DDR bank controller out of reset
800              */
801             IOSCB_BANKCONT_DDR->soft_reset = 1U;  /* DPC_BITS   NV_MAP  reset */
802             ddr_training_state = DDR_TRAINING_CALIBRATE_IO;
803             break;
804         case DDR_TRAINING_CALIBRATE_IO:
805             /*
806              * Calibrate DDR I/O here, once all RPC settings correct
807              */
808             ddr_pvt_calibration();
809 #ifdef DEBUG_DDR_INIT
810             (void)uprint32(g_debug_uart,  "\n\r PCODE = ",\
811                     (CFG_DDR_SGMII_PHY->IOC_REG2.IOC_REG2 & 0x7FU));
812             (void)uprint32(g_debug_uart,  "\n\r NCODE = ", \
813                     (((CFG_DDR_SGMII_PHY->IOC_REG2.IOC_REG2) >> 7U) & 0x7FU));
814             (void)uprint32(g_debug_uart, "\n\r addr_cmd_value: ",\
815                                                 addr_cmd_value);
816             (void)uprint32(g_debug_uart, "\n\r bclk_sclk_offset_value: ",\
817                                                     bclk_sclk_offset_value);
818             (void)uprint32(g_debug_uart, "\n\r dpc_vrgen_v_value: ",\
819                                                dpc_vrgen_v_value);
820             (void)uprint32(g_debug_uart, "\n\r dpc_vrgen_h_value: ",\
821                                                dpc_vrgen_h_value);
822             (void)uprint32(g_debug_uart, "\n\r dpc_vrgen_vs_value: ",\
823                                                dpc_vrgen_vs_value);
824 #endif
825             ddr_training_state = DDR_TRAINING_CONFIG_PLL;
826             break;
827         case DDR_TRAINING_CONFIG_PLL:
828             /*
829              *  Configure the DDR PLL
830              */
831             ddr_pll_config(SCB_UPDATE);
832             timeout = 0xFFFF;
833             ddr_training_state = DDR_TRAINING_VERIFY_PLL_LOCK;
834             break;
835         case DDR_TRAINING_VERIFY_PLL_LOCK:
836             /*
837              *  Verify DDR PLL lock
838              */
839             if (ddr_pll_lock_scb() == 0U)
840             {
841                 ddr_training_state = DDR_TRAINING_SETUP_SEGS;
842             }
843             else if(--timeout == 0U)
844             {
845                 ddr_training_state = DDR_TRAINING_FAIL_PLL_LOCK;
846             }
847             break;
848         case DDR_TRAINING_SETUP_SEGS:
849             /*
850              * Configure Segments- address mapping,  CFG0/CFG1
851              */
852             setup_ddr_segments(DEFAULT_SEG_SETUP);
853             /*
854              * enable the  DDRC
855              */
856             /* Turn on DDRC clock */
857             SYSREG->SUBBLK_CLOCK_CR |= SUBBLK_CLOCK_CR_DDRC_MASK;
858             /* Remove soft reset */
859             SYSREG->SOFT_RESET_CR   &= (uint32_t)~SOFT_RESET_CR_DDRC_MASK;
860             ddr_training_state = DDR_TRAINING_SETUP_DDRC;
861             break;
862         case DDR_TRAINING_SETUP_DDRC:
863             /*
864              * set-up DDRC
865              * Configuration taken from the user.
866              */
867             {
868                 init_ddrc();
869                 ddr_training_state = DDR_TRAINING_RESET;
870             }
871             break;
872         case DDR_TRAINING_RESET:
873             /*
874              * Assert training reset
875              *  reset pin is bit [1]
876              * and load skip setting
877              */
878             /* leave in reset */
879 /* To verify if separate reset required for DDR4 - believe it is not */
880 #ifndef SPECIAL_TRAINIG_RESET
881             CFG_DDR_SGMII_PHY->training_reset.training_reset    = 0x00000002U;
882 #ifndef     SOFT_RESET_PRE_TAG_172
883             DDRCFG->MC_BASE2.CTRLR_SOFT_RESET_N.CTRLR_SOFT_RESET_N  =\
884                                                                                 0x00000000U;
885             DDRCFG->MC_BASE2.CTRLR_SOFT_RESET_N.CTRLR_SOFT_RESET_N  =\
886                                                                                 0x00000001U;
887 #endif      /* !SOFT_RESET_PRE_TAG_172 */
888 #else
889             /* Disable CKE */
890             DDRCFG->MC_BASE2.INIT_DISABLE_CKE.INIT_DISABLE_CKE = 0x1;
891 
892             /* Assert FORCE_RESET */
893             DDRCFG->MC_BASE2.INIT_FORCE_RESET.INIT_FORCE_RESET = 0x1;
894             delay(100);
895             /* release reset to memory here, set INIT_FORCE_RESET to 0 */
896             DDRCFG->MC_BASE2.INIT_FORCE_RESET.INIT_FORCE_RESET = 0x0;
897             delay(500000);
898 
899             /* Enable CKE */
900             DDRCFG->MC_BASE2.INIT_DISABLE_CKE.INIT_DISABLE_CKE = 0x0;
901             delay(1000);
902 
903             /* reset pin is bit [1] */
904             CFG_DDR_SGMII_PHY->training_reset.training_reset    = 0x00000002U;
905 
906 #endif
907             ddr_training_state = DDR_TRAINING_ROTATE_CLK;
908             break;
909         case DDR_TRAINING_ROTATE_CLK:
910         /*
911          * Rotate bclk90 by 90 deg
912          */
913             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt = 0x00000004U;
914             /*expert mode enabling */
915             CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x00000002U;
916             /*   */
917             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x7CU; /* loading */
918             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x78U;
919             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x78U;
920             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x7CU;
921             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x4U;
922             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x64U;
923             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x66U; /* increment */
924             for (uint32_t d=0;d< \
925                 LIBERO_SETTING_TIP_CONFIG_PARAMS_BCLK_VCOPHS_OFFSET;d++)
926             {
927                 CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x67U;
928                 CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x66U;
929             }
930             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x64U;
931             CFG_DDR_SGMII_PHY->expert_pllcnt.expert_pllcnt= 0x4U;
932 
933             /* setting load delay lines */
934             CFG_DDR_SGMII_PHY->expert_dlycnt_mv_rd_dly_reg.expert_dlycnt_mv_rd_dly_reg\
935                 = 0x1FU;
936             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
937                     0xFFFFFFFFU;  /* setting to 1 to load delaylines */
938             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
939                     0x00000000U;
940 
941             /* write w DFICFG_REG mv_rd_dly 0x00000000 #
942                tip_apb_write(12'h89C, 32'h0);   mv_rd_dly  */
943             CFG_DDR_SGMII_PHY->expert_dlycnt_mv_rd_dly_reg.expert_dlycnt_mv_rd_dly_reg \
944                 = 0x0U;
945 
946             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
947                     0xFFFFFFFFU;  /* setting to 1 to load delaylines */
948             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
949                     0x00000000U;
950 
951 
952             CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause       =\
953                     0x0000003FU;
954             CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause       =\
955                     0x00000000U;
956 
957             /* DQ */
958             /*    dfi_training_complete_shim = 1'b1
959                   dfi_wrlvl_en_shim = 1'b1 */
960             CFG_DDR_SGMII_PHY->expert_dfi_status_override_to_shim.expert_dfi_status_override_to_shim = 0x6;
961             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg0.expert_dlycnt_load_reg0=\
962                     0xFFFFFFFFU;   /* load output delays */
963             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
964                     0xF;           /* (ECC) - load output delays */
965             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg0.expert_dlycnt_load_reg0=\
966                     0x0;           /* clear */
967             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
968                     0x0;           /* (ECC) clear */
969 
970             /* DQS
971              * dfi_wrlvl_en_shim = 1'b1 */
972             CFG_DDR_SGMII_PHY->expert_dfi_status_override_to_shim.expert_dfi_status_override_to_shim = 0x4;
973             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg0.expert_dlycnt_load_reg0=\
974                     0xFFFFFFFFU;   /* load output delays */
975             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
976                     0xF;           /* (ECC) - load output delays */
977             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg0.expert_dlycnt_load_reg0=\
978                     0x0;           /* clear */
979             CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1=\
980                     0x0;           /* (ECC) clear */
981 
982             CFG_DDR_SGMII_PHY->expert_dfi_status_override_to_shim.expert_dfi_status_override_to_shim = 0x0; /* clear */
983 
984             CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause       =\
985                     0x0000003FU;
986             CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause       =\
987                     0x00000000U;
988 
989             /* expert mode disabling */
990             CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en                 =\
991                     0x00000000U;
992             ddr_training_state = DDR_TRAINING_SET_TRAINING_PARAMETERS;
993             break;
994         case DDR_TRAINING_SET_TRAINING_PARAMETERS:
995             /*
996              * SET TRAINING PARAMETERS
997              *
998              * TIP STATIC PARAMETERS 0
999              *
1000              *  30:22   Number of VCO Phase offsets between BCLK and SCLK
1001              *  21:13   Number of VCO Phase offsets between BCLK and SCLK
1002              *  12:6    Number of VCO Phase offsets between BCLK and SCLK
1003              *  5:3     Number of VCO Phase offsets between BCLK and SCLK
1004              *  2:0  Number of VCO Phase offsets between REFCLK and ADDCMD bits
1005              */
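            /*
             * Illustrative sketch only, using the mask/shift names seen
             * elsewhere in this file: the add-cmd and bclk/sclk offset fields
             * are merged into tip_cfg_params as
             *
             *   tip_cfg_params = (tip_cfg_params & ~ADDRESS_CMD_OFFSETT_MASK)
             *                    | addr_cmd_value;
             *   tip_cfg_params = (tip_cfg_params & ~BCLK_SCLK_OFFSET_MASK)
             *                    | (bclk_sclk_offset_value << BCLK_SCLK_OFFSET_SHIFT);
             */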
1006             {
1007 #ifdef DEBUG_DDR_INIT
1008                 (void)uprint32(g_debug_uart, "\n\r tip_cfg_params: ",\
1009                                                                 tip_cfg_params);
1010 #endif
1011 
1012                 CFG_DDR_SGMII_PHY->tip_cfg_params.tip_cfg_params =\
1013                                                                 tip_cfg_params;
1014                 timeout = 0xFFFF;
1015 
1016                 if(use_software_bclk_sclk_training(ddr_type) == 1U)
1017                 {
1018                     /*
1019                      * Initiate software training
1020                      */
1021 #ifdef     SOFT_RESET_PRE_TAG_172
1022                     DDRCFG->MC_BASE2.CTRLR_SOFT_RESET_N.CTRLR_SOFT_RESET_N  =\
1023                                                                     0x00000001U;
1024 #endif
1025                     ddr_training_state = DDR_TRAINING_IP_SM_BCLKSCLK_SW;
1026                 }
1027                 else
1028                 {
1029                     /*
1030                      * Initiate IP training and wait for dfi_init_complete
1031                      */
1032                     /* release training_reset */
1033                     if (ddr_type != DDR3)
1034                     {
1035                         CFG_DDR_SGMII_PHY->training_reset.training_reset =\
1036                             0x00000000U;
1037                     }
1038                     else
1039                     {
1040                         DDRCFG->MC_BASE2.CTRLR_SOFT_RESET_N.CTRLR_SOFT_RESET_N  =\
1041                                                                    0x00000001U;
1042                     }
1043                     ddr_training_state = DDR_TRAINING_IP_SM_START;
1044                 }
1045             }
1046             break;
1047 
1048         case DDR_TRAINING_IP_SM_BCLKSCLK_SW:
1049             /*
1050              * We have chosen to use software bclk sclk sweep instead of IP
1051              */
1052             {
1053                 uint32_t bclk_phase, bclk90_phase,refclk_phase;
1054                 bclk_answer = 0U;
1055                 {
1056                     /*
1057                      * BEGIN MANUAL BCLKSCLK TRAINING
1058                      */
1059                     uint32_t rx_previous=0x3U;
1060                     uint32_t rx_current=0U;
1061                     uint32_t answer_count[8U]={0U,0U,0U,0U,0U,0U,0U,0U};
1062                     uint32_t answer_index=0U;
1063 
1064                     /*UPPER LIMIT MUST BE MULTIPLE OF 8 */
1065                     for (uint32_t i=0U; i<(8U * 100); i++)
1066                     {
1067 
1068                         bclk_phase   = ( i    & 0x07UL ) << 8U;
1069                         bclk90_phase = ((i+2U) & 0x07UL ) << 11U;
1070                         /*
1071                          * LOAD BCLK90 PHASE
1072                          */
1073                         MSS_SCB_DDR_PLL->PLL_PHADJ = (0x00004003UL | bclk_phase | bclk90_phase);
1074                         MSS_SCB_DDR_PLL->PLL_PHADJ = (0x00000003UL | bclk_phase | bclk90_phase);
1075                         MSS_SCB_DDR_PLL->PLL_PHADJ = (0x00004003UL | bclk_phase | bclk90_phase);
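                        /*
                         * Sketch of the packing used above (field positions
                         * inferred from the shifts, not from a register spec):
                         * PLL_PHADJ bits [10:8] carry the 3-bit bclk phase and
                         * bits [13:11] the bclk90 phase. For example, i = 5
                         * gives bclk_phase = 0x500 and bclk90_phase = 0x3800.
                         */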
1076 
1077                         /*
1078                         * No pause required here; inserting one causes an issue
1079                         */
1080 
1081                         /*
1082                         * SAMPLE RX_BCLK
1083                         */
1084                         rx_current = ((CFG_DDR_SGMII_PHY->expert_addcmd_ln_readback.expert_addcmd_ln_readback) >> 12)& 0x03;
1085                         /* IF WE FOUND A TRANSITION, BREAK THE LOOP */
1086                         if ((rx_current & (~rx_previous)) != 0x00000000UL)
1087                         {
1088                             answer_index=i&0x07U;
1089                             /* increment the answer count for this index */
1090                             answer_count[answer_index]++;
1091                         }
1092 
1093                         rx_previous = rx_current;
1094                         uint32_t max=0U;
1095                         for (uint32_t j=0U;j<8U;j++)
1096                         {
1097                             /* sweep through found answers and select the most common */
1098                             if (answer_count[j] > max)
1099                             {
1100                                 bclk_answer = j;
1101                                 max=answer_count[j];
1102                             }
1103                         }
1104                     }
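                    /*
                     * At this point bclk_answer holds the phase index (0-7)
                     * that produced the most detected transitions over the
                     * 8 * 100 samples, i.e. a simple majority vote. For
                     * example (hypothetical counts), an answer_count[] of
                     * {2, 90, 3, 1, 0, 2, 1, 1} would give bclk_answer = 1.
                     */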
1105                 }
1106                 ddr_training_state = DDR_MANUAL_ADDCMD_TRAINING_SW;
1107                 break;
1108 
1109           case DDR_MANUAL_ADDCMD_TRAINING_SW:
1110                 {
1111                     /*
1112                      * APPLY OFFSET & LOAD THE PHASE
1113                      * bclk_sclk_offset_value
1114                      * BCLK_SCLK_OFFSET_BASE
1115                      */
1116                     {
1117                         bclk_phase = ((bclk_answer+SW_TRAING_BCLK_SCLK_OFFSET)    & 0x07UL ) << 8U;
1118                         bclk90_phase=((bclk_answer+SW_TRAING_BCLK_SCLK_OFFSET+2U)  & 0x07UL ) << 11U;
1119                         MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase);
1120                         MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00000003UL | bclk_phase | bclk90_phase);
1121                         MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase);
1122 
1123                     }
1124 
1125                     /* Store the DRV & VREF initial values (to be re-applied after CA training) */
1126                     uint32_t ca_drv=CFG_DDR_SGMII_PHY->rpc1_DRV.rpc1_DRV;
1127                     uint32_t ca_vref=(CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS >>12)&0x3F;
1128 
1129                     /* SET DRIVE TO MAX */
1130                     { /* vref training begins */
1131                         uint32_t dpc_bits_new;
1132                         uint32_t vref_answer;
1133                         uint32_t transition_a5_min_last = 129U;
1134                         CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x00000001U;
1135                         for (uint32_t ca_indly=0;ca_indly < 30; ca_indly=ca_indly+5)
1136                         {
1137                             CFG_DDR_SGMII_PHY->rpc145.rpc145 = ca_indly;//TEMPORARY
1138                             CFG_DDR_SGMII_PHY->rpc147.rpc147 = ca_indly;//TEMPORARY
1139                             uint32_t break_loop=1;
1140                             uint32_t in_window=0;
1141                             vref_answer=128;
1142                         for (uint32_t vref=5;vref <30;vref++) //begin vref training
1143                         {
1144                             uint32_t transition_a5_max=0;
1145                             uint32_t transition_a5_min=128;
1146                             uint32_t rx_a5_last,rx_a5;
1147                             uint32_t transition_a5;
1148                             uint32_t range_a5=0;
1149 
1150                             if(transition_a5_min_last > 128U)
1151                             {
1152                                 transition_a5_min_last=128U;
1153                             }
1154 
1155                             IOSCB_BANKCONT_DDR->soft_reset = 0U;  /* DPC_BITS   NV_MAP  reset */
1156                             //SET VREF HERE
1157                             delay(10);
1158                             dpc_bits_new=( CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & 0xFFFC0FFF ) | (vref <<12) | (0x1<<18);
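                            /*
                             * Field layout assumed from the mask above: bits
                             * [17:12] of DPC_BITS hold the 6-bit vref code
                             * (cleared by 0xFFFC0FFF) and bit 18 is set; for
                             * example vref = 0x10 contributes 0x10000.
                             */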
1159                             CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS=dpc_bits_new;
1160                             delay(10);
1161                             IOSCB_BANKCONT_DDR->soft_reset = 1U;  /* DPC_BITS   NV_MAP  reset */
1162                             delay(10);
1163 
1164 
1165                              //ADDCMD Training improvement , adds delay on A9 loopback path - Suggested by Alister
1166                              //CFG_DDR_SGMII_PHY->rpc145.rpc145 = 0x8U;
1167 
1168                             uint32_t deltat = 128UL;
1169 
1170                             for (uint32_t j = 0; j<20 ; j++)
1171                             {
1172 
1173                                 //LOAD INDLY
1174                                 CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x000000U;
1175                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1176                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1177                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1178 
1179                                 //LOAD OUTDLY
1180                                 CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x180000U;
1181                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1182                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1183                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1184 
1185                                // rx_a5_last=0x0;
1186                                 rx_a5_last=0xF;
1187                                 transition_a5=0;
1188                                 deltat=128;
1189                                 delay(10);
1190 
1191                                 for (uint32_t i=0; i < (128-ca_indly);i++)
1192                                 {
1193                                     CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x0U;
1194                                     CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x180000U;
1195                                     CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x0U;
1196                                     delay(10);
1197                                     rx_a5 = (CFG_DDR_SGMII_PHY->expert_addcmd_ln_readback.expert_addcmd_ln_readback & 0x0300) >> 8;
1198 
                                    if (transition_a5 != 0)
                                    {
                                        if ((i - transition_a5) > 8) /* was 8 */
                                        {
                                            break;
                                        }
                                    }

                                    if (transition_a5 == 0)
                                    {
                                        // if ( ((rx_a5 ^ rx_a5_last) & (~rx_a5) )  )
                                        if ((rx_a5 ^ rx_a5_last) & rx_a5)
                                        {
                                            transition_a5 = i;
                                        }
                                        else
                                        {
                                            rx_a5_last = rx_a5;
                                        }
                                    }
                                    else
                                    {
                                        if ((i - transition_a5) == 4) /* was 4 */
                                        {
                                            /* if rx_a5 not stable after 4 increments, set transition
                                             * detected back to 0 (false transition) and keep looking */
                                            // if(!((rx_a5 ^ rx_a5_last) & (~rx_a5) ))
                                            if (!((rx_a5 ^ rx_a5_last) & rx_a5))
                                            {
                                                transition_a5 = 0; /* continue looking for transition */
                                                rx_a5_last = rx_a5;
                                            }
                                        }
                                    }
1224 
1225 
1226 
1227                                 }//delay loop ends here
1228                                 if (transition_a5 !=0)
1229                                 {
1230                                     if (transition_a5 > transition_a5_max)
1231                                     {
1232                                         transition_a5_max = transition_a5;
1233                                     }
1234                                     if (transition_a5 < transition_a5_min)
1235                                     {
1236 
1237                                         transition_a5_min = transition_a5;
1238                                     }
1239                                 }
1240                             }//Sample loop ends here
1241                             range_a5=transition_a5_max-transition_a5_min;
1242                             if (transition_a5_min < 10){
1243                                 break_loop=0;
1244                             }
1245 
1246 
1247                             if (range_a5 <=5)
1248                             {
1249                                 //(min(transition_a5_min - transition_a5_min_last,transition_a5_min_last-transition_a5_min) <=4))
1250                                 if (transition_a5_min > transition_a5_min_last)
1251                                 {
1252                                     deltat=transition_a5_min-transition_a5_min_last;
1253                                 }
1254                                 else
1255                                 {
1256                                     deltat=transition_a5_min_last-transition_a5_min;
1257                                 }
1258                                 if (deltat <=5)
1259                                 {
1260                                     in_window=(in_window<<1)|1;
1261                                 }
1262                             }
1263                             else
1264                             {
1265                                 in_window=(in_window<<1)|0;
1266                             }
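                            /*
                             * in_window is a shift register of pass/fail per VREF step: a step
                             * passes when the transition range is within 5 taps and the minimum
                             * transition moved by no more than 5 taps since the previous step.
                             * Two consecutive passing steps ((in_window & 0x3) == 0x3) select
                             * vref_answer.
                             */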
1267 
1268 #ifdef DEBUG_DDR_INIT
1269                             (void)uprint32(g_debug_uart,  "\n\r ca_indly ", ca_indly);
1270                             (void)uprint32(g_debug_uart,  " vref ", vref);
1271                             (void)uprint32(g_debug_uart,  " a5_dly_max:", transition_a5_max);
1272                             (void)uprint32(g_debug_uart,  " a5_dly_min:", transition_a5_min);
1273                             (void)uprint32(g_debug_uart,  " a5_dly_min_last:", transition_a5_min_last);
1274                             (void)uprint32(g_debug_uart,  " range_a5:", range_a5);
1275                             (void)uprint32(g_debug_uart,  " deltat:", deltat);
1276                             (void)uprint32(g_debug_uart,  " in_window:", in_window);
1277                             (void)uprint32(g_debug_uart,  " vref_answer:", vref_answer);
1278 #endif
1279                             if(vref_answer==128)
1280                             {
1281                                 if ((in_window &0x3)==0x3) //ALISTER CHANGE 2/17/2021
1282                                 {
1283                                     vref_answer=vref; //ALISTER CHANGE
1284 #ifndef PRINT_CA_VREF_WINDOW
1285                                     break;
1286 #endif
1287                                 }
1288                             }
1289                             transition_a5_min_last=transition_a5_min;
1290                         }
1291                             if (break_loop)
1292                             {
1293                                 break;
1294                             }
1295                         }
1296 #ifdef DEBUG_DDR_INIT
1297                         if (vref_answer!=128U)
1298                         {
1299                             (void)uprint32(g_debug_uart,  "\n\r vref_answer found", vref_answer);
1300                         }
1301                         else
1302                         {
1303                             (void)uprint32(g_debug_uart,  "\n\r CA_VREF training failed! ", vref_answer);
1304 
1305                         }
1306 #endif
1307                         IOSCB_BANKCONT_DDR->soft_reset = 0U;  /* DPC_BITS   NV_MAP  reset */
1308                         /* SET VREF HERE */
1309                         delay(10);
1310                         if(vref_answer == 128U)
1311                         {
1312                             vref_answer = 0x10U;
1313                             dpc_bits_new=( CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & 0xFFFC0FFF ) | (vref_answer <<12U) | (0x1<<18U);
1314                         }
1315                         else
1316                         {
                            /* keep the vref_answer found during the sweep */
1318                             dpc_bits_new=( CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & 0xFFFC0FFF ) | (vref_answer <<12) | (0x1<<18U);
1319                         }
1320 
1321                         //dpc_bits_new=( CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & 0xFFFC0FFF ) | (vref_answer <<12) | (0x1<<18U);
1322 
1323                         CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS=dpc_bits_new;
1324                         delay(10);
1325                         IOSCB_BANKCONT_DDR->soft_reset = 1U;  /* DPC_BITS   NV_MAP  reset */
1326                         delay(10000);
1327 
1328                     }/* end vref_training; */
1329 
1330                     {
1331                         /* Begin MANUAL ADDCMD TRAINING */
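                        /*
                         * Overview of the sweep below: for each refclk phase offset (the 16-pass
                         * loop covers the 8 phases twice) the delay tap at which the A5 readback
                         * and the CK readback transition is measured; per-phase CK transitions
                         * are recorded for the first 8 passes. The phase whose CK transition sits
                         * closest below the largest observed A5 transition is selected, and the
                         * CA output delay is then stepped by the remaining difference, the
                         * apparent intent being to align CA with CK. If no acceptable phase is
                         * found, init_del_offset is increased and the whole sweep is retried by
                         * the enclosing while loop.
                         */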
1332                         uint32_t init_del_offset = 0x8U;
1333                         uint32_t a5_offset_fail;
1334                         uint32_t rpc147_offset = 0x2U; //4  //0
1335                         uint32_t rpc145_offset = 0x0U; //0  //4
1336                         refclk_offset = ddr_manual_addcmd_refclk_offset(ddr_type, &refclk_sweep_index);
1337                         a5_offset_fail = 1U;
1338                         while(a5_offset_fail)
1339                         {
1340                            // uint32_t init_del_offset;
1341                            // uint32_t a5_offset_fail = 0U;
1342 
1343                             a5_offset_fail = 0U; //1 indicates fail
1344 
                            //ADDCMD Training improvement, adds delay on DDR clock loopback path - suggested by Alister
1346                             // CFG_DDR_SGMII_PHY->rpc147.rpc147 = 0x8U;//0x4U;
1347                             CFG_DDR_SGMII_PHY->rpc147.rpc147 = init_del_offset + rpc147_offset;
1348 
                             //ADDCMD Training improvement, adds delay on A9 loopback path - suggested by Alister
1350                              //CFG_DDR_SGMII_PHY->rpc145.rpc145 = 0x8U;
1351                             CFG_DDR_SGMII_PHY->rpc145.rpc145 = init_del_offset + rpc145_offset;
1352 
1353                             CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x00000003U; //ENABLE DLY Control & PLL Control
1354 
1355                             uint32_t rx_a5;
1356                             uint32_t rx_a5_last;
1357                             uint32_t rx_ck;
1358                             uint32_t rx_ck_last;
1359                             uint32_t transition_a5;
1360                             uint32_t transition_ck;
1361                             uint32_t i;
1362                             uint32_t j;
1363                             uint32_t difference [8]={0};
1364                             //uint32_t transition_a5_array [8]={0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff};
1365                             uint32_t transition_ck_array [8]={0};
1366 
1367                             uint32_t transitions_found;
1368                             uint32_t transition_a5_max = 0U;
1369 
1370                             for (j = 0U; j<16U ; j++)
1371                             { //Increase J loop to increase number of samples on transition_a5 (for noisy CA in LPDDR4)
1372                                 //LOAD INDLY
1373                                 CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x000000U;
1374                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1375                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1376                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1377 
1378                                 //LOAD OUTDLY
1379                                 CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x180000U;
1380                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1381                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1382                                 CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1383 
1384                                 refclk_phase = (j % 8U) << 2U;
1385                                 MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase | refclk_phase);
1386                                 MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00000003UL | bclk_phase | bclk90_phase | refclk_phase);
1387                                 MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase | refclk_phase);
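                                /*
                                 * The refclk phase under test is applied by writing PLL_PHADJ three
                                 * times, toggling bit 14 (0x4000) high-low-high around the selected
                                 * phase value; the same sequence is repeated when the final phase
                                 * is applied further down.
                                 */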
1388                                // rx_a5_last=0x0U;
1389                                 rx_a5_last=0xFU;
1390                                 rx_ck_last=0x5U;
1391                                 transition_a5=0U;
1392                                 transition_ck=0U;
1393 
1394                                 delay(100U);
1395                                 transitions_found = 0U;
1396                                 i = 0U;
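                                /*
                                 * Each iteration below pulses expert_dlycnt_move_reg1 to advance the
                                 * delay line by one tap, then samples expert_addcmd_ln_readback:
                                 * bits [9:8] give rx_a5 and bits [3:0] give rx_ck. The loop exits
                                 * once both transitions have been located, or after 128 taps.
                                 */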
1397                                 while((!transitions_found) & (i < 128U))
1398                                 {
1399                                         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x0U;
1400                                         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x180000U;
1401                                         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x0U;
1402                                         delay(10);
1403                                         rx_a5 = (CFG_DDR_SGMII_PHY->expert_addcmd_ln_readback.expert_addcmd_ln_readback & 0x0300U) >> 8U;
1404                                         rx_ck = CFG_DDR_SGMII_PHY->expert_addcmd_ln_readback.expert_addcmd_ln_readback & 0x000F;
1405 
1406                                         if ((transition_a5 != 0U) && (transition_ck != 0U))
1407                                         {
1408                                            if (((i - transition_a5) > 8U) && ((i - transition_ck) > 8U))
1409                                            {
1410                                                   //break;
1411                                                transitions_found = 1U;
1412                                            }
1413                                         }
1414 
                                        if (transition_ck == 0U)
                                        {
                                            if (rx_ck_last != 0x5U) /* IF EDGE DETECTED */
                                            {
                                                if (rx_ck == 0x5U)
                                                {
                                                    transition_ck = i; /* SET TRANSITION DETECTED AT I */
                                                }
                                            }
                                            rx_ck_last = rx_ck;
                                        }
                                        else
                                        {
                                            if ((i - transition_ck) == 4U) /* was 4 */
                                            {
                                                /* if rx_ck not stable after 4 increments, set transition
                                                 * detected back to 0 (false transition) */
                                                if (rx_ck != rx_ck_last)
                                                {
                                                    transition_ck = 0U; /* continue looking for transition */
                                                    rx_ck_last = rx_ck;
                                                }
                                            }
                                        }

                                        if (transition_a5 == 0U)
                                        {
                                            // if ( ((rx_a5 ^ rx_a5_last) & (~rx_a5) )  )
                                            if ((rx_a5 ^ rx_a5_last) & rx_a5)
                                            {
                                                transition_a5 = i;
                                            }
                                            else
                                            {
                                                rx_a5_last = rx_a5;
                                            }
                                        }
                                        else
                                        {
                                            if ((i - transition_a5) == 4U) /* was 4 */
                                            {
                                                /* if rx_a5 not stable after 4 increments, set transition
                                                 * detected back to 0 (false transition) and keep looking */
                                                // if(!((rx_a5 ^ rx_a5_last) & (~rx_a5) ))
                                                if (!((rx_a5 ^ rx_a5_last) & rx_a5))
                                                {
                                                    transition_a5 = 0; /* continue looking for transition */
                                                    rx_a5_last = rx_a5;
                                                }
                                            }
                                        }
1449 
1450 
                                        if (((transition_a5 != 0U) && (transition_ck != 0U)) &&
                                            ((i == transition_a5) || (i == transition_ck)))
                                        {
1454     #ifdef DEBUG_DDR_INIT
1455                                             (void)uprint32(g_debug_uart, \
1456                                                                                                     "\n\r   rx_a5   ",\
1457                                                                                                     rx_a5);
1458                                             (void)uprint32(g_debug_uart, \
1459                                                     "   rx_ck   ",\
1460                                                     rx_ck);
1461                                             (void)uprint32(g_debug_uart, \
1462                                                                                         "   rx_ck_last  ",\
1463                                                                                         rx_ck_last);
1464                                             (void)uprint32(g_debug_uart, \
1465                                                                                         "   transition_a5   ",\
1466                                                                                         transition_a5);
1467                                             (void)uprint32(g_debug_uart, \
1468                                                                                         "   transition_ck   ",\
1469                                                                                         transition_ck);
1470                                             (void)uprint32(g_debug_uart, \
1471                                                     "   Iteration:  ",\
1472                                                     i);
1473                                             (void)uprint32(g_debug_uart, \
1474                                                                                         "   REFCLK_PHASE:   ",\
1475                                                                                         j);
1476     #endif
1477                                         }
1478                                        i++;
1479                                     }/* delay loop ends here */
1480                                     if(transition_a5 > transition_a5_max)
1481                                         transition_a5_max =transition_a5;
1482 
1483                                     if ((transition_a5 != 0U) && (transition_ck != 0U) && (j<8U))
1484                                     {
1485                                         //transition_a5_array[j]=transition_a5;
1486                                         transition_ck_array[j]=transition_ck;
                                        /* difference now calculated in separate loop with max a5 instead of per offset - AL */
1488                                     }
1489                                 //transition_a5_last = transition_a5;
1490                                 }/* phase loop ends here */
1491 
1492 
1493                                 uint32_t min_diff=0xFFU;
1494                                 uint32_t min_refclk=0x8U;
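                                /*
                                 * Select the refclk phase: for each phase k, difference[k] is the
                                 * gap between the largest A5 transition and that phase's CK
                                 * transition (0xFF if CK transitioned later). The phase with the
                                 * smallest gap wins; min_refclk keeps its sentinel value of 8 if
                                 * no phase produced a usable result.
                                 */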
1495 
1496                                 if(transition_a5_max < 5U)
1497                                 {
1498                                     a5_offset_fail = a5_offset_fail | 1U;
1499                                 }
1500                                 for (uint32_t k = 0U;k<8U;k++)
1501                                 {
1502                                     //if(difference[k]!=0xff)
1503                                     if(transition_a5_max >= transition_ck_array[k])
1504                                         difference[k]= transition_a5_max-transition_ck_array[k];
1505                                     else
1506                                         difference[k]=0xff;
1507 
1508                                     if (difference[k] < min_diff){
1509                                         min_diff=difference[k];
1510                                         min_refclk=k;
1511                                     }
1512 #ifdef DEBUG_DDR_INIT
1513                                     (void)uprint32(g_debug_uart, "\n\r   difference  ", difference[k]);
1514                                     (void)uprint32(g_debug_uart, "   REFCLK_PHASE    ", k);
1515 #endif
1516                                 }
1517                                 if(min_diff == 0xFFU)
1518                                 {
1519                                     a5_offset_fail = a5_offset_fail | 1U;
1520                                 }
1521                                 if (min_refclk==0x8U)
1522                                 { //If ADDCMD training fails due to extremely low frequency, use PLL to provide offset.
1523                                   /*  refclk_phase = ((bclk90_phase>>11)+1)<<2;
1524                                     MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase | refclk_phase);
1525                                     MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00000003UL | bclk_phase | bclk90_phase | refclk_phase);
1526                                     MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase | refclk_phase);
1527                                     //LOAD INDLY
1528                                     CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x000000U;
1529                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1530                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1531                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1532 
1533                                     //LOAD OUTDLY
1534                                     CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x180000U;
1535                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1536                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1537                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;*/
1538                                    a5_offset_fail = a5_offset_fail | 4U;
1539                                 }
1540                                 if(a5_offset_fail == 0U)
1541                                 {
1542                                     refclk_phase =((refclk_offset+min_refclk) & 0x7U)<<2U;
1543                                     MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase | refclk_phase);
1544                                     MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00000003UL | bclk_phase | bclk90_phase | refclk_phase);
1545                                     MSS_SCB_DDR_PLL->PLL_PHADJ      = (0x00004003UL | bclk_phase | bclk90_phase | refclk_phase);
1546                                     //LOAD INDLY
1547                                     CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x000000U;
1548                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1549                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1550                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1551 
1552                                     //LOAD OUTDLY
1553                                     CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x180000U;
1554                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
1555                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x180000U;
1556                                     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = 0x000000U;
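                                    /*
                                     * Pulse the move register min_diff times to step the output delay
                                     * (direction_reg1 = 0x180000), closing the measured gap between
                                     * the CA and CK transition points.
                                     */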
1557                                     for (uint32_t m=0U;m < min_diff; m++)
1558                                     {
1559                                         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x0U;
1560                                         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x180000U;
1561                                         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = 0x0U;
1562                                     }
1563                                     CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 = 0x000000U;
1564                                     CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x00000000U; //DISABLE DLY Control & PLL Control
1565 #ifdef DEBUG_DDR_INIT
1566                                     (void)uprint32(g_debug_uart, "\n\r  MANUAL ADDCMD TRAINING Results:\r\n          PLL OFFSET:  ",min_refclk);
1567                                     (void)uprint32(g_debug_uart, "\n\r          transition_a5_max:  ", transition_a5_max);
1568                                     (void)uprint32(g_debug_uart, "\n\r          CA Output Delay:  ", min_diff);
1569 #endif
1570                                     //break;
1571                                 }
1572                                 else
1573                                 {
                                    if(a5_offset_fail & 0x1U)
                                    {
                                        if(init_del_offset < 0xFFU )
                                        {
                                            /* transition_a5 too low: increase the indly offset on CK and CA and retrain */
                                            init_del_offset = init_del_offset + (transition_a5_max) + 5U;
                                        }
                                        else
                                        {
                                            break;
                                        }
                                    }
                                    if(a5_offset_fail & 0x2U)
                                    {
                                        /* transition_a5 not consistent: retrain without increasing the indly offset on CK and CA */
                                    }
                                    if(a5_offset_fail & 0x4U)
                                    {
                                        /* no solution found: retrain without increasing the indly offset on CK and CA */
                                    }
                                }
1594                         }
1595 
1596                     } /* END MANUAL ADDCMD TRAINING */
1597 #ifdef DEBUG_DDR_INIT
1598                     (void)uprint32(g_debug_uart,  "\n\r Returning FPGA CA VREF & CA drive to user setting.\n\r ", 0x0);
1599 #endif
1600                     /* SET VREF BACK TO CONFIGURED VALUE */
1601                     IOSCB_BANKCONT_DDR->soft_reset = 0U;  /* DPC_BITS   NV_MAP  reset */
1602                     delay(10);
1603 
1604                     CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS=\
1605                             ( CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & 0xFFFC0FFF ) | (ca_vref <<12U) | (0x1<<18U);
1606                     delay(10);
1607                     IOSCB_BANKCONT_DDR->soft_reset = 1U;  /* DPC_BITS   NV_MAP  reset */
1608                     delay(10);
1609                     /* SET CA DRV BACK TO CONFIGURED VALUE */
1610                     CFG_DDR_SGMII_PHY->rpc1_DRV.rpc1_DRV=ca_drv; //return ca_drv to original value
1611                     ddr_training_state = DDR_TRAINING_IP_SM_START;
1612                 }
1613             }
1614             if(--timeout == 0U)
1615             {
1616                 ddr_training_state = DDR_TRAINING_FAIL_BCLKSCLK_SW;
1617             }
1618             break;
1619         case DDR_TRAINING_IP_SM_START:
1620             {
1621                 CFG_DDR_SGMII_PHY->training_skip.training_skip      =\
1622                                         LIBERO_SETTING_TRAINING_SKIP_SETTING;
1623                 if ((ddr_type == DDR3)||(ddr_type == LPDDR4)||(ddr_type == DDR4))
1624                 {
1625                     /* RX_MD_CLKN */
1626                     CFG_DDR_SGMII_PHY->rpc168.rpc168 = 0x0U;
1627                 }
1628 #ifdef DDR_TRAINING_IP_SM_START_DELAY
1629                 delay(100);
1630 #endif
1631                 /* release reset to training */
1632                 CFG_DDR_SGMII_PHY->training_reset.training_reset    = 0x00000000U;
1633 #ifdef IP_SM_START_TRAINING_PAUSE
1634                 /* todo: pause removed at Alister's request for test. Will
1635                  * remove once verified not required after further testing
1636                  */
1637                 CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0xffU;
1638                 delay(100);
1639                 CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause = 0x00000000U;
1640                 CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause = 0x0000003FU;
1641                 CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause = 0x00000000U;
1642                 delay(100);
1643                 CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x00U;
1644                 delay(100);
1645 #endif
1646             }
1647             {
1648                 DDRCFG->DFI.PHY_DFI_INIT_START.PHY_DFI_INIT_START   =\
1649                                                                     0x00000000U;
1650                 /* kick off training- DDRC, set dfi_init_start */
1651                 DDRCFG->DFI.PHY_DFI_INIT_START.PHY_DFI_INIT_START   =\
1652                                                                     0x00000001U;
1653                 DDRCFG->MC_BASE2.CTRLR_INIT.CTRLR_INIT = 0x00000000U;
1654                 DDRCFG->MC_BASE2.CTRLR_INIT.CTRLR_INIT = 0x00000001U;
1655 
1656                 timeout = 0xFFFF;
1657                 ddr_training_state = DDR_TRAINING_IP_SM_START_CHECK;
1658             }
1659 #ifdef DEBUG_DDR_INIT
1660 #ifdef MANUAL_ADDCMD_TRAINIG
1661             (void)uprint32(g_debug_uart,  "\n\r\n\r ADDCMD_OFFSET ", refclk_offset);
1662 #endif
1663 #endif
1664             break;
1665         case DDR_TRAINING_IP_SM_START_CHECK:
1666 #ifndef RENODE_DEBUG
1667             if((DDRCFG->DFI.STAT_DFI_INIT_COMPLETE.STAT_DFI_INIT_COMPLETE\
1668                     & 0x01U) == 0x01U)
1669 #endif
1670             {
1671 #ifdef LANE_ALIGNMENT_RESET_REQUIRED
1672                 CFG_DDR_SGMII_PHY->lane_alignment_fifo_control.lane_alignment_fifo_control = 0x00U;
1673                 CFG_DDR_SGMII_PHY->lane_alignment_fifo_control.lane_alignment_fifo_control = 0x02U;
1674 #endif
1675 
1676 #ifdef DEBUG_DDR_INIT
1677                 (void)uprint32(g_debug_uart, \
1678                         "\n\r\n\r pll_phadj_after_hw_training ",\
1679                                         MSS_SCB_DDR_PLL->PLL_DIV_2_3);
1680                 (void)uprint32(g_debug_uart, \
1681                         "\n\r\n\r pll_phadj_after_hw_training ",\
1682                                         MSS_SCB_DDR_PLL->PLL_DIV_0_1);
1683 #endif
1684 
1685                 if(LIBERO_SETTING_TRAINING_SKIP_SETTING & BCLK_SCLK_BIT)
1686                 {
1687                     ddr_training_state = DDR_TRAINING_IP_SM_ADDCMD;
1688                 }
1689                 else
1690                 {
1691                     ddr_training_state = DDR_TRAINING_IP_SM_BCLKSCLK;
1692                 }
1693                 timeout = 0xFFFF;
1694             }
1695             if(--timeout == 0U)
1696             {
1697                 ddr_training_state = DDR_TRAINING_FAIL_START_CHECK;
1698             }
1699             break;
1700         case DDR_TRAINING_IP_SM_BCLKSCLK:
1701             if(CFG_DDR_SGMII_PHY->training_status.training_status & BCLK_SCLK_BIT)
1702             {
1703                 timeout = 0xFFFF;
1704                 ddr_training_state = DDR_TRAINING_IP_SM_ADDCMD;
1705             }
1706             if(--timeout == 0U)
1707             {
1708                 ddr_training_state = DDR_TRAINING_FAIL_SM_BCLKSCLK;
1709             }
1710             break;
1711 
1712         case DDR_TRAINING_IP_SM_ADDCMD:
1713             if(LIBERO_SETTING_TRAINING_SKIP_SETTING & ADDCMD_BIT)
1714             {
1715                 timeout = 0xFFFFF;
1716                 ddr_training_state = DDR_TRAINING_IP_SM_WRLVL;
1717             }
1718             else if(CFG_DDR_SGMII_PHY->training_status.training_status & ADDCMD_BIT)
1719             {
1720                 timeout = 0xFFFFF;
1721                 ddr_training_state = DDR_TRAINING_IP_SM_WRLVL;
1722             }
1723             if(--timeout == 0U)
1724             {
1725                 /*
1726                  * Typically this can fail for two
1727                  * reasons:
1728                  * 1. ADD/CMD not being received
1729                  * We need to sweep:
1730                  * ADDCMD_OFFSET [0:3]   RW value
1731                  *  sweep->  0x2 -> 4 -> C -> 0
1732                  * 2. DQ not received
1733                  * We need to sweep:
1734                  * LIBERO_SETTING_DPC_BITS
1735                  *  DPC_VRGEN_H [4:6]   value= 0x8->0xC
1736                  *
1737                  * */
1738                 ddr_training_state = DDR_TRAINING_FAIL_SM_ADDCMD;
1739             }
1740             break;
1741         case DDR_TRAINING_IP_SM_WRLVL:
1742             if(LIBERO_SETTING_TRAINING_SKIP_SETTING & WRLVL_BIT)
1743             {
1744                 timeout = 0xFFFF;
1745                 ddr_training_state = DDR_TRAINING_IP_SM_RDGATE;
1746             }
1747             else if(CFG_DDR_SGMII_PHY->training_status.training_status & WRLVL_BIT)
1748             {
1749                 timeout = 0xFFFFF;
1750                 ddr_training_state = DDR_TRAINING_IP_SM_RDGATE;
1751             }
1752             if(--timeout == 0U)
1753             {
1754                 ddr_training_state = DDR_TRAINING_FAIL_SM_WRLVL;
1755             }
1756             break;
1757         case DDR_TRAINING_IP_SM_RDGATE:
1758             if(LIBERO_SETTING_TRAINING_SKIP_SETTING & RDGATE_BIT)
1759             {
1760                 timeout = 0xFFFF;
1761                 ddr_training_state = DDR_TRAINING_IP_SM_DQ_DQS;
1762             }
1763             else if(CFG_DDR_SGMII_PHY->training_status.training_status & RDGATE_BIT)
1764             {
1765                 timeout = 0xFFFF;
1766                 ddr_training_state = DDR_TRAINING_IP_SM_DQ_DQS;
1767             }
1768             if(--timeout == 0U)
1769             {
1770                 ddr_training_state = DDR_TRAINING_FAIL_SM_RDGATE;
1771             }
1772             break;
1773         case DDR_TRAINING_IP_SM_DQ_DQS:
1774             if(LIBERO_SETTING_TRAINING_SKIP_SETTING & DQ_DQS_BIT)
1775             {
1776                 timeout = 0xFFFF;
1777                 ddr_training_state = DDR_TRAINING_IP_SM_VERIFY;
1778             }
1779             else if(CFG_DDR_SGMII_PHY->training_status.training_status & DQ_DQS_BIT)
1780             {
1781                 timeout = 0xFFFF;
1782                 ddr_training_state = DDR_TRAINING_IP_SM_VERIFY;
1783             }
1784             if(--timeout == 0U)
1785             {
1786                 ddr_training_state = DDR_TRAINING_FAIL_SM_DQ_DQS;
1787             }
1788             break;
1789 
1790         case DDR_TRAINING_IP_SM_VERIFY:
1791             if ((DDRCFG->DFI.STAT_DFI_TRAINING_COMPLETE.STAT_DFI_TRAINING_COMPLETE & 0x01U) == 0x01U)
1792             {
1793                  /*
1794                   * Step 15:
1795                   * check worked for each lane
1796                   */
1797                  uint32_t lane_sel, t_status = 0U;
1798                  for (lane_sel=0U; lane_sel< \
1799                             LIBERO_SETTING_DATA_LANES_USED; lane_sel++)
1800                  {
1801                      SIM_FEEDBACK1(1000U);
1802                      delay(10U);
1803                      SIM_FEEDBACK1(1001U);
1804                      CFG_DDR_SGMII_PHY->lane_select.lane_select =\
1805                              lane_sel;
1806                      delay(10U);
1807                      /*
1808                       * verify cmd address results
1809                       *  rejects if not acceptable
1810                       * */
1811                      {
1812                         uint32_t ca_status[8]= {\
1813                             ((CFG_DDR_SGMII_PHY->addcmd_status0.addcmd_status0)&0xFFU),\
1814                             ((CFG_DDR_SGMII_PHY->addcmd_status0.addcmd_status0>>8U)&0xFFU), \
1815                             ((CFG_DDR_SGMII_PHY->addcmd_status0.addcmd_status0>>16U)&0xFFU),\
1816                             ((CFG_DDR_SGMII_PHY->addcmd_status0.addcmd_status0>>24U)&0xFFU),\
1817                             ((CFG_DDR_SGMII_PHY->addcmd_status1.addcmd_status1)&0xFFU),\
1818                             ((CFG_DDR_SGMII_PHY->addcmd_status1.addcmd_status1>>8U)&0xFFU),\
1819                             ((CFG_DDR_SGMII_PHY->addcmd_status1.addcmd_status1>>16U)&0xFFU),\
1820                             ((CFG_DDR_SGMII_PHY->addcmd_status1.addcmd_status1>>24U)&0xFFU)};
1821                         uint32_t low_ca_dly_count = 0U;
1822                         uint32_t last = 0U;
1823                         uint32_t decrease_count = 0U;
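                        /*
                         * Heuristic checks on the CA training result: low_ca_dly_count counts
                         * CA bits whose recorded delay is under 5 taps, and decrease_count
                         * counts steps where the delay does not increase from one CA bit to
                         * the next, plus one extra count if ca_status[0] <= ca_status[7].
                         * Either count exceeding its threshold marks the result abnormal so
                         * that training is retried.
                         */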
1824                         for(uint32_t i =0U; i<8U;i++)
1825                         {
1826                             if(ca_status[i] < 5U)
1827                             {
1828                                 low_ca_dly_count++;
1829                             }
1830                             if(ca_status[i]<=last)
1831                             {
1832                                 decrease_count++;
1833                             }
1834                             last = ca_status[i];
1835                         }
1836                         if(ca_status[0]<= ca_status[7U])
1837                         {
1838                             decrease_count++;
1839                         }
1840                         if((LIBERO_SETTING_TRAINING_SKIP_SETTING & ADDCMD_BIT) != ADDCMD_BIT)
1841                         {
1842                             /* Retrain if abnormal CA training result detected */
1843                             if(low_ca_dly_count > ABNORMAL_RETRAIN_CA_DLY_DECREASE_COUNT)
1844                             {
1845                                 t_status = t_status | 0x01U;
1846                             }
1847                             /* Retrain if abnormal CA training result detected */
1848                             if(decrease_count > ABNORMAL_RETRAIN_CA_DECREASE_COUNT)
1849                             {
1850                                 t_status = t_status | 0x01U;
1851                             }
1852                         }
1853                      }
1854                      /* Check that gate training passed without error  */
1855                      t_status =t_status |\
1856                              CFG_DDR_SGMII_PHY->gt_err_comb.gt_err_comb;
1857                      delay(10U);
1858                      /* Check that DQ/DQS training passed without error */
1859                      if(CFG_DDR_SGMII_PHY->dq_dqs_err_done.dq_dqs_err_done != 8U)
1860                      {
1861                          t_status = t_status | 0x01U;
1862                      }
1863                      /* Check that DQ/DQS calculated window is above 5 taps. */
1864                      if(CFG_DDR_SGMII_PHY->dqdqs_status1.dqdqs_status1 < \
1865                                                                         DQ_DQS_NUM_TAPS)
1866                      {
1867                          t_status = t_status | 0x01U;
1868                      }
1869 
1870 #define DCT_EXTRA_CHECKS
#ifdef DCT_EXTRA_CHECKS  /* todo: these checks were added by DCT */
1872                      uint32_t temp = 0U, gt_clk_sel = (CFG_DDR_SGMII_PHY->gt_clk_sel.gt_clk_sel & 3U);
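                     /*
                      * gt_txdly appears to hold four 8-bit gate-training delay values, one
                      * per byte. A zero byte is treated as suspicious: if it corresponds to
                      * the selected gt_clk_sel the result is rejected, and if more than one
                      * byte is zero the result is rejected regardless of the selection.
                      */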
1873                      if(((CFG_DDR_SGMII_PHY->gt_txdly.gt_txdly)&0xFFU) == 0U) // Gate training tx_dly check: AL
1874                      {
1875                          temp++;
1876                          if(gt_clk_sel == 0)
1877                          {
1878                              t_status = t_status | 0x01U;
1879                          }
1880                      }
1881                      if(((CFG_DDR_SGMII_PHY->gt_txdly.gt_txdly>>8U)&0xFFU) == 0U)
1882                      {
1883                          temp++;
1884                          if(gt_clk_sel == 1)
1885                              {
1886                                  t_status = t_status | 0x01U;
1887                              }
1888                      }
1889                      if(((CFG_DDR_SGMII_PHY->gt_txdly.gt_txdly>>16U)&0xFFU) == 0U)
1890                      {
1891                          temp++;
1892                          if(gt_clk_sel == 2)
1893                              {
1894                                  t_status = t_status | 0x01U;
1895                              }
1896                       }
1897                      if(((CFG_DDR_SGMII_PHY->gt_txdly.gt_txdly>>24U)&0xFFU) == 0U)
1898                      {
1899                          temp++;
1900                          if(gt_clk_sel == 3)
1901                          {
1902                              t_status = t_status | 0x01U;
1903                          }
1904                      }
1905                      if(temp > 1)
1906                      {
1907                          t_status = t_status | 0x01U;
1908                      }
1909 #endif
1910                  }
1911     #ifdef RENODE_DEBUG
                 t_status = 0U;  /* Dummy success - move on to next stage */
1914     #endif
1915                  if(t_status == 0U)
1916                  {
1917                      SIM_FEEDBACK1(21U);
1918                      /*
1919                       * We can now set vref on the memory
1920                       * mode register for lpddr4
1921                       * May include other modes, and include a sweep
1922                       * Alister looking into this and will revert.
1923                       */
1924                      if (ddr_type == LPDDR4)
1925                      {
1926 #ifdef SET_VREF_LPDDR4_MODE_REGS
1927                          mode_register_write(DDR_MODE_REG_VREF,\
1928                              DDR_MODE_REG_VREF_VALUE);
1929 #endif
1930                      }
1931                      ddr_training_state = DDR_TRAINING_SET_FINAL_MODE;
1932                  }
1933                  else /* fail, try again */
1934                  {
1935                      SIM_FEEDBACK1(20U);
1936                      ddr_training_state = DDR_TRAINING_FAIL_SM_VERIFY;
1937                  }
1938              }
1939             else
1940             {
1941                 ddr_training_state = DDR_TRAINING_FAIL_SM2_VERIFY;
1942             }
1943             break;
1944 
1945 
1946         case DDR_TRAINING_SET_FINAL_MODE:
1947             /*
1948              * Set final mode register value.
1949              */
1950             CFG_DDR_SGMII_PHY->DDRPHY_MODE.DDRPHY_MODE =\
1951                 LIBERO_SETTING_DDRPHY_MODE;
1952 #ifdef DEBUG_DDR_INIT
1953             (void)uprint32(g_debug_uart, "\n\r\n\r DDR FINAL_MODE: ",\
1954                     LIBERO_SETTING_DDRPHY_MODE);
1955 #ifdef DEBUG_DDR_CFG_DDR_SGMII_PHY
1956             (void)print_reg_array(g_debug_uart ,
1957                   (uint32_t *)CFG_DDR_SGMII_PHY,\
1958                           (sizeof(CFG_DDR_SGMII_PHY_TypeDef)/4U));
1959 #endif
1960 #ifdef DEBUG_DDR_DDRCFG
1961             debug_read_ddrcfg();
1962 #endif
1963 #endif
1964 #ifdef DEBUG_DDR_INIT
1965             {
1966                 tip_register_status (g_debug_uart);
1967                 (void)uprint32(g_debug_uart, "\n\r ****************************************************", 0U);
1968 
1969             }
1970 #endif
1971             ddr_training_state = DDR_TRAINING_WRITE_CALIBRATION;
1972             break;
1973 
1974         case DDR_TRAINING_WRITE_CALIBRATION:
1975             /*
1976              * Does the following in the DDRC need to be checked??
1977              * DDRCFG->DFI.STAT_DFI_TRAINING_COMPLETE.STAT_DFI_TRAINING_COMPLETE;
1978              *
1979              */
1980             number_of_lanes_to_calibrate = get_num_lanes();
1981             /*
1982              *  Now start the write calibration as training has been successful
1983              */
1984             if(error == 0U)
1985             {
1986                 if (ddr_type == LPDDR4)
1987                 {
1988 #ifdef SWEEP_DQ_DELAY
1989                     uint8_t lane;
1990                     uint32_t dly_firstpass=0xFF;
1991                     uint32_t dly_right_edge=20U;
1992                     uint32_t pass=0U;
1993                     for(lane = 0U; lane < number_of_lanes_to_calibrate; lane++) //load DQ
1994                     {
1995                         load_dq(lane);
1996                     }
1997 
1998                     delay(1000);
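                    /*
                     * DQ delay sweep: rpc220 is stepped from 0 to 19; at each setting the DQ
                     * load value is re-applied to every lane and write calibration is run.
                     * dly_firstpass records the first passing setting and dly_right_edge the
                     * first failure after a pass, so the final rpc220 value is the midpoint
                     * of the passing window.
                     */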
1999                     for (uint32_t dq_dly=0U;dq_dly < 20U ; dq_dly=dq_dly+1U){
2000                         CFG_DDR_SGMII_PHY->rpc220.rpc220 = dq_dly; //set DQ load value
2001                         for(lane = 0U; lane < number_of_lanes_to_calibrate; lane++) //load DQ
2002                         {
2003                             load_dq(lane);
2004                         }
2005                         SIM_FEEDBACK1(1U);
2006 
2007                          delay(1000);
2008                         pass =\
2009                             write_calibration_using_mtc(\
2010                                                   number_of_lanes_to_calibrate);
2011 #ifdef DEBUG_DDR_INIT
2012                         (void)uprint32(g_debug_uart, "\n\r    dq_dly ",\
2013                                                                     dq_dly);
2014                         (void)uprint32(g_debug_uart, "    pass ",\
2015                                                                     pass);
2016                         (void)uprint32(g_debug_uart, "    wr calib result ",\
2017                                             calib_data.write_cal.lane_calib_result);
2018 #endif
2019                         if (dly_firstpass != 0xFFU)
2020                         {
2021                             if (pass !=0U)
2022                             {
2023                                 dly_right_edge=dq_dly;
2024                                 break;
2025                             }
2026                         }
2027                         if (dly_firstpass ==0xFFU)
2028                         {
2029                             if (pass==0U)
2030                             {
2031                                 dly_firstpass=dq_dly;
2032                             }
2033                         }
2034 
2035                     }
2036                     if(dly_firstpass == 0xFFU)
2037                     {
2038                         error = 1U;
2039                     }
2040                     else
2041                     {
2042                         CFG_DDR_SGMII_PHY->rpc220.rpc220 = (dly_firstpass + dly_right_edge)/2U;
2043 #ifdef DEBUG_DDR_INIT
2044                         (void)uprint32(g_debug_uart, "\n\r    dq_dly_answer ",\
2045                                 CFG_DDR_SGMII_PHY->rpc220.rpc220);
2046                         //(void)uprint32(g_debug_uart, "    vrefdq_answer ", (vref_firstpass + vref_right_edge)/2);
2047                         (void)uprint32(g_debug_uart, "    wr calib result ",\
2048                                 calib_data.write_cal.lane_calib_result);
2049 #endif
2050                         for(lane = 0U; lane < number_of_lanes_to_calibrate; lane++) //load DQ
2051                         {
2052                             load_dq(lane);
2053                         }
2054                         delay(1000);
2055                         error =\
2056                                 write_calibration_using_mtc(\
2057                                         number_of_lanes_to_calibrate);
2058                     }
2059 #else /* alternate calibration */
2060                     if(ddr_type == LPDDR4)
2061                     {
2062                         uint8_t lane;
2063                         /* Changed default value to centre dq/dqs on window */
2064                         CFG_DDR_SGMII_PHY->rpc220.rpc220 = 0xCUL;
2065                         for(lane = 0U; lane < number_of_lanes_to_calibrate; lane++)
2066                         {
2067                             load_dq(lane);
2068                         }
2069                     }
2070                     error = write_calibration_using_mtc(number_of_lanes_to_calibrate);
2071 #endif  /* end of alternate calibration */
2072                 }
2073                 else
2074                 {
2075                     SIM_FEEDBACK1(2U);
2076                     error =\
2077                       write_calibration_using_mtc(number_of_lanes_to_calibrate);
2078                 }
2079 
2080                 if(error)
2081                 {
2082                     ddr_error_count++;
2083                     SIM_FEEDBACK1(106U);
2084                 }
2085             }
2086 
2087 #if (EN_RETRY_ON_FIRST_TRAIN_PASS == 1)
2088             if((error == 0U)&&(retry_count != 0U))
2089 #else
2090             if(error == 0U)
2091 #endif
2092             {
2093 #ifdef DEBUG_DDR_INIT
2094             (void)uprint32(g_debug_uart, "\n\r\n\r wr calib result ",\
2095                     calib_data.write_cal.lane_calib_result);
2096 #endif
2097                 ddr_training_state = DDR_SWEEP_CHECK;
2098             }
2099             else if(error == MTC_TIMEOUT_ERROR)
2100             {
2101                 error = 0U;
2102                 ddr_training_state = DDR_TRAINING_FAIL_DDR_SANITY_CHECKS;
2103             }
2104             else
2105             {
2106                 error = 0U;
2107                 ddr_training_state = DDR_TRAINING_WRITE_CALIBRATION_RETRY;
2108             }
2109             break;
2110 
2111         case DDR_TRAINING_WRITE_CALIBRATION_RETRY:
2112             /*
2113              * Clear write calibration data
2114              */
2115             memfill((uint8_t *)&calib_data,0U,sizeof(calib_data));
2116             /*
             * Try the next write latency value
2118              */
2119             write_latency++;
2120             if (write_latency > MAX_LATENCY)
2121             {
2122                 write_latency = MIN_LATENCY;
2123                 ddr_training_state = DDR_TRAINING_FAIL_MIN_LATENCY;
2124             }
2125             else
2126             {
2127                 DDRCFG->DFI.CFG_DFI_T_PHY_WRLAT.CFG_DFI_T_PHY_WRLAT =\
2128                         write_latency;
2129 #ifdef DEBUG_DDR_INIT
2130                 (void)uprint32(g_debug_uart, "\n\r\n\r wr write latency ",\
2131                                                                 write_latency);
2132 #endif
2133                 ddr_training_state = DDR_TRAINING_WRITE_CALIBRATION;
2134             }
2135             break;
2136 
2137         case DDR_SWEEP_CHECK:
2138 #ifdef SWEEP_ENABLED
2139             if((retry_count != 0U)&&(retry_count < (TOTAL_SWEEPS-1U)))
2140             {
2141                 ddr_training_state = DDR_SWEEP_AGAIN;
2142             }
2143             else
2144 #endif
2145             {
2146                 ddr_training_state = DDR_SANITY_CHECKS;
2147             }
2148             break;
2149 
2150         case DDR_SANITY_CHECKS:
2151             /*
             *  Run the DDR sanity checks now that write calibration has completed
2153              */
2154 #ifdef DEBUG_DDR_INIT
2155             (void)uprint32(g_debug_uart, "\n\r\n\r DDR SANITY_CHECKS: ",\
2156                                                                         error);
2157 #endif
2158             if(error == 0U)
2159             {
2160 #ifdef DDR_SANITY_CHECKS_EN
2161                 error = memory_tests();
2162 #endif
2163             }
2164             if(error == 0U)
2165             {
2166                 ddr_training_state = DDR_FULL_MTC_CHECK;
2167             }
2168             else
2169             {
2170                 ddr_training_state = DDR_TRAINING_FAIL_DDR_SANITY_CHECKS;
2171             }
2172             break;
2173 
2174         case DDR_FULL_MTC_CHECK:
2175             {
2176                 uint64_t start_address = 0x0000000000000000ULL;
2177                 uint32_t size = ONE_MB_MTC;  /* Number of reads for each iteration 2**size*/
2178                 uint8_t mask;
2179                 if (get_num_lanes() <= 3U)
2180                 {
2181                     mask = 0x3U;
2182                 }
2183                 else
2184                 {
2185                     mask = 0xFU;
2186                 }
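                /*
                 * The lane mask passed to MTC_test() selects which lanes are exercised:
                 * 0x3 when three or fewer lanes are in use, 0xF otherwise (presumably the
                 * half-width and full-width bus configurations respectively).
                 */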
2187                 error = MTC_test(mask, start_address, size, MTC_COUNTING_PATTERN, MTC_ADD_SEQUENTIAL, &error);
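                /*
                 * Note: the result of the single MTC_test() call above is discarded;
                 * error is cleared before the full pattern sweep that follows.
                 */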
2188                 /* Read using different patterns */
2189                 error = 0U;
2190                 error |= MTC_test(mask, start_address, size, MTC_COUNTING_PATTERN, MTC_ADD_SEQUENTIAL, &error);
2191                 error |= MTC_test(mask, start_address, size, MTC_WALKING_ONE, MTC_ADD_SEQUENTIAL, &error);
2192                 error |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM, MTC_ADD_SEQUENTIAL, &error);
2193                 error |= MTC_test(mask, start_address, size, MTC_NO_REPEATING_PSEUDO_RANDOM, MTC_ADD_SEQUENTIAL, &error);
2194                 error |= MTC_test(mask, start_address, size, MTC_ALT_ONES_ZEROS, MTC_ADD_SEQUENTIAL, &error);
2195                 error |= MTC_test(mask, start_address, size, MTC_ALT_5_A, MTC_ADD_SEQUENTIAL, &error);
2196                 error |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM_16BIT, MTC_ADD_SEQUENTIAL, &error);
2197                 error |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM_8BIT, MTC_ADD_SEQUENTIAL, &error);
2198 
2199                 error |= MTC_test(mask, start_address, size, MTC_COUNTING_PATTERN, MTC_ADD_RANDOM, &error);
2200                 error |= MTC_test(mask, start_address, size, MTC_WALKING_ONE, MTC_ADD_RANDOM, &error);
2201                 error |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM, MTC_ADD_RANDOM, &error);
2202                 error |= MTC_test(mask, start_address, size, MTC_NO_REPEATING_PSEUDO_RANDOM, MTC_ADD_RANDOM, &error);
2203                 error |= MTC_test(mask, start_address, size, MTC_ALT_ONES_ZEROS, MTC_ADD_RANDOM, &error);
2204                 error |= MTC_test(mask, start_address, size, MTC_ALT_5_A, MTC_ADD_RANDOM, &error);
2205                 error |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM_16BIT, MTC_ADD_RANDOM, &error);
2206                 error |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM_8BIT, MTC_ADD_RANDOM, &error);
2207             }
2208             if(error == 0U)
2209             {
2210 #ifdef DEBUG_DDR_INIT
2211                 (void)uprint32(g_debug_uart, "\n\r\n\r Passed MTC full check ", error);
2212 #endif
2213                 ddr_training_state = DDR_FULL_32BIT_NC_CHECK;
2214             }
2215             else
2216             {
2217 #ifdef DEBUG_DDR_INIT
2218                 (void)uprint32(g_debug_uart, "\n\r\n\r Failed MTC full check ", error);
2219 #endif
2220                 ddr_training_state = DDR_TRAINING_FAIL;
2221             }
2222             break;
2223 
2224         case DDR_FULL_32BIT_NC_CHECK:
2225             /*
             * write and read back test from DDR, non-cached access
2227              */
2228             {
2229 #if (DDR_FULL_32BIT_NC_CHECK_EN == 1)
2230                 error = ddr_read_write_fn((uint64_t*)LIBERO_SETTING_DDR_32_NON_CACHE,\
2231                                      SW_CFG_NUM_READS_WRITES,\
2232                                          SW_CONFIG_PATTERN);
2233 #endif
2234             }
2235             if(error == 0U)
2236             {
2237                 ddr_training_state = DDR_FULL_32BIT_CACHE_CHECK;
2238             }
2239             else
2240             {
2241                 ddr_training_state = DDR_TRAINING_FAIL_FULL_32BIT_NC_CHECK;
2242             }
2243             break;
2244         case DDR_FULL_32BIT_CACHE_CHECK:
2245 #if (DDR_FULL_32BIT_CACHED_CHECK_EN == 1)
2246             error = ddr_read_write_fn((uint64_t*)LIBERO_SETTING_DDR_32_CACHE,\
2247                                     SW_CFG_NUM_READS_WRITES,\
2248                                     SW_CONFIG_PATTERN);
2249 #endif
2250             if(error == 0U)
2251             {
2252 #ifdef SKIP_VERIFY_PATTERN_IN_CACHE
2253                 ddr_training_state = DDR_FULL_32BIT_WRC_CHECK;
2254 #else
2255                 ddr_training_state = DDR_LOAD_PATTERN_TO_CACHE;
2256 #endif
2257             }
2258             else
2259             {
2260                 ddr_training_state = DDR_TRAINING_FAIL_32BIT_CACHE_CHECK;
2261             }
2262             break;
2263         case DDR_LOAD_PATTERN_TO_CACHE:
2264             load_ddr_pattern(LIBERO_SETTING_DDR_32_CACHE, SIZE_OF_PATTERN_TEST, SIZE_OF_PATTERN_OFFSET);
2265             if(error == 0U)
2266             {
2267                 ddr_training_state = DDR_VERIFY_PATTERN_IN_CACHE;
2268             }
2269             else
2270             {
2271                 ddr_training_state = DDR_TRAINING_FAIL;
2272             }
2273             break;
2274         case DDR_VERIFY_PATTERN_IN_CACHE:
2275             error = test_ddr(NO_PATTERN_IN_CACHE_READS, SIZE_OF_PATTERN_TEST);
2276             if(error == 0U)
2277             {
2278 #ifdef DEBUG_DDR_INIT
2279                 (void)uprint32(g_debug_uart, "\n\r\n\r write latency ",\
2280                                                                 write_latency);
2281 #if (TUNE_RPC_166_VALUE == 1)
2282                 (void)uprint32(g_debug_uart, "\n\r rpc_166_fifo_offset: ",\
2283                         rpc_166_fifo_offset);
2284 #endif
2285 #endif
2286                 ddr_training_state = DDR_FULL_32BIT_WRC_CHECK;
2287             }
2288             else
2289             {
2290 #if (TUNE_RPC_166_VALUE == 1)
2291 
2292 #ifdef DEBUG_DDR_INIT
2293                 (void)uprint32(g_debug_uart, "\n\r rpc_166_fifo_offset: ",\
2294                                         rpc_166_fifo_offset);
2295 #endif
2296 
2297 #ifdef NOT_A_FULL_RETRAIN
2298 
2299                 /* this fails post tests */
2300                 if(num_rpc_166_retires < NUM_RPC_166_VALUES)
2301                 {
2302                     num_rpc_166_retires++;
2303                     rpc_166_fifo_offset++;
2304                     if(rpc_166_fifo_offset > MAX_RPC_166_VALUE)
2305                     {
2306                         rpc_166_fifo_offset = MIN_RPC_166_VALUE;
2307                     }
2308                     /* try again here DDR_LOAD_PATTERN_TO_CACHE */
2309                 }
2310                 else
2311                 {
2312                     num_rpc_166_retires = 0U;
2313                     rpc_166_fifo_offset = DEFAULT_RPC_166_VALUE;
2314                     ddr_training_state = DDR_TRAINING_FAIL;
2315                 }
2316                 CFG_DDR_SGMII_PHY->rpc166.rpc166 = rpc_166_fifo_offset;
2317 
2318                 /* PAUSE to reset fifo (loads new RXPTR value).*/
2319                 //CFG_DDR_SGMII_PHY->expert_dfi_status_override_to_shim.expert_dfi_status_override_to_shim = 0x07U;
2320                 CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x1U;
2321                 CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause =\
2322                 0x0000003EU ;
2323                 CFG_DDR_SGMII_PHY->expert_dlycnt_pause.expert_dlycnt_pause =\
2324                 0x00000000U;
2325                 CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x8U;
2326                 delay(1000);
2327                 //END PAUSE
2328 #else
2329                 if(num_rpc_166_retires < NUM_RPC_166_VALUES)
2330                 {
2331                     num_rpc_166_retires++;
2332                     rpc_166_fifo_offset++;
2333                     if(rpc_166_fifo_offset > MAX_RPC_166_VALUE)
2334                     {
2335                         rpc_166_fifo_offset = MIN_RPC_166_VALUE;
2336                     }
2337                     /* try again here DDR_LOAD_PATTERN_TO_CACHE */
2338                 }
2339                 else
2340                 {
2341                     num_rpc_166_retires = 0U;
2342                     rpc_166_fifo_offset = DEFAULT_RPC_166_VALUE;
2343                     ddr_training_state = DDR_TRAINING_FAIL;
2344                 }
2345                 ddr_training_state = DDR_TRAINING_FAIL;
2346 #endif
2347 #else       /* (TUNE_RPC_166_VALUE == 0) */
2348                 ddr_training_state = DDR_TRAINING_FAIL;
2349 #endif
2350             }
2351             break;
2352         case DDR_FULL_32BIT_WRC_CHECK:
2353             if(error == 0U)
2354             {
2355                 ddr_training_state = DDR_FULL_64BIT_NC_CHECK;
2356             }
2357             else
2358             {
2359                 ddr_training_state = DDR_TRAINING_FAIL;
2360             }
2361             break;
2362         case DDR_FULL_64BIT_NC_CHECK:
2363             if(error == 0U)
2364             {
2365                 ddr_training_state = DDR_FULL_64BIT_CACHE_CHECK;
2366             }
2367             else
2368             {
2369                 ddr_training_state = DDR_TRAINING_FAIL;
2370             }
2371             break;
2372         case DDR_FULL_64BIT_CACHE_CHECK:
2373             if(error == 0U)
2374             {
2375                 ddr_training_state = DDR_FULL_64BIT_WRC_CHECK;
2376             }
2377             else
2378             {
2379                 ddr_training_state = DDR_TRAINING_FAIL;
2380             }
2381             break;
2382         case DDR_FULL_64BIT_WRC_CHECK:
2383             if(error == 0U)
2384             {
2385                 ddr_training_state = DDR_TRAINING_VREFDQ_CALIB;
2386             }
2387             else
2388             {
2389                 ddr_training_state = DDR_TRAINING_FAIL;
2390             }
2391 
2392             break;
2393 
2394         case DDR_TRAINING_VREFDQ_CALIB:
2395 #ifdef VREFDQ_CALIB
2396             /*
2397              * This step is optional
2398              * todo: Test once initial board verification complete
2399              */
2400             error = VREFDQ_calibration_using_mtc();
2401             if(error != 0U)
2402             {
2403                 ddr_error_count++;
2404             }
2405 #endif
2406             ddr_training_state = DDR_TRAINING_FPGA_VREFDQ_CALIB;
2407             break;
2408 
2409         case DDR_TRAINING_FPGA_VREFDQ_CALIB:
2410 #ifdef FPGA_VREFDQ_CALIB
2411             /*
2412              * This step is optional
2413              * todo: Test once initial board verification complete
2414              */
2415             error = FPGA_VREFDQ_calibration_using_mtc();
2416             if(error != 0U)
2417             {
2418                 ddr_error_count++;
2419             }
2420 #endif
2421             ddr_training_state = DDR_TRAINING_FINISH_CHECK;
2422             break;
2423 
2424         case DDR_TRAINING_FINISH_CHECK:
2425             /*
2426              *   return status
2427              */
2428 #ifdef DEBUG_DDR_INIT
2429             {
2430                 tip_register_status (g_debug_uart);
2431                 uprint(g_debug_uart, "\n\r\n\r DDR_TRAINING_PASS: ");
2432                 uprint(g_debug_uart, "\n\n\r ****************************************************** \n\r");
2433                 (void)uprint32(g_debug_uart, "\n ****************************************************", 0);
2434                 (void)uprint32(g_debug_uart, "\n\r ****************************************************", 0U);
2435                 (void)uprint32(g_debug_uart, "\n\r ***************PHY PARAMETERS After training pass******************", 0U);
2436                 (void)print_reg_array(g_debug_uart ,
2437                     (uint32_t *)CFG_DDR_SGMII_PHY,
2438                         (sizeof(CFG_DDR_SGMII_PHY_TypeDef)/4U));
2439                 (void)uprint32(g_debug_uart, "\n\r ***************CTRLR PARAMETERS After training pass******************", 0U);
2440 #ifdef DEBUG_DDR_DDRCFG
2441                 debug_read_ddrcfg();
2442 #endif
2443 
2444             }
2445 #endif
2446             if(ddr_error_count > 0)
2447             {
2448                 ret_status |= DDR_SETUP_FAIL;
2449             }
2450             else
2451             {
2452                 /*
2453                  * Configure Segments- address mapping,  CFG0/CFG1
2454                  */
2455                 setup_ddr_segments(LIBERO_SEG_SETUP);
2456             }
2457             ret_status |= DDR_SETUP_DONE;
2458             ddr_training_state = DDR_TRAINING_FINISHED;
2459             break;
2460 
2461         default:
2462         case DDR_TRAINING_FINISHED:
2463               break;
2464     } /* end of case statement */
2465 
2466     return (ret_status);
2467 }
2468 
2469 
2470 /**
2471  * get_num_lanes(void)
2472  * @return number of lanes used: 2 (16-bit), 3 (16-bit + ECC), 4 (32-bit) or 5 (32-bit + ECC)
2473  * Note: lane 4 is always used when ECC is enabled, even for x16
2474  */
2475 static uint8_t get_num_lanes(void)
2476 {
2477     uint8_t lanes;
2478     /* Check width: 16-bit or 32-bit supported, 1 => 32-bit */
2479     if ((LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_BUS_WIDTH_MASK) ==\
2480             DDRPHY_MODE_BUS_WIDTH_4_LANE)
2481     {
2482         lanes = 4U;
2483     }
2484     else
2485     {
2486         lanes = 2U;
2487     }
2488     /* Check if using ECC, add a lane */
2489     if ((LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_ECC_MASK) ==\
2490             DDRPHY_MODE_ECC_ON)
2491     {
2492         lanes++;
2493     }
2494     return lanes;
2495 }
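
/*
 * Illustrative sketch (not part of the driver): the lane count returned by
 * get_num_lanes() is typically turned into a per-lane bit mask for MTC_test(),
 * as done in the DDR_FULL_MTC_CHECK state above, e.g.
 *
 *     uint32_t err  = 0U;
 *     uint8_t  mask = (get_num_lanes() <= 3U) ? 0x3U : 0xFU;
 *     err |= MTC_test(mask, 0ULL, ONE_MB_MTC, MTC_COUNTING_PATTERN,
 *                     MTC_ADD_SEQUENTIAL, &err);
 */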
2496 
2497 
2498 
2499 /***************************************************************************//**
2500  * set_ddr_mode_reg_and_vs_bits()
2501  *
2502  */
2503 static void set_ddr_mode_reg_and_vs_bits(uint32_t dpc_bits)
2504 {
2505     DDR_TYPE ddr_type = LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_MASK;
2506     /*
2507      * R1.6
2508      * Write DDR phy mode reg (eg DDR3)
2509      * When we write to the mode register, an IP state machine copies default
2510      * values for the particular mode chosen to the RPC registers associated
2511      * with DDR in the MSS custom block.
2512      * (Note: the VS bits are not included in the copy, so we set them below.)
2513      * The RPC register values are transferred to the SCB registers in a
2514      * subsequent step.
2515      */
2516     /*
2517      * Set VS bits
2518      * Select the VS bits for the selected DDR mode -- set the dynamic PC bit
2519      * settings to allow editing of the RPC registers, PVT calibration, etc.
2521      *
2522      * [19]         dpc_move_en_v   enable dynamic control of vrgen circuit for
2523      *              ADDCMD pins
2524      * [18]         dpc_vrgen_en_v  enable vref generator for ADDCMD pins
2525      * [17:12]      dpc_vrgen_v     reference voltage ratio setting for ADDCMD
2526      *              pins
2527      * [11:11]      dpc_move_en_h   enable dynamic control of vrgen circuit for
2528      *              DQ/DQS pins
2529      * [10:10]      dpc_vrgen_en_h  enable vref generator for DQ/DQS pins
2530      * [9:4]        dpc_vrgen_h     reference voltage ratio setting for DQ/DQS
2531      *              pins
2532      * [3:0]        dpc_vs          bank voltage select for pvt calibration
2533      */
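    /*
     * Illustrative sketch only (assumes the field positions documented
     * above): a dpc_bits word could be assembled as
     *
     *     uint32_t dpc = 0U;
     *     dpc |= (dpc_vs         & 0x0FU) << 0U;   // [3:0]   bank voltage select
     *     dpc |= (dpc_vrgen_h    & 0x3FU) << 4U;   // [9:4]   DQ/DQS vref ratio
     *     dpc |= (dpc_vrgen_en_h & 0x01U) << 10U;  // [10]    DQ/DQS vrgen enable
     *     dpc |= (dpc_move_en_h  & 0x01U) << 11U;  // [11]    DQ/DQS dynamic control
     *     dpc |= (dpc_vrgen_v    & 0x3FU) << 12U;  // [17:12] ADDCMD vref ratio
     *     dpc |= (dpc_vrgen_en_v & 0x01U) << 18U;  // [18]    ADDCMD vrgen enable
     *     dpc |= (dpc_move_en_v  & 0x01U) << 19U;  // [19]    ADDCMD dynamic control
     *
     * In practice, dpc_bits arrives fully formed from the caller.
     */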
2534     /*
2535         DDRPHY_MODE setting from MSS configurator
2536             DDRMODE              :3;
2537             ECC                  :1;
2538             CRC                  :1;
2539             Bus_width            :3;
2540             DMI_DBI              :1;
2541             DQ_drive             :2;
2542             DQS_drive            :2;
2543             ADD_CMD_drive        :2;
2544             Clock_out_drive      :2;
2545             DQ_termination       :2;
2546             DQS_termination      :2;
2547             ADD_CMD_input_pin_termination :2;
2548             preset_odt_clk       :2;
2549             Power_down           :1;
2550             rank                 :1;
2551             Command_Address_Pipe :2;
2552     */
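    /*
     * Illustrative sketch: individual fields are extracted from
     * LIBERO_SETTING_DDRPHY_MODE with the masks already used in this file,
     * e.g.
     *
     *     DDR_TYPE type     = LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_MASK;
     *     uint32_t ecc_on   = LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_ECC_MASK;
     *     uint32_t width    = LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_BUS_WIDTH_MASK;
     *     uint32_t two_rank = LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_RANK_MASK;
     */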
2553     {
2554         if ((ddr_type == DDR4) &&\
2555                 (LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_ECC_MASK) ==\
2556                     DDRPHY_MODE_ECC_ON)
2557         {
2558             /*
2559              * With DDR4 and ECC on, training will not pass if the data mask
2560              * is enabled during training.
2561              * This will eventually be handled by the configurator:
2562              * DM will not be allowed for DDR4 with ECC.
2563              */
2564             CFG_DDR_SGMII_PHY->DDRPHY_MODE.DDRPHY_MODE  =\
2565                              (LIBERO_SETTING_DDRPHY_MODE  & DMI_DBI_MASK );
2566         }
2567         else
2568         {
2569             CFG_DDR_SGMII_PHY->DDRPHY_MODE.DDRPHY_MODE  =\
2570                                                      LIBERO_SETTING_DDRPHY_MODE;
2571         }
2572         delay((uint32_t) 100U);
2573         CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS        = dpc_bits;
2574     }
2575 }
2576 
2577 
2578 
2579 /***************************************************************************//**
2580  * set_ddr_rpc_regs()
2581  * @param ddr_type
2582  */
2583 static void set_ddr_rpc_regs(DDR_TYPE ddr_type)
2584 {
2585     /*
2586      * Write DDR phy mode reg (eg DDR3)
2587      * When we write to the mode register, an IP state machine copies default
2588      * values for the particular mode chosen
2589      * to the RPC registers associated with DDR in the MSS custom block.
2590      * The RPC register values are transferred to the SCB registers in a
2591      * subsequent step.
2592      *
2593      * Question:
2594      * Select VS bits (e.g. DDR3). (VS bits are not included in the mode
2595      * setup - should they be?)
2596      * A small wait is required here while the state machine transfer takes
2597      * place. (Is a status bit required?)
2598      *
2599      */
2600     /*
2601         DDRPHY_MODE setting from MSS configurator
2602             DDRMODE              :3;
2603             ECC                  :1;
2604             CRC                  :1;
2605             Bus_width            :3;
2606             DMI_DBI              :1;
2607             DQ_drive             :2;
2608             DQS_drive            :2;
2609             ADD_CMD_drive        :2;
2610             Clock_out_drive      :2;
2611             DQ_termination       :2;
2612             DQS_termination      :2;
2613             ADD_CMD_input_pin_termination :2;
2614             preset_odt_clk       :2;
2615             Power_down           :1;
2616             rank                 :1;
2617             Command_Address_Pipe :2;
2618       */
2619     {
2620         switch (ddr_type)
2621         {
2622             default:
2623             case DDR_OFF_MODE:
2624                 /* Below have already been set  */
2625                 /* CFG_DDR_SGMII_PHY->rpc95.rpc95 = 0x07;  */    /* addcmd I/O*/
2626                 /* CFG_DDR_SGMII_PHY->rpc96.rpc96 = 0x07;  */    /* clk */
2627                 /* CFG_DDR_SGMII_PHY->rpc97.rpc97 = 0x07;  */    /* dq */
2628                 /* CFG_DDR_SGMII_PHY->rpc98.rpc98 = 0x07;  */    /* dqs */
2629                 /*
2630                  *    bits 15:14 connect to ibufmx DQ/DQS/DM
2631                  *    bits 13:12 connect to ibufmx CA/CK
2632                  */
2633                 CFG_DDR_SGMII_PHY->UNUSED_SPACE0[0] = 0x0U;
2634                 break;
2635             case DDR3L:
2636             case DDR3:
2637                 /* Required when rank x 2 */
2638                 if ((LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_RANK_MASK) ==\
2639                         DDRPHY_MODE_TWO_RANKS)
2640                 {
2641                     CFG_DDR_SGMII_PHY->spio253.spio253 = 1U;
2642                 }
2643 
2644                 {
2645                     /*
2646                      * The firmware sets this to 3'b100 in all cases except
2647                      * when we are in OFF mode (DDR3, DDR4, LPDDR3, LPDDR4).
2648                      */
2649                     CFG_DDR_SGMII_PHY->rpc98.rpc98 = 0x04U;
2650                 }
2651                 /*
2652                  *    SAR xxxx
2653                  *    bits 15:14 connect to ibufmx DQ/DQS/DM
2654                  *    bits 13:12 connect to ibufmx CA/CK
2655                  */
2656                 CFG_DDR_SGMII_PHY->UNUSED_SPACE0[0] = 0x0U;
2657                 break;
2658             case DDR4:
2659                 {
2660                     /*
2661                      * Sar 108017
2662                      * ODT_STATIC setting is wrong for DDR4/LPDDR3, needs to
2663                      * be overwritten in embedded SW for E51
2664                      *
2665                      * ODT_STATIC is set to 001 for DQ/DQS/DBI bits in
2666                      * DDR3/LPDDR4, this enables termination to VSS.
2667                      *
2668                      * This needs to be switched to VDDI termination.
2669                      *
2670                      * To do this, we do APB register writes to override
2671                      * the following PC bits:
2672                      * odt_static_dq=010
2673                      * odt_static_dqs=010
2674                      */
2675                     CFG_DDR_SGMII_PHY->rpc10_ODT.rpc10_ODT = 2U;
2676                     CFG_DDR_SGMII_PHY->rpc11_ODT.rpc11_ODT = 2U;
2677                     /*
2678                      * SAR 108218
2679                      * The firmware should set this to 3'b100 for
2680                      * all cases except when we are in OFF mode (DDR3,DDR4,
2681                      * LPDDR3,LPDDR4).
2682                      */
2683                     CFG_DDR_SGMII_PHY->rpc98.rpc98 = 0x04U;
2684                     /*
2685                      *    bits 15:14 connect to ibufmx DQ/DQS/DM
2686                      *    bits 13:12 connect to ibufmx CA/CK
2687                      */
2688                     CFG_DDR_SGMII_PHY->UNUSED_SPACE0[0] =  0x0U;
2689                 }
2690                 break;
2691             case LPDDR3:
2692                 {
2693                     /*
2694                      * Sar 108017
2695                      * ODT_STATIC setting is wrong for DDR4/LPDDR3, needs to be
2696                      * overwritten in embedded SW for E51
2697                      *
2698                      * ODT_STATIC is set to 001 for DQ/DQS/DBI bits in
2699                      * DDR3/LPDDR4, this enables termination to VSS.
2700                      *
2701                      * This needs to be switched to VDDI termination.
2702                      *
2703                      * To do this, we should do APB register writes to override
2704                      * the following PC bits:
2705                      * odt_static_dq=010
2706                      * odt_static_dqs=010
2707                      */
2708                     CFG_DDR_SGMII_PHY->rpc10_ODT.rpc10_ODT = 2U;
2709                     CFG_DDR_SGMII_PHY->rpc11_ODT.rpc11_ODT = 2U;
2710                     /*
2711                      * SAR 108218
2712                      * I've reviewed the results, and the ibufmd bit should be
2713                      * fixed in firmware for ibufmd_dqs. Malachy please have
2714                      * the firmware set this to 3'b100 for all cases except
2715                      * when we are in OFF mode (DDR3,DDR4,LPDDR3,LPDDR4).
2716                      */
2717                     CFG_DDR_SGMII_PHY->rpc98.rpc98 = 0x04U;
2718                     /*
2719                      *    SAR xxxx
2720                      *    bits 15:14 connect to ibufmx DQ/DQS/DM
2721                      *    bits 13:12 connect to ibufmx CA/CK
2722                      */
2723                     CFG_DDR_SGMII_PHY->UNUSED_SPACE0[0] = 0x0U;
2724                 }
2725                 break;
2726             case LPDDR4:
2727                 {
2728                     /*
2729                      * We need to be able to implement different physical
2730                      * configurations of LPDDR4, given the twin-die architecture.
2731                      * These are not fully decoded by the APB decoder (we don't
2732                      * have all the options).
2733                      * Basically we want to support:
2734                      * - hooking the CA buses from the 2 die up in parallel on
2735                      *   the same FPGA pins
2736                      * - hooking the CA buses from the 2 die up in parallel using
2737                      *   the mirrored FPGA pins (i.e. CA_A/CA_B)
2738                      * - some combination of the two, i.e. duplicating the clocks
2739                      *   but not the CA, duplicating the clocks and command but
2740                      *   not the address, etc.
2741                      */
2742                     /* OVRT_EN_ADDCMD1 (default 0xF00), register named ovrt11 */
2743 #ifndef LIBERO_SETTING_RPC_EN_ADDCMD0_OVRT9
2744                     /*
2745                      * If this define is not present, it indicates an older
2746                      * Libero core (pre 2.0.109),
2747                      * so we run this code.
2748                      */
2749                     CFG_DDR_SGMII_PHY->ovrt10.ovrt10 =\
2750                             LIBERO_SETTING_RPC_EN_ADDCMD1_OVRT10;
2751                     {
2752                         /* Use pull-ups to set the CMD/ADD ODT */
2753                         CFG_DDR_SGMII_PHY->rpc245.rpc245 =\
2754                             0x00000000U;
2755 
2756                         CFG_DDR_SGMII_PHY->rpc237.rpc237 =\
2757                             0xffffffffU;
2758                     }
2759 
2760                     /* OVRT_EN_ADDCMD2 (default 0xE06U), register named ovrt12 */
2761                     CFG_DDR_SGMII_PHY->ovrt11.ovrt11 =\
2762                             LIBERO_SETTING_RPC_EN_ADDCMD2_OVRT11;
2763 #endif
2764                     /* Required when rank x 2 */
2765                     if ((LIBERO_SETTING_DDRPHY_MODE & DDRPHY_MODE_RANK_MASK) ==\
2766                             DDRPHY_MODE_TWO_RANKS)
2767                     {
2768                         /* todo: need to verify this setting with verification */
2769                         CFG_DDR_SGMII_PHY->spio253.spio253 = 1U;
2770                     }
2771 
2772                     {
2773                         /*
2774                          * SAR 108218
2775                          * I've reviewed the results, and the ibufmd bit should be
2776                          * fixed in firmware for ibufmd_dqs. Malachy please have the
2777                          * firmware set this to 3'b100 for all cases except when we
2778                          * are in OFF mode (DDR3,DDR4,LPDDR3,LPDDR4).
2779                          */
2780                         CFG_DDR_SGMII_PHY->rpc98.rpc98 = 0x04U;
2781                     }
2782                     /*
2783                      *    SAR xxxx
2784                      *    bits 15:14 connect to ibufmx DQ/DQS/DM
2785                      *    bits 13:12 connect to ibufmx CA/CK
2786                      */
2787                     CFG_DDR_SGMII_PHY->UNUSED_SPACE0[0] =  0xA000U;
2788 
2789                 }
2790 
2791                 break;
2792         }
2793     }
2794 
2795     {
2796 
2797         /*
2798          * sar107009, found by Paul in Crevin.
2799          * This has been fixed in g5_mss_ddrphy_apb tag 2.9.130.
2800          * todo: remove this software workaround as it is no longer required
2801          *
2802          * The default of rpc27 should be 2; it is currently 0.
2803          * We will set it to 2 for the moment in software.
2804          */
2805         CFG_DDR_SGMII_PHY->rpc27.rpc27 = 0x2U;
2806         /*
2807          * Default of rpc27 - issue seen by Paul/Alister, 10th June:
2808          * tb_top.duv_wrapper.u_design.mss_custom.gbank6.tip.gapb.\
2809          *                      MAIN.u_apb_mss_decoder_io.rpc203_spare_iog_dqsn
2810          */
2811         CFG_DDR_SGMII_PHY->rpc203.rpc203 = 0U;
2812     }
2813 
2814     {
2815         /*
2816          *
2817          * We'll have to pass that one in via E51, meaning APB writes to
2818          * addresses:
2819          * 0x2000 7384   rpc1_ODT       ODT_CA
2820          * 0x2000 7388   rpc2_ODT       RPC_ODT_CLK
2821          * 0x2000 738C   rpc3_ODT       ODT_DQ
2822          * 0x2000 7390   rpc4_ODT       ODT_DQS
2823          *
2824          * todo: replace with Libero settings below, once values verified
2825          */
2826         CFG_DDR_SGMII_PHY->rpc1_ODT.rpc1_ODT = LIBERO_SETTING_RPC_ODT_ADDCMD;
2827         CFG_DDR_SGMII_PHY->rpc2_ODT.rpc2_ODT = LIBERO_SETTING_RPC_ODT_CLK;
2828         CFG_DDR_SGMII_PHY->rpc3_ODT.rpc3_ODT = LIBERO_SETTING_RPC_ODT_DQ;
2829         CFG_DDR_SGMII_PHY->rpc4_ODT.rpc4_ODT = LIBERO_SETTING_RPC_ODT_DQS;
2830     }
2831     {
2832         /*
2833         * bclk_sel_clkn - selects bclk sclk training clock
2834         */
2835         CFG_DDR_SGMII_PHY->rpc19.rpc19 = 0x01U;     /* bclk_sel_clkn */
2836         /*
2837         * bclk_sel_clkp - selects bclk sclk training clock
2838         */
2839         CFG_DDR_SGMII_PHY->rpc20.rpc20 = 0x00U;     /* bclk_sel_clkp */
2840 
2841     }
2842 
2843     {
2844         /*
2845          *  Each lane has its own FIFO. This parameter adjusts the offset for all lanes.
2846           */
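        /*
         * When TUNE_RPC_166_VALUE is enabled, rpc_166_fifo_offset is swept by
         * the DDR_VERIFY_PATTERN_IN_CACHE state if the cached pattern test
         * fails: it is incremented, wrapping from MAX_RPC_166_VALUE back to
         * MIN_RPC_166_VALUE, and is reset to DEFAULT_RPC_166_VALUE (with the
         * training marked as failed) once NUM_RPC_166_VALUES retries have
         * been used up.
         */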
2847 #if (TUNE_RPC_166_VALUE == 1)
2848         CFG_DDR_SGMII_PHY->rpc166.rpc166 = rpc_166_fifo_offset;
2849 #endif
2850     }
2851 
2852     /*
2853      *  Override RPC bits for weak PU and PD's
2854      *  Set over-ride bit for unused I/O
2855      */
2856     config_ddr_io_pull_up_downs_rpc_bits(ddr_type);
2857 }
2858 
2859 /**
2860   Info on OFF modes:
2861 
2862   OFF MODE from reset - I/O not being used
2863         MSSIO from reset - non-default values
2864             Needs non-default values to go completely OFF
2865             Drive bits and ibuff mode
2866             Ciaran to define what needs to be done
2867               SAR107676
2868         DDR - by default put into DDR4 mode, so needs active intervention
2869             Bill's SAC spec (DDR PHY SAC spec section 6.1)
2870             Mode register set to 7
2871             Ibuff mode set to 7 (rx turned off)
2872             P-Code/N-code of no relevance as not used
2873             Disable DDR PLL
2874                Will be off from reset - no need
2875             Need to reflash
2876             DDR APB (three resets - soft reset bit 0 set to 1)
2877                 Drive, ODT etc.
2878        SGMII - from reset nothing to be done
2879            See Jeff's spreadsheet - default values listed
2880            External clock off, also defined in the spreadsheet
2881  */
2882 
2883 
2884 /**
2885  *  ddr_off_mode(void)
2886  *  Assumed in Dynamic mode.
2887  *  i.e.
2888  *  SCB dynamic enable bit is high
2889  *  MSS core_up = 1
2890  *  dce[0,1,2] 0,0,0
2891  *  flash valid = 1
2892  *  IP:
2893  *  DECODER_DRIVER, ODT, IO all out of reset
2894  *
2895  *  DDR PHY off mode that I took from version 1.58 of the DDR SAC spec.
2896  *  DDR PHY OFF mode (not used at all):
2897  *  1.  Set the DDR_MODE register to 7
2898  *  This will disable all the drive and ODT to 0, as well as set all WPU bits.
2899  *  2.  Set the RPC_IBUF_MD_* registers to 7
2900  *  This will disable all receivers.
2901  *  3.  Set the REG_POWERDOWN_B register to 0
2902  *  This will disable the DDR PLL
2903  *
2904  */
2905 static void ddr_off_mode(void)
2906 {
2907     /*
2908       * The DDR PLL is not turned on at reset, so there is no need to do anything.
2909       */
2910      /*
2911       * set the mode register to 7 => off mode
2912       * From the DDRPHY training firmware spec.:
2913       * If the DDR interface is unused, the firmware will have to write 3'b111
2914       * into the APB_DDR_MODE register. This will disable all the DRIVERs, ODT
2915       * and INPUT  receivers.
2916       * By default, WPD will be applied to all pads.
2917       *
2918       * If a user wants to apply WPU, this will have to be applied through
2919       * firmware, by changing all RPC_WPU_*=0, and RPC_WPD_*=1, via APB register
2920       * writes.
2921       *
2922       * Unused IO within an interface will automatically be shut off, as unused
2923       * DQ/DM/DQS/and CA buffers and odt are automatically disabled by the
2924       * decode, and put into WPD mode.
2925       * Again, if the user wants to change this to WPU, they will have to write
2926       * RPC_WPU_*=0 and RPC_WPD_*=1 to override the default.
2927       *
2928       */
2929      /* Note: DMI_DBI [8:1]   needs to be 0 (off) during training */
2930      CFG_DDR_SGMII_PHY->DDRPHY_MODE.DDRPHY_MODE  =\
2931              (LIBERO_SETTING_DDRPHY_MODE_OFF /* & DMI_DBI_MASK */);
2932      /*
2933       * VS for off mode
2934       */
2935      CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS        =\
2936              LIBERO_SETTING_DPC_BITS_OFF_MODE;
2937 
2938     /*
2939      * Toggle decoder here
2940      *  bit 0 == PERIPH   soft reset, auto cleared
2941      */
2942      CFG_DDR_SGMII_PHY->SOFT_RESET_DECODER_DRIVER.SOFT_RESET_DECODER_DRIVER= 1U;
2943      CFG_DDR_SGMII_PHY->SOFT_RESET_DECODER_ODT.SOFT_RESET_DECODER_ODT      = 1U;
2944      CFG_DDR_SGMII_PHY->SOFT_RESET_DECODER_IO.SOFT_RESET_DECODER_IO        = 1U;
2945 
2946      /*
2947       * set ibuff mode to 7 in off mode
2948       *
2949       */
2950      CFG_DDR_SGMII_PHY->rpc95.rpc95 = 0x07;     /* addcmd I/O*/
2951      CFG_DDR_SGMII_PHY->rpc96.rpc96 = 0x07;     /* clk */
2952      CFG_DDR_SGMII_PHY->rpc97.rpc97 = 0x07;     /* dq */
2953      CFG_DDR_SGMII_PHY->rpc98.rpc98 = 0x07;     /* dqs */
2954 
2955      /*
2956       * Default is WPD; modify if the user wants Weak Pull Up (WPU)
2957       */
2958      /*
2959       * UNUSED_SPACE0
2960       *     bits 15:14 connect to ibufmx DQ/DQS/DM
2961       *     bits 13:12 connect to ibufmx CA/CK
2962       *    todo: Do we need to add Pu/PD option for off mode to Libero setting?
2963       */
2964      CFG_DDR_SGMII_PHY->UNUSED_SPACE0[0] = 0x0000U;
2965 
2966     /*
2967      *  REG_POWERDOWN_B - turn the PLL off, in case it was turned on.
2968      */
2969     ddr_pll_config_scb_turn_off();
2970     return;
2971 }
2972 
2973 
2974 /***************************************************************************//**
2975  * Number of tests which write and read from DDR
2976  * Tests data path through the cache and through AXI4 switch.
2977  */
2978 #ifdef DDR_SANITY_CHECKS_EN
2979 static uint8_t memory_tests(void)
2980 {
2981     uint64_t shift_walking_one = 4U;
2982     uint64_t start_address = 0x0000000000000000U;
2983     uint8_t error = 0U;
2984     SIM_FEEDBACK1(199U);
2985     /*
2986      * Verify seg1 reg 2, datapath through AXI4 switch
2987      */
2988     while(shift_walking_one <= 28U) /* 28 => 1G, as 2**28 == 256M and this is
2989                                       multiplied by 4 (lanes) */
2990     {
2991         SIM_FEEDBACK1(shift_walking_one);
2992         start_address = (uint64_t)(0xC0000000U + (0x1U<<shift_walking_one));
2993         error = rw_sanity_chk((uint64_t *)start_address , (uint32_t)0x5U);
2994 
2995         if(error)
2996         {
2997             ddr_error_count++;
2998             SIM_FEEDBACK1(200U);
2999         }
3000         shift_walking_one++;
3001     }
3002     SIM_FEEDBACK1(500U);
3003     /*
3004      * Verify seg1 reg 3, datapath through AXI4 switch
3005      */
3006     shift_walking_one = 4U;
3007     while(shift_walking_one <= 28U) //28 => 1G
3008     {
3009         SIM_FEEDBACK1(shift_walking_one);
3010         start_address = (uint64_t)(0x1400000000U + (0x1U<<shift_walking_one));
3011         error = rw_sanity_chk((uint64_t *)start_address , (uint32_t)0x5U);
3012 
3013         if(error)
3014         {
3015             ddr_error_count++;
3016             SIM_FEEDBACK1(208U);
3017         }
3018 
3019         /* check upper bound */
3020         if(shift_walking_one >= 4U)
3021         {
3022             start_address = (uint64_t)(0x1400000000U + \
3023                     (((0x1U<<(shift_walking_one +1)) - 1U) -0x0F) );
3024             error = rw_sanity_chk((uint64_t *)start_address , (uint32_t)0x5U);
3025 
3026             if(error)
3027             {
3028                 ddr_error_count++;
3029                 SIM_FEEDBACK1(201U);
3030             }
3031         }
3032 
3033         shift_walking_one++;
3034     }
3035     /*
3036      * Verify mtc
3037      */
3038     SIM_FEEDBACK1(600U);
3039     shift_walking_one = 4U;
3040     while(shift_walking_one <= 28U) //28 => 1G
3041     {
3042         SIM_FEEDBACK1(shift_walking_one);
3043         start_address = (uint64_t)(0x1U<<shift_walking_one);
3044         error = mtc_sanity_check(start_address);
3045 
3046         if(error)
3047         {
3048             ddr_error_count++;
3049             SIM_FEEDBACK1(203U);
3050         }
3051 
3052         /* check upper bound */
3053         if(shift_walking_one >= 4U)
3054         {
3055              start_address = (uint64_t)((((0x1U<<(shift_walking_one +1)) - 1U)\
3056                      -0x0F) );
3057              error = mtc_sanity_check(start_address);
3058 
3059              if(error)
3060              {
3061                  ddr_error_count++;
3062                  SIM_FEEDBACK1(204U);
3063              }
3064         }
3065         shift_walking_one++;
3066     }
3067 
3068     /*
3069      * Verify seg0 reg 0, datapath through cache
3070      */
3071     SIM_FEEDBACK1(700U);
3072     shift_walking_one = 4U;
3073     while(shift_walking_one <= 27U) //27 => 512M
3074     {
3075         SIM_FEEDBACK1(shift_walking_one);
3076         start_address = (uint64_t)(0x80000000U + (0x1U<<shift_walking_one));
3077         error = rw_sanity_chk((uint64_t *)start_address , (uint32_t)0x5U);
3078 
3079         if(error)
3080         {
3081             ddr_error_count++;
3082             SIM_FEEDBACK1(206U);
3083         }
3084         shift_walking_one++;
3085     }
3086 
3087     /*
3088      * Verify seg0 reg 1, datapath through cache,
3089      */
3090 #if 0 /* issue with cache setup for 64 address width, need to checkout */
3091     SIM_FEEDBACK1(800U);
3092     shift_walking_one = 4U;
3093     while(shift_walking_one <= 28U)  //28 => 1G (0x10000000(address) * 4 (32bits wide))
3094     {
3095         SIM_FEEDBACK1(shift_walking_one);
3096         start_address = (uint64_t)(0x1000000000U + (0x1U<<shift_walking_one));
3097         error = rw_sanity_chk((uint64_t *)start_address , (uint32_t)0x5U);
3098 
3099         if(error)
3100         {
3101             ddr_error_count++;
3102             SIM_FEEDBACK1(207U);
3103         }
3104 
3105 #if 0 /* this check will not work as written, need to look further into flushing
3106           cache as part of this test */
3107         /*
3108          * read back via axi switch datapath to make sure write through on
3109          * cache occurred
3110          */
3111         start_address = (uint64_t)(0x1400000000U + (0x1U<<shift_walking_one));
3112         error = read_back_sanity_check((uint64_t *)start_address , 0x5UL);
3113 
3114         if(error)
3115             ddr_error_count++;
3116 
3117         shift_walking_one++;
3118 #endif
3119     }
3120 #endif
3121 
3122     SIM_FEEDBACK1(299U);
3123     return (error);
3124 }
3125 #endif
3126 
3127 /***************************************************************************//**
3128  * rw_sanity_chk()
3129  * Writes to DDR and verifies the data read back.
3130  * Uses the values defined in test_string[]
3131  * @param address
3132  * @param count
3133  * @return non zero if error
3134  */
3135 #ifdef DDR_SANITY_CHECKS_EN
3136 static uint8_t rw_sanity_chk(uint64_t * address, uint32_t count)
3137 {
3138     volatile uint64_t *DDR_word_ptr;
3139     uint64_t value;
3140     uint8_t error = 0U;
3141     /* DDR memory address from E51 - 0xC0000000 is non cache access */
3142     DDR_word_ptr =  address;
3143 
3144     volatile uint32_t i = 0x0U;
3145 
3146     /*
3147      * First fill
3148      */
3149     while(i < count)
3150     {
3151         *DDR_word_ptr = test_string[i & 0xfU];
3152 
3153         value = *DDR_word_ptr;
3154 
3155         if( value != test_string[i & 0xfU])
3156         {
3157             value = *DDR_word_ptr;
3158             if( value != test_string[i & 0xfU])
3159             {
3160                 ddr_error_count++;
3161                 error = 1;
3162             }
3163         }
3164         ++i;
3165         DDR_word_ptr = DDR_word_ptr + 1U;
3166     }
3167     /*
3168      * Recheck read, if first read successful
3169      */
3170     if(error == 0)
3171     {
3172         /* DDR memory address from E51 - 0xC0000000 is non cache access */
3173         DDR_word_ptr =  address;
3174         i = 0x0U;
3175         while(i < count)
3176         {
3177             if( *DDR_word_ptr != test_string[i & 0xfU])
3178             {
3179                 ddr_error_count++;
3180                 error = 1;
3181             }
3182             ++i;
3183             DDR_word_ptr = DDR_word_ptr + 1U;
3184         }
3185     }
3186     return error;
3187 }
3188 #endif
3189 
3190 /***************************************************************************//**
3191  *
3192  * @param address
3193  * @param count
3194  * @return
3195  */
3196 #if 0  /* todo: review, add in if required */
3197 static uint8_t read_back_sanity_check(uint64_t * address, uint32_t count)
3198 {
3199     volatile uint64_t *DDR_word_ptr;
3200     uint8_t error = 0U;
3201     DDR_word_ptr =  address;   /* DDR memory address from E51 - 0xC0000000 is
3202                                   non cache access */
3203 
3204     volatile uint32_t i = 0x0U;
3205 
3206     /*
3207      * Recheck read, if first read successful
3208      */
3209     if(error == 0)
3210     {
3211         DDR_word_ptr =  address;   /* DDR memory address from E51 - 0xC0000000
3212                                       is non cache access */
3213         i = 0x0U;
3214         while(i < count)
3215         {
3216             if( *DDR_word_ptr != test_string[i & 0xfU])
3217             {
3218                 ddr_error_count++;
3219                 error = 1;
3220             }
3221             ++i;
3222             DDR_word_ptr = DDR_word_ptr + 1U;
3223         }
3224     }
3225     return error;
3226 }
3227 #endif
3228 
3229 /***************************************************************************//**
3230  * Memory test Core sanity check
3231  * @param start_address
3232  * @return non zero if error
3233  */
3234 #ifdef DDR_SANITY_CHECKS_EN
3235 static uint8_t mtc_sanity_check(uint64_t start_address)
3236 {
3237     uint8_t result;
3238     uint64_t size = 4U;
3239     result = MTC_test((0xFU), start_address, size );
3240     return result;
3241 }
3242 #endif
3243 
3244 
3245 /***************************************************************************//**
3246  *
3247  * load_dq(lane)
3248  *      set dyn_ovr_dlycnt_dq_load* = 0
3249  *      set expert_dfi_status_override_to_shim = 0x7
3250  *      set expert_mode_en = 0x21
3251  *      set dyn_ovr_dlycnt_dq_load* = 1
3252  *      set dyn_ovr_dlycnt_dq_load* = 0
3253  *      set expert_mode_en = 0x8
3254  *
3255  * @param lane
3256  */
3257 static void load_dq(uint8_t lane)
3258 {
3259     //set dyn_ovr_dlycnt_dq_load* = 0
3260     if(lane < 4U)
3261     {
3262         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg0.expert_dlycnt_move_reg0 = 0U;
3263     }
3264     else
3265     {
3266         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = \
3267             (CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1\
3268                                                 & (uint32_t)~0x0FU);
3269     }
3270     //set expert_dfi_status_override_to_shim = 0x7
3271     CFG_DDR_SGMII_PHY->expert_dfi_status_override_to_shim.expert_dfi_status_override_to_shim = 0x07U;
3272     //set expert_mode_en = 0x21
3273     CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x21U;
3274     //set dyn_ovr_dlycnt_dq_load* = 1
3275     if(lane < 4U)
3276     {
3277         CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg0.expert_dlycnt_load_reg0 =\
3278                 (0xFFU << (lane * 8U));
3279     }
3280     else
3281     {
3282         CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 |=\
3283                 0x0FU;
3284     }
3285     //set dyn_ovr_dlycnt_dq_load* = 0
3286     CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg0.expert_dlycnt_load_reg0 = 0U;
3287     if(lane < 4U)
3288     {
3289         CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg0.expert_dlycnt_load_reg0 = 0U;
3290     }
3291     else
3292     {
3293         CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1 = \
3294             (CFG_DDR_SGMII_PHY->expert_dlycnt_load_reg1.expert_dlycnt_load_reg1\
3295                                                              & (uint32_t)~0x0FU);
3296     }
3297     //set expert_mode_en = 0x8
3298     CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x8U;
3299 }
3300 
3301 /***************************************************************************//**
3302  *  increment_dq()
3303  *     set dyn_ovr_dlycnt_dq_move* = 0
3304  *     set dyn_ovr_dlycnt_dq_direction* = 1
3305  *     set expert_dfi_status_override_to_shim = 0x7
3306  *     set expert_mode_en = 0x21
3307  *
3308  *     #to increment multiple times loop the move=0/1 multiple times
3309  *     set dyn_ovr_dlycnt_dq_move* = 1
3310  *     set dyn_ovr_dlycnt_dq_move* = 0
3311  *     #
3312  *     set expert_mode_en = 0x8
3313  * @param lane
3314  * @param move_count
3315  */
3316 #ifdef SW_CONFIG_LPDDR_WR_CALIB_FN
3317 static void increment_dq(uint8_t lane, uint32_t move_count)
3318 {
3319     //set dyn_ovr_dlycnt_dq_move* = 0
3320     if(lane < 4U)
3321     {
3322         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg0.expert_dlycnt_move_reg0 = 0U;
3323     }
3324     else
3325     {
3326         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = \
3327            (CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1\
3328                    & ~0x0FU);
3329     }
3330     //set dyn_ovr_dlycnt_dq_direction* = 1
3331     if(lane < 4U)
3332     {
3333         CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg0.expert_dlycnt_direction_reg0\
3334             = (0xFFU << (lane * 8U));
3335     }
3336     else
3337     {
3338         /* only four lines, use 0xFU */
3339         CFG_DDR_SGMII_PHY->expert_dlycnt_direction_reg1.expert_dlycnt_direction_reg1 |= 0xFU;
3340     }
3341     /*   set expert_dfi_status_override_to_shim = 0x7 */
3342     CFG_DDR_SGMII_PHY->expert_dfi_status_override_to_shim.expert_dfi_status_override_to_shim = 0x07U;
3343     /*  set expert_mode_en = 0x21 */
3344     CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x21U;
3345     /*  #to increment multiple times loop the move=0/1 multiple times */
3346     move_count = move_count + move_count + move_count;
3347     while(move_count)
3348     {
3349         //   set dyn_ovr_dlycnt_dq_move* = 1
3350         if(lane < 4U)
3351         {
3352             CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg0.expert_dlycnt_move_reg0\
3353                 = (0xFFU << (lane * 8U));
3354         }
3355         else
3356         {
3357             CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1\
3358                 |= 0x0FU;
3359         }
3360         //   set dyn_ovr_dlycnt_dq_move* = 0
3361         CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg0.expert_dlycnt_move_reg0 = 0U;
3362         if(lane < 4U)
3363         {
3364             CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg0.expert_dlycnt_move_reg0\
3365                 = 0U;
3366         }
3367         else
3368         {
3369             CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 = \
3370                     (CFG_DDR_SGMII_PHY->expert_dlycnt_move_reg1.expert_dlycnt_move_reg1 & ~0x0FU);
3371         }
3372         move_count--;
3373     }
3374    /* set expert_mode_en = 0x8 */
3375    CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x8U;
3376 }
3377 #endif
3378 
3379 /***************************************************************************//**
3380  *
3381  */
3382 static void set_write_calib(uint8_t user_lanes)
3383 {
3384     uint32_t temp = 0U;
3385     uint8_t lane_to_set;
3386     uint8_t shift = 0U;
3387 
3388     /*
3389      * Calculate the calibrated value and write back
3390      */
3391     calib_data.write_cal.lane_calib_result = 0U;
3392     for (lane_to_set = 0x00U;\
3393         lane_to_set<user_lanes /*USER_TOTAL_LANES_USED */; lane_to_set++)
3394     {
3395         temp = calib_data.write_cal.lower[lane_to_set];
3396         calib_data.write_cal.lane_calib_result =   \
3397                 calib_data.write_cal.lane_calib_result | (temp << (shift));
3398         shift = (uint8_t)(shift + 0x04U);
3399     }
3400 
3401     /*
3402      * bit 3  must be set if we want to use the
3403      * expert_wrcalib
3404      * register
3405      */
3406     CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x00000008U;
3407 
3408     SIM_FEEDBACK1(0xFF000000);
3409     SIM_FEEDBACK1(calib_data.write_cal.lane_calib_result);
3410     SIM_FEEDBACK1(0xFF000000);
3411 
3412     /* set the calibrated value */
3413     CFG_DDR_SGMII_PHY->expert_wrcalib.expert_wrcalib =\
3414             calib_data.write_cal.lane_calib_result;
3415 }
3416 
3417 /***************************************************************************//**
3418  *
3419  * @param lane_to_set
3420  */
3421 #ifdef SW_CONFIG_LPDDR_WR_CALIB_FN
3422 static void set_calc_dq_delay_offset(uint8_t lane_to_set)
3423 {
3424     uint32_t move_count;
3425 
3426     load_dq(lane_to_set); /* set to start */
3427 
3428     /* shift by 1 to divide by two */
3429     move_count = ((calib_data.dq_cal.upper[lane_to_set] -\
3430             calib_data.dq_cal.lower[lane_to_set]  ) >> 1U) +\
3431             calib_data.dq_cal.lower[lane_to_set];
3432 
3433     increment_dq(lane_to_set, move_count);
3434 
3435 }
3436 #endif
3437 
3438 /***************************************************************************//**
3439  *
3440  *  @param user_lanes
3441  */
3442 #ifdef SW_CONFIG_LPDDR_WR_CALIB_FN
3443 static void set_calib_values(uint8_t user_lanes)
3444 {
3445     uint8_t lane_to_set;
3446     uint32_t move_count;
3447 
3448     for (lane_to_set = 0x00U;\
3449         lane_to_set< user_lanes ; lane_to_set++)
3450     {
3451         set_calc_dq_delay_offset(lane_to_set);
3452     }
3453 
3454     /* and set the write calibration calculated */
3455     set_write_calib(user_lanes);
3456 }
3457 #endif
3458 
3459 
3460 /***************************************************************************//**
3461  * write_calibration_using_mtc
3462  *   Use the Memory Test Core, plugged into the front end of the DDR
3463  *   controller, to perform lane-based writes and read-backs, incrementing
3464  *   the write calibration offset for each lane until a data match occurs.
3465  *   The Memory Test Core is the basis for all training.
3466  *
3467  * @param number_of_lanes_to_calibrate
3468  * @return
3469  */
3470 static uint8_t \
3471     write_calibration_using_mtc(uint8_t number_of_lanes_to_calibrate)
3472 {
3473     uint8_t laneToTest;
3474     uint32_t result = 0U;
3475     uint32_t cal_data;
3476     uint64_t start_address = 0x0000000000000000ULL;
3477     uint32_t size = ONE_MB_MTC;  /* Number of reads for each iteration 2**size*/
3478 
3479     calib_data.write_cal.status_lower = 0U;
3480     /*
3481      * bit 3  must be set if we want to use the
3482      * expert_wrcalib
3483      * register
3484      */
3485     CFG_DDR_SGMII_PHY->expert_mode_en.expert_mode_en = 0x00000008U;
3486 
3487     /*
3488      * Training is carried out here, sweeping the write calibration offset from 0 to F.
3489      * Explanation: A register, expert_wrcalib, is described in MSS DDR TIP
3490      * Register Map [1], and its purpose is to delay--by X number of memory clock
3491      * cycles--the write data, write data mask, and write output enable with
3492      * respect to the address and command for each lane.
3493      */
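    /*
     * Illustrative sketch (an assumption based on set_write_calib() and the
     * 0x11111 step below): expert_wrcalib appears to hold one 4-bit delay
     * value per lane, so adding 0x11111 on each pass writes the same
     * candidate delay (0x0 to 0xE) into all five lane nibbles at once:
     *
     *     cal_data = 0x00000U;    // all lanes: delay 0
     *     cal_data += 0x11111U;   // all lanes: delay 1, and so on
     *
     * set_write_calib() later rebuilds the final value by OR-ing each lane's
     * chosen nibble shifted left by (4 * lane).
     */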
3494     for (cal_data=0x00000U;cal_data<0xfffffU;cal_data=cal_data+0x11111U)
3495     {
3496 #ifdef DEBUG_DDR_INIT
3497         (void)uprint32(g_debug_uart, "\n\rCalibration offset used:",cal_data &0xFUL);
3498 #endif
3499         CFG_DDR_SGMII_PHY->expert_wrcalib.expert_wrcalib = cal_data;
3500 
3501         for (laneToTest = 0x00U; laneToTest<number_of_lanes_to_calibrate;\
3502                                                                 laneToTest++)
3503         {
3504             /*
3505              * read once to flush MTC. During write calibration the first MTC read
3506              * must be discarded as it is unreliable after a series of bad writes.
3507              */
3508             uint8_t mask = (uint8_t)(1U<<laneToTest);
3509             result = MTC_test(mask, start_address, size, MTC_COUNTING_PATTERN, MTC_ADD_SEQUENTIAL, &result);
3510             /* Read using different patterns */
3511             result |= MTC_test(mask, start_address, size, MTC_COUNTING_PATTERN, MTC_ADD_SEQUENTIAL, &result);
3512             result |= MTC_test(mask, start_address, size, MTC_WALKING_ONE, MTC_ADD_SEQUENTIAL, &result);
3513             result |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM, MTC_ADD_SEQUENTIAL, &result);
3514             result |= MTC_test(mask, start_address, size, MTC_NO_REPEATING_PSEUDO_RANDOM, MTC_ADD_SEQUENTIAL, &result);
3515             result |= MTC_test(mask, start_address, size, MTC_ALT_ONES_ZEROS, MTC_ADD_SEQUENTIAL, &result);
3516             result |= MTC_test(mask, start_address, size, MTC_ALT_5_A, MTC_ADD_SEQUENTIAL, &result);
3517             result |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM_16BIT, MTC_ADD_SEQUENTIAL, &result);
3518             result |= MTC_test(mask, start_address, size, MTC_PSEUDO_RANDOM_8BIT, MTC_ADD_SEQUENTIAL, &result);
3519 
3520             if(result == 0U) /* if passed for this lane */
3521             {
3522                 if((calib_data.write_cal.status_lower & (0x01U<<laneToTest)) \
3523                                     == 0U) /* Still looking for good value */
3524                 {
3525                     calib_data.write_cal.lower[laneToTest]  = (cal_data & 0xFU);
3526                     calib_data.write_cal.status_lower |= (0x01U<<laneToTest);
3527                 }
3528                 /*
3529                  * Check the result
3530                  */
3531 #ifdef DEBUG_DDR_INIT
3532                 (void)uprint32(g_debug_uart, "\n\rLane passed:",laneToTest);
3533                 (void)uprint32(g_debug_uart, " All lanes status:",calib_data.write_cal.status_lower);
3534 #endif
3535                 uint32_t laneToCheck;
3536                 for (laneToCheck = 0x00U;\
3537                     laneToCheck<number_of_lanes_to_calibrate; laneToCheck++)
3538                 {
3539                     if(((calib_data.write_cal.status_lower) &\
3540                             (0x01U<<laneToCheck)) == 0U)
3541                     {
3542                         result = 1U; /* not finished, still looking */
3543                         break;
3544                     }
3545                 }
3546                 if(result == 0U) /* if true, we are good for all lanes, can stop
3547                                     looking */
3548                 {
3549                     SIM_FEEDBACK1(0xF7000000);
3550                     break;
3551                 }
3552             }
3553             else
3554             {
3555 #ifdef DEBUG_DDR_INIT
3556                 (void)uprint32(g_debug_uart, "\n\rLane failed:",laneToTest);
3557                 (void)uprint32(g_debug_uart, " All lanes status:",calib_data.write_cal.status_lower);
3558 #endif
3559             }
3560 
3561         } /* end laneToTest */
3562         if(result == 0U) /* if true, we are good for all lanes, can stop */
3563         {                /* looking */
3564             SIM_FEEDBACK1(0xF8000000);
3565             break;
3566         }
3567     }  /* end cal_data */
3568 
3569     SIM_FEEDBACK1(0x01000000);
3570     SIM_FEEDBACK1(calib_data.write_cal.lower[0]);
3571     SIM_FEEDBACK1(0x02000000);
3572     SIM_FEEDBACK1(calib_data.write_cal.lower[1]);
3573     SIM_FEEDBACK1(0x03000000);
3574     SIM_FEEDBACK1(calib_data.write_cal.lower[2]);
3575     SIM_FEEDBACK1(0x04000000);
3576     SIM_FEEDBACK1(calib_data.write_cal.lower[3]);
3577     SIM_FEEDBACK1(0x05000000);
3578     SIM_FEEDBACK1(calib_data.write_cal.lower[4]);
3579     SIM_FEEDBACK1(0x06000000);
3580     SIM_FEEDBACK1(calib_data.write_cal.lower[5]);
3581     SIM_FEEDBACK1(0x07000000);
3582 
3583     /* if calibration successful, calculate and set the value */
3584     if(result == 0U)
3585     {
3586         /* and set the write calibration which has been calculated */
3587         set_write_calib(number_of_lanes_to_calibrate);
3588     }
3589 
3590     SIM_FEEDBACK1(0x08000000);
3591     SIM_FEEDBACK1(result);
3592     SIM_FEEDBACK1(0x08000000);
3593     return result;
3594 }
3595 
3596 
3597 /**
3598  * MODE register write
3599  * @param MR_ADDR
3600  * @param MR_DATA
3601  * @return fail/pass
3602  */
3603 #ifdef SET_VREF_LPDDR4_MODE_REGS
3604 static uint8_t mode_register_write(uint32_t MR_ADDR, uint32_t MR_DATA)
3605 {
3606     uint32_t test = 0xFFFFU;
3607     uint32_t result = 0U;
3608     /*
3609     *
3610     */
3611     //DDRCFG->MC_BASE2.INIT_MRR_MODE.INIT_MRR_MODE        = 0x01;
3612     DDRCFG->MC_BASE2.INIT_MR_ADDR.INIT_MR_ADDR          = MR_ADDR ;
3613     /*
3614     * next:
3615     * write desired VREF calibration range (0=Range 1, 1=Range 2) to bit 6
3616     * of MR6
3617     * write 0x00 to bits 5:0 of MR6 (base calibration value)
3618     */
3619     DDRCFG->MC_BASE2.INIT_MR_WR_DATA.INIT_MR_WR_DATA    = MR_DATA;
3620     DDRCFG->MC_BASE2.INIT_MR_WR_MASK.INIT_MR_WR_MASK = 0U;
3621 
3622     DDRCFG->MC_BASE2.INIT_MR_W_REQ.INIT_MR_W_REQ    = 0x01U;
3623     while((DDRCFG->MC_BASE2.INIT_ACK.INIT_ACK & 0x01U) == 0U) /* wait for ack-
3624                                           to confirm register is written */
3625     {
3626        /* count down as a crude timeout in case the ack never arrives */
3627        if(--test == 0U)
3628        {
3629            result = 1U;
3630            break;
3631        }
3632     }
3633     return result;
3634 }
3635 #endif
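
/*
 * Illustrative usage sketch (hypothetical values): following the MR6 comments
 * in mode_register_write() above, selecting VREF calibration Range 2 with a
 * base calibration value of 0x00 would look something like
 *
 *     if (mode_register_write(6U, (1U << 6U) | 0x00U) != 0U)
 *     {
 *         // controller never acknowledged the mode-register write
 *     }
 */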
3636 
3637 #define VREF_INVALID 0x01U
3638 /***************************************************************************//**
3639  * FPGA_VREFDQ_calibration_using_mtc(void)
3640  * Vary the DQ reference voltage (VREF) and set the optimum value.
3641  * @return
3642  */
3643 #ifdef VREFDQ_CALIB
3644             /*
3645              * This step is optional
3646              * todo: Test once initial board verification complete
3647              */
3648 static uint8_t FPGA_VREFDQ_calibration_using_mtc(void)
3649 {
3650     uint8_t laneToTest, result = 0U;
3651     uint64_t mask;
3652     uint32_t vRef;
3653     uint64_t start_address = 0x0000000000000000ULL;
3654     uint64_t size = 4U;
3655 
3656     /*
3657     * Step 2a. FPGA VREF (Local VREF training)
3658     * Train FPGA VREF using the vrgen_h and vrgen_v registers
3659     */
3660     {
3661     /*
3662      * To manipulate the FPGA VREF value, firmware must write to the
3663      * DPC_BITS register, located at physical address 0x2000 7184.
3664      * Full documentation for this register can be found in
3665      * DFICFG Register Map [4].
3666      */
3667     /*
3668      * See DPC_BITS definition in .h file
3669      */
3670     /* CFG_DDR_SGMII_PHY->DPC_BITS.bitfield.dpc_vrgen_h; */
3671     /* CFG_DDR_SGMII_PHY->DPC_BITS.bitfield.dpc_vrgen_v; */
3672 
3673     }
3674 
3675     /*
3676     * training carried out here- sweeping write calibration offset from 0 to F
3677     * Explanation: A register, expert_wrcalib, is described in MSS DDR TIP
3678     * Register Map [1], and its purpose is to delay--by X number of memory
3679     * clock cycles--the write data, write data mask, and write output enable
3680     * with the respect to the address and command for each lane.
3681     */
3682     calib_data.fpga_vref.vref_result = 0U;
3683     calib_data.fpga_vref.lower = VREF_INVALID;
3684     calib_data.fpga_vref.upper = VREF_INVALID;
3685     calib_data.fpga_vref.status_lower = 0x00U;
3686     calib_data.fpga_vref.status_upper = 0x00U;
3687     mask = 0xFU;        /* todo: obtain data width from user parameters */
3688     uint32_t count = 0U;
3689     /* each bit .25% of VDD ?? */
3690     for (vRef=(0x1U<<4U);vRef<(0x1fU<<4U);vRef=vRef+(0x1U<<4U))
3691     {
3692         /*
3693             CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS =\
3694                           (CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & (~(0x1U<<10U)));
3695             CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS =\
3696                   (CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & (~(0x1fU<<4U))) | vRef;
3697         */
        /* The value must be set via the SCB register (below) rather than the
         * RPC register (commented out above), otherwise a soft reset would be
         * required for it to take effect. */
3700 
3701         IOSCB_BANKCONT_DDR->dpc_bits = (IOSCB_BANKCONT_DDR->dpc_bits &\
3702                 (~(0x1U<<10U)));
3703         IOSCB_BANKCONT_DDR->dpc_bits = (IOSCB_BANKCONT_DDR->dpc_bits &\
3704                 (~(0x1fU<<4U))) | vRef;
3705 
3706 
3707         /* read one to flush MTC -  this is required */
3708         result = MTC_test(1U<<laneToTest, start_address, size);
3709         /* Read twice, two different patterns will be used */
3710         result = MTC_test(1U<<laneToTest, start_address, size);
3711         result |= MTC_test(1U<<laneToTest, start_address, size);
3712         if((result == 0U)&&(calib_data.fpga_vref.lower == VREF_INVALID))
3713         {
3714             calib_data.fpga_vref.lower = vRef;
3715             calib_data.fpga_vref.upper = vRef;
3716             calib_data.fpga_vref.status_lower = 0x01;
3717         }
3718         else if((result == 0U)&&(calib_data.fpga_vref.lower != VREF_INVALID))
3719         {
3720             calib_data.fpga_vref.upper = vRef;
3721             calib_data.fpga_vref.status_upper = 0x01;
3722         }
3723         else if(calib_data.fpga_vref.upper != VREF_INVALID)
3724         {
3725             break; /* we are finished */
3726         }
3727         else
3728         {
3729             /* nothing to do */
3730         }
3731     }
3732 
3733     if(calib_data.fpga_vref.upper != VREF_INVALID) /* we found lower/upper */
3734     {
3735         /*
3736          * now set vref
3737          * calculate optimal VREF calibration value =
3738          *                              (left side + right side) / 2
3739          * */
3740         vRef = ((calib_data.fpga_vref.lower + calib_data.fpga_vref.upper)>>1U);
        CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS =\
                (CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & ~(0x1fU<<4U)) | vRef;
        /* need to set via the SCB, otherwise reset required. */
        IOSCB_BANKCONT_DDR->dpc_bits = (IOSCB_BANKCONT_DDR->dpc_bits &\
                ~(0x1fU<<4U)) | vRef;
3746     }
3747     else
3748     {
3749         result = 1U; /* failed to get good data at any voltage level */
3750     }
3751 
3752   return result;
3753 }
3754 
3755 #endif
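/*
 * Illustrative sketch only (not part of the driver): a helper showing how the
 * FPGA VREF (vrgen) field of dpc_bits is updated during the sweep above. The
 * field positions (bit 10, which the sweep clears, and bits 8:4 for the
 * value) are taken from the code above; the guard macro
 * DDR_DRIVER_EXAMPLE_SKETCHES and the helper name are hypothetical. Writing
 * via the SCB register avoids the soft reset an RPC-only write would need.
 */
#ifdef DDR_DRIVER_EXAMPLE_SKETCHES
static void example_set_fpga_vref(uint32_t vref_setting)
{
    /* clear bit 10, then replace bits 8:4 with the new setting */
    IOSCB_BANKCONT_DDR->dpc_bits = (IOSCB_BANKCONT_DDR->dpc_bits &
            (~(0x1U<<10U)));
    IOSCB_BANKCONT_DDR->dpc_bits = (IOSCB_BANKCONT_DDR->dpc_bits &
            (~(0x1fU<<4U))) | ((vref_setting & 0x1FU) << 4U);
}
#endif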
3756 
3757 #ifdef VREFDQ_CALIB
3758             /*
3759              * This step is optional
3760              * todo: Test once initial board verification complete
3761              */
3762 #define MEM_VREF_INVALID 0xFFFFFFFFU
3763 /***************************************************************************//**
3764  *
3765  * VREFDQ_calibration_using_mtc
3766  * In order to write to mode registers, the E51 must use the INIT_* interface
3767  * at the front end of the DDR controller,
3768  * which is available via a series of control registers described in the DDR
3769  * CSR APB Register Map.
3770  *
 * @return 0U => pass, 1U => no passing VREF setting found
3772  */
static uint8_t VREFDQ_calibration_using_mtc(void)
3774 {
    uint8_t laneToTest = 0U; /* initialise to avoid using an indeterminate lane */
    uint8_t result = 0U;
3776     uint64_t mask;
3777     uint32_t vRef;
3778     uint64_t start_address = 0x00000000C0000000ULL;
3779     uint64_t size = 4U;
3780 
3781     /*
3782     * Step 2a. FPGA VREF (Local VREF training)
3783     * Train FPGA VREF using the vrgen_h and vrgen_v registers
3784     */
3785     {
3786         /*
3787          *
3788          */
3789         DDRCFG->MC_BASE2.INIT_MRR_MODE.INIT_MRR_MODE    = 0x01U;
3790         DDRCFG->MC_BASE2.INIT_MR_ADDR.INIT_MR_ADDR      = 6U ;
3791         /*
3792          * next:
3793          * write desired VREF calibration range (0=Range 1, 1=Range 2) to bit 6
3794          * of MR6
3795          * write 0x00 to bits 5:0 of MR6 (base calibration value)
3796          */
3797         DDRCFG->MC_BASE2.INIT_MR_WR_DATA.INIT_MR_WR_DATA  = 0U;
3798         DDRCFG->MC_BASE2.INIT_MR_WR_MASK.INIT_MR_WR_MASK = (0x01U <<6U) |\
3799                 (0x3FU) ;
3800 
        DDRCFG->MC_BASE2.INIT_MR_W_REQ.INIT_MR_W_REQ   = 0x01U;
        /* wait for ack to confirm the mode register has been written */
        {
            uint32_t timeout = 0xFFFFU;
            while((DDRCFG->MC_BASE2.INIT_ACK.INIT_ACK & 0x01U) == 0U)
            {
                if(timeout-- == 0U)
                {
                    break;
                }
            }
        }
3807     }
3808 
3809     /*
3810     * training carried out here- sweeping write calibration offset from 0 to F
3811     * Explanation: A register, expert_wrcalib, is described in MSS DDR TIP
3812     * Register Map [1], and its purpose is to delay--by X number of memory clock
3813     * cycles--the write data, write data mask, and write output enable with the
3814     * respect to the address and command for each lane.
3815     */
3816     calib_data.mem_vref.vref_result = 0U;
3817     calib_data.mem_vref.lower = MEM_VREF_INVALID;
3818     calib_data.mem_vref.upper = MEM_VREF_INVALID;
3819     calib_data.mem_vref.status_lower = 0x00U;
3820     calib_data.mem_vref.status_upper = 0x00U;
    mask = 0xFU;    /* todo: obtain data width from user parameters */
3822 
3823     for (vRef=(0x1U<<4U);vRef<0x3fU;vRef=(vRef+0x1U))
3824     {
3825         /*
3826         * We change the value in the RPC register, but we will lso need to
3827         * change SCB as will not be reflected without a soft reset
3828         */
        CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS =\
            (CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & ~(0x1fU<<4U)) | vRef;
        /* need to set via the SCB, otherwise reset required. */
        IOSCB_BANKCONT_DDR->dpc_bits = (IOSCB_BANKCONT_DDR->dpc_bits\
            & ~(0x1fU<<4U)) | vRef;
3834 
3835         /* read one to flush MTC -  this is required */
3836         result = MTC_test(1U<<laneToTest, start_address, size);
3837         /* Read twice, two different patterns will be used */
3838         result = MTC_test(1U<<laneToTest, start_address, size);
3839         result |= MTC_test(1U<<laneToTest, start_address, size);
3840         if((result == 0U)&&(calib_data.mem_vref.lower == MEM_VREF_INVALID))
3841         {
3842             calib_data.mem_vref.lower = vRef;
3843             calib_data.mem_vref.upper = vRef;
3844             calib_data.mem_vref.status_lower = 0x01;
3845         }
3846         else if((result == 0U)&&(calib_data.mem_vref.lower != MEM_VREF_INVALID))
3847         {
3848             calib_data.mem_vref.upper = vRef;
            calib_data.mem_vref.status_upper = 0x01;
3850         }
3851         else if(calib_data.mem_vref.upper != MEM_VREF_INVALID)
3852         {
3853             break; /* we are finished */
3854         }
3855         else
3856         {
3857             /* continue */
3858         }
3859 
3860     }
3861 
3862     if(calib_data.mem_vref.upper != MEM_VREF_INVALID) /* we found lower/upper */
3863     {
3864         /*
3865         * now set vref
3866         * calculate optimal VREF calibration value =
3867         *                                    (left side + right side) / 2
3868         * */
        vRef = ((calib_data.mem_vref.lower + calib_data.mem_vref.upper)>>1U);
        CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS =\
            (CFG_DDR_SGMII_PHY->DPC_BITS.DPC_BITS & ~(0x1fU<<4U)) | vRef;
        /* need to set via the SCB, otherwise reset required. */
        IOSCB_BANKCONT_DDR->dpc_bits =\
            (IOSCB_BANKCONT_DDR->dpc_bits & ~(0x1fU<<4U)) | vRef;
3874     }
3875     else
3876     {
3877         result = 1U; /* failed to get good data at any voltage level */
3878     }
3879 
    return result;
}
3882 #endif
3883 
3884 /***************************************************************************//**
3885  * MTC_test
3886  * test memory using the NWL memory test core
3887  * There are numerous options
3888  * todo: Add user input as to option to use?
3889  * @param laneToTest
3890  * @param mask0
3891  * @param mask1   some lane less DQ as only used for parity
3892  * @param start_address
3893  * @param size = x, where x is used as power of two 2**x e.g. 256K => x == 18
3894  * @return pass/fail
3895  */
static uint8_t MTC_test(uint8_t mask, uint64_t start_address, uint32_t size,
        MTC_PATTERN data_pattern, MTC_ADD_PATTERN add_pattern, uint32_t *error)
3897 {
3898     if((*error & MTC_TIMEOUT_ERROR) == MTC_TIMEOUT_ERROR)
3899     {
3900         return (uint8_t)*error;
3901     }
3902     /* Write Calibration - first configure memory test */
3903     {
3904         /*
3905          *  write calibration
3906          *  configure common memory test interface by writing registers:
3907          *  MT_STOP_ON_ERROR, MT_DATA_PATTERN, MT_ADDR_PATTERN, MT_ADDR_BITS
3908          */
3909         /* see MTC user guide */
3910         DDRCFG->MEM_TEST.MT_STOP_ON_ERROR.MT_STOP_ON_ERROR = 0U;
3911         /* make sure off, will turn on later. */
3912         DDRCFG->MEM_TEST.MT_EN_SINGLE.MT_EN_SINGLE = 0x00U;
3913         /*
3914          * MT_DATA_PATTERN
3915          *
3916          * 0x00 => Counting pattern
3917          * 0x01 => walking 1's
3918          * 0x02 => pseudo random
3919          * 0x03 => no repeating pseudo random
3920          * 0x04 => alt 1's and 0's
3921          * 0x05 => alt 5's and A's
3922          * 0x06 => User specified
3923          * 0x07 => pseudo random 16-bit
3924          * 0x08 => pseudo random 8-bit
3925          * 0x09- 0x0f reserved
3926          *
3927          */
3928         {
3929             /*
3930             * Added changing pattern so write pattern is different, read back
3931             * can not pass on previously written data
3932             */
3933             DDRCFG->MEM_TEST.MT_DATA_PATTERN.MT_DATA_PATTERN = data_pattern;
3934         }
3935         if(add_pattern == MTC_ADD_RANDOM)
3936         {
3937             /*
3938              * MT_ADDR_PATTERN
3939              * 0x00 => Count in pattern
3940              * 0x01 => Pseudo Random Pattern
             * 0x02 => Arbitrary Pattern Gen (user defined) - Using RAMs
3942              */
3943             DDRCFG->MEM_TEST.MT_ADDR_PATTERN.MT_ADDR_PATTERN = 1U;
3944         }
3945         else
3946         {
3947             DDRCFG->MEM_TEST.MT_ADDR_PATTERN.MT_ADDR_PATTERN = 0U;
3948         }
3949     }
3950 
3951     if(add_pattern != MTC_ADD_RANDOM)
3952     {
3953         /*
3954          * Set the starting address and number to test
3955          *
3956          * MT_START_ADDR
3957          *   Starting address
         * MT_ADDR_BITS
         *   Length to test = 2 ** MT_ADDR_BITS
3960          */
3961         DDRCFG->MEM_TEST.MT_START_ADDR_0.MT_START_ADDR_0   =\
3962                 (uint32_t)(start_address & 0xFFFFFFFFUL);
        /* The address here is as seen from the DDR controller => starts at 0x0 */
3964         DDRCFG->MEM_TEST.MT_START_ADDR_1.MT_START_ADDR_1   =\
3965                 (uint32_t)((start_address >> 32U));
3966     }
3967     else
3968     {
3969         DDRCFG->MEM_TEST.MT_START_ADDR_0.MT_START_ADDR_0   = 0U;
3970         DDRCFG->MEM_TEST.MT_START_ADDR_1.MT_START_ADDR_1   = 0U;
3971     }
3972     DDRCFG->MEM_TEST.MT_ADDR_BITS.MT_ADDR_BITS        =\
            size; /* length tested = 2**size, e.g. 18 => 256K; todo: make user programmable */
3974 
3975     {
3976     /*
3977     * FOR each DQ lane
3978     *  set error mask registers MT_ERROR_MASK_* to mask out
3979     *    all error bits but the ones for the current DQ lane
3980     *    WHILE timeout counter is less than a threshold
3981     *        perform memory test by writing MT_EN or MT_EN_SINGLE
3982     *        wait for memory test completion by polling MT_DONE_ACK
3983     *        read back memory test error status from MT_ERROR_STS
3984     *       IF no error detected
3985     *          exit loop
3986     *        ELSE
3987     *          increment write calibration offset for current DQ lane
3988     *          by writing EXPERT_WRCALIB
3989     *    ENDWHILE
3990     *  ENDFOR
3991     */
3992     {
3993     /*
3994     * MT_ERROR_MASK
3995     * All bits set in this field mask corresponding bits in data fields
3996     * i.e. mt_error and mt_error_hold will not be set for errors in
3997     * those fields
3998     *
3999     * Structure of 144 bits same as DFI bus
4000     * 36 bits per lane ( 8 physical * 4) + (1ECC * 4) = 36
4001     *
4002     * If we wrote out the following pattern from software:
4003     * 0x12345678
4004     * 0x87654321
4005     * 0x56789876
4006     * 0x43211234
4007     * We should see:
4008     *      NNNN_YXXX_XXX3_4YXX_XXXX_76YX_XXXX_X21Y_XXXX_XX78
4009     *      N: not used
4010     *      Y:
4011     */
4012         DDRCFG->MEM_TEST.MT_ERROR_MASK_0.MT_ERROR_MASK_0 = 0xFFFFFFFFU;
4013         DDRCFG->MEM_TEST.MT_ERROR_MASK_1.MT_ERROR_MASK_1 = 0xFFFFFFFFU;
4014         DDRCFG->MEM_TEST.MT_ERROR_MASK_2.MT_ERROR_MASK_2 = 0xFFFFFFFFU;
4015         DDRCFG->MEM_TEST.MT_ERROR_MASK_3.MT_ERROR_MASK_3 = 0xFFFFFFFFU;
4016         DDRCFG->MEM_TEST.MT_ERROR_MASK_4.MT_ERROR_MASK_4 = 0xFFFFFFFFU;
4017 
4018         if (mask & 0x1U)
4019         {
4020             DDRCFG->MEM_TEST.MT_ERROR_MASK_0.MT_ERROR_MASK_0 &= 0xFFFFFF00U;
            DDRCFG->MEM_TEST.MT_ERROR_MASK_1.MT_ERROR_MASK_1 &= 0xFFFFF00FU;
            DDRCFG->MEM_TEST.MT_ERROR_MASK_2.MT_ERROR_MASK_2 &= 0xFFFF00FFU;
            DDRCFG->MEM_TEST.MT_ERROR_MASK_3.MT_ERROR_MASK_3 &= 0xFFF00FFFU;
            DDRCFG->MEM_TEST.MT_ERROR_MASK_4.MT_ERROR_MASK_4 &= 0xFFFFFFFFU;
4025         }
4026         if (mask & 0x2U)
4027         {
4028             DDRCFG->MEM_TEST.MT_ERROR_MASK_0.MT_ERROR_MASK_0 &= 0xFFFF00FFU;
4029             DDRCFG->MEM_TEST.MT_ERROR_MASK_1.MT_ERROR_MASK_1 &= 0xFFF00FFFU;
4030             DDRCFG->MEM_TEST.MT_ERROR_MASK_2.MT_ERROR_MASK_2 &= 0xFF00FFFFU;
4031             DDRCFG->MEM_TEST.MT_ERROR_MASK_3.MT_ERROR_MASK_3 &= 0xF00FFFFFU;
4032             DDRCFG->MEM_TEST.MT_ERROR_MASK_4.MT_ERROR_MASK_4 &= 0xFFFFFFFFU;
4033         }
4034         if (mask & 0x4U)
4035         {
4036             DDRCFG->MEM_TEST.MT_ERROR_MASK_0.MT_ERROR_MASK_0 &= 0xFF00FFFFU;
4037             DDRCFG->MEM_TEST.MT_ERROR_MASK_1.MT_ERROR_MASK_1 &= 0xF00FFFFFU;
4038             DDRCFG->MEM_TEST.MT_ERROR_MASK_2.MT_ERROR_MASK_2 &= 0x00FFFFFFU;
4039             DDRCFG->MEM_TEST.MT_ERROR_MASK_3.MT_ERROR_MASK_3 &= 0x0FFFFFFFU;
4040             DDRCFG->MEM_TEST.MT_ERROR_MASK_4.MT_ERROR_MASK_4 &= 0xFFFFFFF0U;
4041         }
4042         if (mask & 0x8U)
4043         {
4044             DDRCFG->MEM_TEST.MT_ERROR_MASK_0.MT_ERROR_MASK_0 &= 0x00FFFFFFU;
4045             DDRCFG->MEM_TEST.MT_ERROR_MASK_1.MT_ERROR_MASK_1 &= 0x0FFFFFFFU;
4046             DDRCFG->MEM_TEST.MT_ERROR_MASK_2.MT_ERROR_MASK_2 &= 0xFFFFFFF0U;
4047             DDRCFG->MEM_TEST.MT_ERROR_MASK_3.MT_ERROR_MASK_3 &= 0xFFFFFF00U;
4048             DDRCFG->MEM_TEST.MT_ERROR_MASK_4.MT_ERROR_MASK_4 &= 0xFFFFF00FU;
4049         }
4050         if (mask & 0x10U)
4051         {
4052             DDRCFG->MEM_TEST.MT_ERROR_MASK_0.MT_ERROR_MASK_0 &= 0xFFFFFFFFU;
4053             DDRCFG->MEM_TEST.MT_ERROR_MASK_1.MT_ERROR_MASK_1 &= 0xFFFFFFF0U;
4054             DDRCFG->MEM_TEST.MT_ERROR_MASK_2.MT_ERROR_MASK_2 &= 0xFFFFFF0FU;
4055             DDRCFG->MEM_TEST.MT_ERROR_MASK_3.MT_ERROR_MASK_3 &= 0xFFFFF0FFU;
4056             DDRCFG->MEM_TEST.MT_ERROR_MASK_4.MT_ERROR_MASK_4 &= 0xFFFF0FFFU;
4057         }
4058 
4059         /*
4060         * MT_EN
4061         * Enables memory test
4062         * If asserted at end of memory test, will keep going
4063         */
4064         DDRCFG->MEM_TEST.MT_EN.MT_EN = 0U;
4065         /*
4066         * MT_EN_SINGLE
4067         * Will not repeat if this is set
4068         */
4069         DDRCFG->MEM_TEST.MT_EN_SINGLE.MT_EN_SINGLE = 0x00U;
4070         DDRCFG->MEM_TEST.MT_EN_SINGLE.MT_EN_SINGLE = 0x01U;
4071         /*
4072         * MT_DONE_ACK
4073         * Set when test completes
4074         */
4075         volatile uint64_t something_to_do = 0U;
4076         #ifndef UNITTEST
4077         while (( DDRCFG->MEM_TEST.MT_DONE_ACK.MT_DONE_ACK & 0x01U) == 0U)
4078         {
4079             something_to_do++;
4080             if(something_to_do > 0xFFFFFFUL)
4081             {
4082 #ifdef DEBUG_DDR_INIT
4083                 (void)uprint32(g_debug_uart, "\n\rmtc test error:",MTC_TIMEOUT_ERROR);
4084 #endif
4085                 return (MTC_TIMEOUT_ERROR);
4086             }
4087             #ifdef RENODE_DEBUG
4088             break;
4089             #endif
4090         }
4091         #endif
4092         }
4093     }
4094     /*
4095     * MT_ERROR_STS
4096     * Return the error status
4097     * todo:Check NWL data and detail error states here
4098     */
4099 
4100     return (DDRCFG->MEM_TEST.MT_ERROR_STS.MT_ERROR_STS & 0x01U);
4101 
4102 }
4103 
4104 
4105 /***************************************************************************//**
4106  * Setup DDRC
4107  * These settings come from config tool
4108  *
4109  */
4110 #define _USE_SETTINGS_USED_IN_DDR3_FULL_CHIP_TEST
4111 
static void init_ddrc(void)
4113 {
4114     DDRCFG->ADDR_MAP.CFG_MANUAL_ADDRESS_MAP.CFG_MANUAL_ADDRESS_MAP =\
4115         LIBERO_SETTING_CFG_MANUAL_ADDRESS_MAP;
4116     DDRCFG->ADDR_MAP.CFG_CHIPADDR_MAP.CFG_CHIPADDR_MAP =\
4117         LIBERO_SETTING_CFG_CHIPADDR_MAP;
4118     DDRCFG->ADDR_MAP.CFG_CIDADDR_MAP.CFG_CIDADDR_MAP =\
4119         LIBERO_SETTING_CFG_CIDADDR_MAP;
4120     DDRCFG->ADDR_MAP.CFG_MB_AUTOPCH_COL_BIT_POS_LOW.CFG_MB_AUTOPCH_COL_BIT_POS_LOW =\
4121         LIBERO_SETTING_CFG_MB_AUTOPCH_COL_BIT_POS_LOW;
4122     DDRCFG->ADDR_MAP.CFG_MB_AUTOPCH_COL_BIT_POS_HIGH.CFG_MB_AUTOPCH_COL_BIT_POS_HIGH =\
4123         LIBERO_SETTING_CFG_MB_AUTOPCH_COL_BIT_POS_HIGH;
4124     DDRCFG->ADDR_MAP.CFG_BANKADDR_MAP_0.CFG_BANKADDR_MAP_0 =\
4125         LIBERO_SETTING_CFG_BANKADDR_MAP_0;
4126     DDRCFG->ADDR_MAP.CFG_BANKADDR_MAP_1.CFG_BANKADDR_MAP_1 =\
4127         LIBERO_SETTING_CFG_BANKADDR_MAP_1;
4128     DDRCFG->ADDR_MAP.CFG_ROWADDR_MAP_0.CFG_ROWADDR_MAP_0 =\
4129         LIBERO_SETTING_CFG_ROWADDR_MAP_0;
4130     DDRCFG->ADDR_MAP.CFG_ROWADDR_MAP_1.CFG_ROWADDR_MAP_1 =\
4131         LIBERO_SETTING_CFG_ROWADDR_MAP_1;
4132     DDRCFG->ADDR_MAP.CFG_ROWADDR_MAP_2.CFG_ROWADDR_MAP_2 =\
4133         LIBERO_SETTING_CFG_ROWADDR_MAP_2;
4134     DDRCFG->ADDR_MAP.CFG_ROWADDR_MAP_3.CFG_ROWADDR_MAP_3 =\
4135         LIBERO_SETTING_CFG_ROWADDR_MAP_3;
4136     DDRCFG->ADDR_MAP.CFG_COLADDR_MAP_0.CFG_COLADDR_MAP_0 =\
4137         LIBERO_SETTING_CFG_COLADDR_MAP_0;
4138     DDRCFG->ADDR_MAP.CFG_COLADDR_MAP_1.CFG_COLADDR_MAP_1 =\
4139         LIBERO_SETTING_CFG_COLADDR_MAP_1;
4140     DDRCFG->ADDR_MAP.CFG_COLADDR_MAP_2.CFG_COLADDR_MAP_2 =\
4141         LIBERO_SETTING_CFG_COLADDR_MAP_2;
4142     DDRCFG->MC_BASE3.CFG_VRCG_ENABLE.CFG_VRCG_ENABLE =\
4143         LIBERO_SETTING_CFG_VRCG_ENABLE;
4144     DDRCFG->MC_BASE3.CFG_VRCG_DISABLE.CFG_VRCG_DISABLE =\
4145         LIBERO_SETTING_CFG_VRCG_DISABLE;
4146     DDRCFG->MC_BASE3.CFG_WRITE_LATENCY_SET.CFG_WRITE_LATENCY_SET =\
4147         LIBERO_SETTING_CFG_WRITE_LATENCY_SET;
4148     DDRCFG->MC_BASE3.CFG_THERMAL_OFFSET.CFG_THERMAL_OFFSET =\
4149         LIBERO_SETTING_CFG_THERMAL_OFFSET;
4150     DDRCFG->MC_BASE3.CFG_SOC_ODT.CFG_SOC_ODT = LIBERO_SETTING_CFG_SOC_ODT;
4151     DDRCFG->MC_BASE3.CFG_ODTE_CK.CFG_ODTE_CK = LIBERO_SETTING_CFG_ODTE_CK;
4152     DDRCFG->MC_BASE3.CFG_ODTE_CS.CFG_ODTE_CS = LIBERO_SETTING_CFG_ODTE_CS;
4153     DDRCFG->MC_BASE3.CFG_ODTD_CA.CFG_ODTD_CA = LIBERO_SETTING_CFG_ODTD_CA;
4154     DDRCFG->MC_BASE3.CFG_LPDDR4_FSP_OP.CFG_LPDDR4_FSP_OP =\
4155         LIBERO_SETTING_CFG_LPDDR4_FSP_OP;
4156     DDRCFG->MC_BASE3.CFG_GENERATE_REFRESH_ON_SRX.CFG_GENERATE_REFRESH_ON_SRX =\
4157         LIBERO_SETTING_CFG_GENERATE_REFRESH_ON_SRX;
4158     DDRCFG->MC_BASE3.CFG_DBI_CL.CFG_DBI_CL = LIBERO_SETTING_CFG_DBI_CL;
4159     DDRCFG->MC_BASE3.CFG_NON_DBI_CL.CFG_NON_DBI_CL =\
4160         LIBERO_SETTING_CFG_NON_DBI_CL;
4161     DDRCFG->MC_BASE3.INIT_FORCE_WRITE_DATA_0.INIT_FORCE_WRITE_DATA_0 =\
4162         LIBERO_SETTING_INIT_FORCE_WRITE_DATA_0;
4163     DDRCFG->MC_BASE1.CFG_WRITE_CRC.CFG_WRITE_CRC =\
4164         LIBERO_SETTING_CFG_WRITE_CRC;
4165     DDRCFG->MC_BASE1.CFG_MPR_READ_FORMAT.CFG_MPR_READ_FORMAT =\
4166         LIBERO_SETTING_CFG_MPR_READ_FORMAT;
4167     DDRCFG->MC_BASE1.CFG_WR_CMD_LAT_CRC_DM.CFG_WR_CMD_LAT_CRC_DM =\
4168         LIBERO_SETTING_CFG_WR_CMD_LAT_CRC_DM;
4169     DDRCFG->MC_BASE1.CFG_FINE_GRAN_REF_MODE.CFG_FINE_GRAN_REF_MODE =\
4170         LIBERO_SETTING_CFG_FINE_GRAN_REF_MODE;
4171     DDRCFG->MC_BASE1.CFG_TEMP_SENSOR_READOUT.CFG_TEMP_SENSOR_READOUT =\
4172         LIBERO_SETTING_CFG_TEMP_SENSOR_READOUT;
4173     DDRCFG->MC_BASE1.CFG_PER_DRAM_ADDR_EN.CFG_PER_DRAM_ADDR_EN =\
4174         LIBERO_SETTING_CFG_PER_DRAM_ADDR_EN;
4175     DDRCFG->MC_BASE1.CFG_GEARDOWN_MODE.CFG_GEARDOWN_MODE =\
4176         LIBERO_SETTING_CFG_GEARDOWN_MODE;
4177     DDRCFG->MC_BASE1.CFG_WR_PREAMBLE.CFG_WR_PREAMBLE =\
4178         LIBERO_SETTING_CFG_WR_PREAMBLE;
4179     DDRCFG->MC_BASE1.CFG_RD_PREAMBLE.CFG_RD_PREAMBLE =\
4180         LIBERO_SETTING_CFG_RD_PREAMBLE;
4181     DDRCFG->MC_BASE1.CFG_RD_PREAMB_TRN_MODE.CFG_RD_PREAMB_TRN_MODE =\
4182         LIBERO_SETTING_CFG_RD_PREAMB_TRN_MODE;
4183     DDRCFG->MC_BASE1.CFG_SR_ABORT.CFG_SR_ABORT = LIBERO_SETTING_CFG_SR_ABORT;
4184     DDRCFG->MC_BASE1.CFG_CS_TO_CMDADDR_LATENCY.CFG_CS_TO_CMDADDR_LATENCY =\
4185         LIBERO_SETTING_CFG_CS_TO_CMDADDR_LATENCY;
4186     DDRCFG->MC_BASE1.CFG_INT_VREF_MON.CFG_INT_VREF_MON =\
4187         LIBERO_SETTING_CFG_INT_VREF_MON;
4188     DDRCFG->MC_BASE1.CFG_TEMP_CTRL_REF_MODE.CFG_TEMP_CTRL_REF_MODE =\
4189         LIBERO_SETTING_CFG_TEMP_CTRL_REF_MODE;
4190     DDRCFG->MC_BASE1.CFG_TEMP_CTRL_REF_RANGE.CFG_TEMP_CTRL_REF_RANGE =\
4191         LIBERO_SETTING_CFG_TEMP_CTRL_REF_RANGE;
4192     DDRCFG->MC_BASE1.CFG_MAX_PWR_DOWN_MODE.CFG_MAX_PWR_DOWN_MODE =\
4193         LIBERO_SETTING_CFG_MAX_PWR_DOWN_MODE;
4194     DDRCFG->MC_BASE1.CFG_READ_DBI.CFG_READ_DBI = LIBERO_SETTING_CFG_READ_DBI;
4195     DDRCFG->MC_BASE1.CFG_WRITE_DBI.CFG_WRITE_DBI =\
4196         LIBERO_SETTING_CFG_WRITE_DBI;
4197     DDRCFG->MC_BASE1.CFG_DATA_MASK.CFG_DATA_MASK =\
4198         LIBERO_SETTING_CFG_DATA_MASK;
4199     DDRCFG->MC_BASE1.CFG_CA_PARITY_PERSIST_ERR.CFG_CA_PARITY_PERSIST_ERR =\
4200         LIBERO_SETTING_CFG_CA_PARITY_PERSIST_ERR;
4201     DDRCFG->MC_BASE1.CFG_RTT_PARK.CFG_RTT_PARK = LIBERO_SETTING_CFG_RTT_PARK;
4202     DDRCFG->MC_BASE1.CFG_ODT_INBUF_4_PD.CFG_ODT_INBUF_4_PD =\
4203         LIBERO_SETTING_CFG_ODT_INBUF_4_PD;
4204     DDRCFG->MC_BASE1.CFG_CA_PARITY_ERR_STATUS.CFG_CA_PARITY_ERR_STATUS =\
4205         LIBERO_SETTING_CFG_CA_PARITY_ERR_STATUS;
4206     DDRCFG->MC_BASE1.CFG_CRC_ERROR_CLEAR.CFG_CRC_ERROR_CLEAR =\
4207         LIBERO_SETTING_CFG_CRC_ERROR_CLEAR;
4208     DDRCFG->MC_BASE1.CFG_CA_PARITY_LATENCY.CFG_CA_PARITY_LATENCY =\
4209         LIBERO_SETTING_CFG_CA_PARITY_LATENCY;
4210     DDRCFG->MC_BASE1.CFG_CCD_S.CFG_CCD_S = LIBERO_SETTING_CFG_CCD_S;
4211     DDRCFG->MC_BASE1.CFG_CCD_L.CFG_CCD_L = LIBERO_SETTING_CFG_CCD_L;
4212     DDRCFG->MC_BASE1.CFG_VREFDQ_TRN_ENABLE.CFG_VREFDQ_TRN_ENABLE =\
4213         LIBERO_SETTING_CFG_VREFDQ_TRN_ENABLE;
4214     DDRCFG->MC_BASE1.CFG_VREFDQ_TRN_RANGE.CFG_VREFDQ_TRN_RANGE =\
4215         LIBERO_SETTING_CFG_VREFDQ_TRN_RANGE;
4216     DDRCFG->MC_BASE1.CFG_VREFDQ_TRN_VALUE.CFG_VREFDQ_TRN_VALUE =\
4217         LIBERO_SETTING_CFG_VREFDQ_TRN_VALUE;
4218     DDRCFG->MC_BASE1.CFG_RRD_S.CFG_RRD_S = LIBERO_SETTING_CFG_RRD_S;
4219     DDRCFG->MC_BASE1.CFG_RRD_L.CFG_RRD_L = LIBERO_SETTING_CFG_RRD_L;
4220     DDRCFG->MC_BASE1.CFG_WTR_S.CFG_WTR_S = LIBERO_SETTING_CFG_WTR_S;
4221     DDRCFG->MC_BASE1.CFG_WTR_L.CFG_WTR_L = LIBERO_SETTING_CFG_WTR_L;
4222     DDRCFG->MC_BASE1.CFG_WTR_S_CRC_DM.CFG_WTR_S_CRC_DM =\
4223         LIBERO_SETTING_CFG_WTR_S_CRC_DM;
4224     DDRCFG->MC_BASE1.CFG_WTR_L_CRC_DM.CFG_WTR_L_CRC_DM =\
4225         LIBERO_SETTING_CFG_WTR_L_CRC_DM;
4226     DDRCFG->MC_BASE1.CFG_WR_CRC_DM.CFG_WR_CRC_DM =\
4227         LIBERO_SETTING_CFG_WR_CRC_DM;
4228     DDRCFG->MC_BASE1.CFG_RFC1.CFG_RFC1 = LIBERO_SETTING_CFG_RFC1;
4229     DDRCFG->MC_BASE1.CFG_RFC2.CFG_RFC2 = LIBERO_SETTING_CFG_RFC2;
4230     DDRCFG->MC_BASE1.CFG_RFC4.CFG_RFC4 = LIBERO_SETTING_CFG_RFC4;
4231     DDRCFG->MC_BASE1.CFG_NIBBLE_DEVICES.CFG_NIBBLE_DEVICES =\
4232         LIBERO_SETTING_CFG_NIBBLE_DEVICES;
4233     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS0_0.CFG_BIT_MAP_INDEX_CS0_0 =\
4234         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS0_0;
4235     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS0_1.CFG_BIT_MAP_INDEX_CS0_1 =\
4236         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS0_1;
4237     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS1_0.CFG_BIT_MAP_INDEX_CS1_0 =\
4238         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS1_0;
4239     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS1_1.CFG_BIT_MAP_INDEX_CS1_1 =\
4240         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS1_1;
4241     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS2_0.CFG_BIT_MAP_INDEX_CS2_0 =\
4242         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS2_0;
4243     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS2_1.CFG_BIT_MAP_INDEX_CS2_1 =\
4244         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS2_1;
4245     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS3_0.CFG_BIT_MAP_INDEX_CS3_0 =\
4246         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS3_0;
4247     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS3_1.CFG_BIT_MAP_INDEX_CS3_1 =\
4248         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS3_1;
4249     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS4_0.CFG_BIT_MAP_INDEX_CS4_0 =\
4250         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS4_0;
4251     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS4_1.CFG_BIT_MAP_INDEX_CS4_1 =\
4252         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS4_1;
4253     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS5_0.CFG_BIT_MAP_INDEX_CS5_0 =\
4254         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS5_0;
4255     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS5_1.CFG_BIT_MAP_INDEX_CS5_1 =\
4256         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS5_1;
4257     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS6_0.CFG_BIT_MAP_INDEX_CS6_0 =\
4258         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS6_0;
4259     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS6_1.CFG_BIT_MAP_INDEX_CS6_1 =\
4260         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS6_1;
4261     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS7_0.CFG_BIT_MAP_INDEX_CS7_0 =\
4262         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS7_0;
4263     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS7_1.CFG_BIT_MAP_INDEX_CS7_1 =\
4264         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS7_1;
4265     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS8_0.CFG_BIT_MAP_INDEX_CS8_0 =\
4266         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS8_0;
4267     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS8_1.CFG_BIT_MAP_INDEX_CS8_1 =\
4268         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS8_1;
4269     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS9_0.CFG_BIT_MAP_INDEX_CS9_0 =\
4270         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS9_0;
4271     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS9_1.CFG_BIT_MAP_INDEX_CS9_1 =\
4272         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS9_1;
4273     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS10_0.CFG_BIT_MAP_INDEX_CS10_0 =\
4274         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS10_0;
4275     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS10_1.CFG_BIT_MAP_INDEX_CS10_1 =\
4276         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS10_1;
4277     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS11_0.CFG_BIT_MAP_INDEX_CS11_0 =\
4278         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS11_0;
4279     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS11_1.CFG_BIT_MAP_INDEX_CS11_1 =\
4280         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS11_1;
4281     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS12_0.CFG_BIT_MAP_INDEX_CS12_0 =\
4282         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS12_0;
4283     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS12_1.CFG_BIT_MAP_INDEX_CS12_1 =\
4284         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS12_1;
4285     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS13_0.CFG_BIT_MAP_INDEX_CS13_0 =\
4286         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS13_0;
4287     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS13_1.CFG_BIT_MAP_INDEX_CS13_1 =\
4288         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS13_1;
4289     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS14_0.CFG_BIT_MAP_INDEX_CS14_0 =\
4290         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS14_0;
4291     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS14_1.CFG_BIT_MAP_INDEX_CS14_1 =\
4292         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS14_1;
4293     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS15_0.CFG_BIT_MAP_INDEX_CS15_0 =\
4294         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS15_0;
4295     DDRCFG->MC_BASE1.CFG_BIT_MAP_INDEX_CS15_1.CFG_BIT_MAP_INDEX_CS15_1 =\
4296         LIBERO_SETTING_CFG_BIT_MAP_INDEX_CS15_1;
4297     DDRCFG->MC_BASE1.CFG_NUM_LOGICAL_RANKS_PER_3DS.CFG_NUM_LOGICAL_RANKS_PER_3DS =\
4298         LIBERO_SETTING_CFG_NUM_LOGICAL_RANKS_PER_3DS;
4299     DDRCFG->MC_BASE1.CFG_RFC_DLR1.CFG_RFC_DLR1 = LIBERO_SETTING_CFG_RFC_DLR1;
4300     DDRCFG->MC_BASE1.CFG_RFC_DLR2.CFG_RFC_DLR2 = LIBERO_SETTING_CFG_RFC_DLR2;
4301     DDRCFG->MC_BASE1.CFG_RFC_DLR4.CFG_RFC_DLR4 = LIBERO_SETTING_CFG_RFC_DLR4;
4302     DDRCFG->MC_BASE1.CFG_RRD_DLR.CFG_RRD_DLR = LIBERO_SETTING_CFG_RRD_DLR;
4303     DDRCFG->MC_BASE1.CFG_FAW_DLR.CFG_FAW_DLR = LIBERO_SETTING_CFG_FAW_DLR;
4304     DDRCFG->MC_BASE1.CFG_ADVANCE_ACTIVATE_READY.CFG_ADVANCE_ACTIVATE_READY =\
4305         LIBERO_SETTING_CFG_ADVANCE_ACTIVATE_READY;
4306     DDRCFG->MC_BASE2.CTRLR_SOFT_RESET_N.CTRLR_SOFT_RESET_N =\
4307         LIBERO_SETTING_CTRLR_SOFT_RESET_N;
4308     DDRCFG->MC_BASE2.CFG_LOOKAHEAD_PCH.CFG_LOOKAHEAD_PCH =\
4309         LIBERO_SETTING_CFG_LOOKAHEAD_PCH;
4310     DDRCFG->MC_BASE2.CFG_LOOKAHEAD_ACT.CFG_LOOKAHEAD_ACT =\
4311         LIBERO_SETTING_CFG_LOOKAHEAD_ACT;
4312     DDRCFG->MC_BASE2.INIT_AUTOINIT_DISABLE.INIT_AUTOINIT_DISABLE =\
4313         LIBERO_SETTING_INIT_AUTOINIT_DISABLE;
4314     DDRCFG->MC_BASE2.INIT_FORCE_RESET.INIT_FORCE_RESET =\
4315         LIBERO_SETTING_INIT_FORCE_RESET;
4316     DDRCFG->MC_BASE2.INIT_GEARDOWN_EN.INIT_GEARDOWN_EN =\
4317         LIBERO_SETTING_INIT_GEARDOWN_EN;
4318     DDRCFG->MC_BASE2.INIT_DISABLE_CKE.INIT_DISABLE_CKE =\
4319         LIBERO_SETTING_INIT_DISABLE_CKE;
4320     DDRCFG->MC_BASE2.INIT_CS.INIT_CS = LIBERO_SETTING_INIT_CS;
4321     DDRCFG->MC_BASE2.INIT_PRECHARGE_ALL.INIT_PRECHARGE_ALL =\
4322         LIBERO_SETTING_INIT_PRECHARGE_ALL;
4323     DDRCFG->MC_BASE2.INIT_REFRESH.INIT_REFRESH = LIBERO_SETTING_INIT_REFRESH;
4324     DDRCFG->MC_BASE2.INIT_ZQ_CAL_REQ.INIT_ZQ_CAL_REQ =\
4325         LIBERO_SETTING_INIT_ZQ_CAL_REQ;
4326     DDRCFG->MC_BASE2.CFG_BL.CFG_BL = LIBERO_SETTING_CFG_BL;
4327     DDRCFG->MC_BASE2.CTRLR_INIT.CTRLR_INIT = LIBERO_SETTING_CTRLR_INIT;
4328     DDRCFG->MC_BASE2.CFG_AUTO_REF_EN.CFG_AUTO_REF_EN =\
4329         LIBERO_SETTING_CFG_AUTO_REF_EN;
4330     DDRCFG->MC_BASE2.CFG_RAS.CFG_RAS = LIBERO_SETTING_CFG_RAS;
4331     DDRCFG->MC_BASE2.CFG_RCD.CFG_RCD = LIBERO_SETTING_CFG_RCD;
4332     DDRCFG->MC_BASE2.CFG_RRD.CFG_RRD = LIBERO_SETTING_CFG_RRD;
4333     DDRCFG->MC_BASE2.CFG_RP.CFG_RP = LIBERO_SETTING_CFG_RP;
4334     DDRCFG->MC_BASE2.CFG_RC.CFG_RC = LIBERO_SETTING_CFG_RC;
4335     DDRCFG->MC_BASE2.CFG_FAW.CFG_FAW = LIBERO_SETTING_CFG_FAW;
4336     DDRCFG->MC_BASE2.CFG_RFC.CFG_RFC = LIBERO_SETTING_CFG_RFC;
4337     DDRCFG->MC_BASE2.CFG_RTP.CFG_RTP = LIBERO_SETTING_CFG_RTP;
4338     DDRCFG->MC_BASE2.CFG_WR.CFG_WR = LIBERO_SETTING_CFG_WR;
4339     DDRCFG->MC_BASE2.CFG_WTR.CFG_WTR = LIBERO_SETTING_CFG_WTR;
4340     DDRCFG->MC_BASE2.CFG_PASR.CFG_PASR = LIBERO_SETTING_CFG_PASR;
4341     DDRCFG->MC_BASE2.CFG_XP.CFG_XP = LIBERO_SETTING_CFG_XP;
4342     DDRCFG->MC_BASE2.CFG_XSR.CFG_XSR = LIBERO_SETTING_CFG_XSR;
4343     DDRCFG->MC_BASE2.CFG_CL.CFG_CL = LIBERO_SETTING_CFG_CL;
4344     DDRCFG->MC_BASE2.CFG_READ_TO_WRITE.CFG_READ_TO_WRITE =\
4345         LIBERO_SETTING_CFG_READ_TO_WRITE;
4346     DDRCFG->MC_BASE2.CFG_WRITE_TO_WRITE.CFG_WRITE_TO_WRITE =\
4347         LIBERO_SETTING_CFG_WRITE_TO_WRITE;
4348     DDRCFG->MC_BASE2.CFG_READ_TO_READ.CFG_READ_TO_READ =\
4349         LIBERO_SETTING_CFG_READ_TO_READ;
4350     DDRCFG->MC_BASE2.CFG_WRITE_TO_READ.CFG_WRITE_TO_READ =\
4351         LIBERO_SETTING_CFG_WRITE_TO_READ;
4352     DDRCFG->MC_BASE2.CFG_READ_TO_WRITE_ODT.CFG_READ_TO_WRITE_ODT =\
4353         LIBERO_SETTING_CFG_READ_TO_WRITE_ODT;
4354     DDRCFG->MC_BASE2.CFG_WRITE_TO_WRITE_ODT.CFG_WRITE_TO_WRITE_ODT =\
4355         LIBERO_SETTING_CFG_WRITE_TO_WRITE_ODT;
4356     DDRCFG->MC_BASE2.CFG_READ_TO_READ_ODT.CFG_READ_TO_READ_ODT =\
4357         LIBERO_SETTING_CFG_READ_TO_READ_ODT;
4358     DDRCFG->MC_BASE2.CFG_WRITE_TO_READ_ODT.CFG_WRITE_TO_READ_ODT =\
4359         LIBERO_SETTING_CFG_WRITE_TO_READ_ODT;
4360     DDRCFG->MC_BASE2.CFG_MIN_READ_IDLE.CFG_MIN_READ_IDLE =\
4361         LIBERO_SETTING_CFG_MIN_READ_IDLE;
4362     DDRCFG->MC_BASE2.CFG_MRD.CFG_MRD = LIBERO_SETTING_CFG_MRD;
4363     DDRCFG->MC_BASE2.CFG_BT.CFG_BT = LIBERO_SETTING_CFG_BT;
4364     DDRCFG->MC_BASE2.CFG_DS.CFG_DS = LIBERO_SETTING_CFG_DS;
4365     DDRCFG->MC_BASE2.CFG_QOFF.CFG_QOFF = LIBERO_SETTING_CFG_QOFF;
4366     DDRCFG->MC_BASE2.CFG_RTT.CFG_RTT = LIBERO_SETTING_CFG_RTT;
4367     DDRCFG->MC_BASE2.CFG_DLL_DISABLE.CFG_DLL_DISABLE =\
4368         LIBERO_SETTING_CFG_DLL_DISABLE;
4369     DDRCFG->MC_BASE2.CFG_REF_PER.CFG_REF_PER = LIBERO_SETTING_CFG_REF_PER;
4370     DDRCFG->MC_BASE2.CFG_STARTUP_DELAY.CFG_STARTUP_DELAY =\
4371         LIBERO_SETTING_CFG_STARTUP_DELAY;
4372     DDRCFG->MC_BASE2.CFG_MEM_COLBITS.CFG_MEM_COLBITS =\
4373         LIBERO_SETTING_CFG_MEM_COLBITS;
4374     DDRCFG->MC_BASE2.CFG_MEM_ROWBITS.CFG_MEM_ROWBITS =\
4375         LIBERO_SETTING_CFG_MEM_ROWBITS;
4376     DDRCFG->MC_BASE2.CFG_MEM_BANKBITS.CFG_MEM_BANKBITS =\
4377         LIBERO_SETTING_CFG_MEM_BANKBITS;
4378     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS0.CFG_ODT_RD_MAP_CS0 =\
4379         LIBERO_SETTING_CFG_ODT_RD_MAP_CS0;
4380     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS1.CFG_ODT_RD_MAP_CS1 =\
4381         LIBERO_SETTING_CFG_ODT_RD_MAP_CS1;
4382     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS2.CFG_ODT_RD_MAP_CS2 =\
4383         LIBERO_SETTING_CFG_ODT_RD_MAP_CS2;
4384     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS3.CFG_ODT_RD_MAP_CS3 =\
4385         LIBERO_SETTING_CFG_ODT_RD_MAP_CS3;
4386     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS4.CFG_ODT_RD_MAP_CS4 =\
4387         LIBERO_SETTING_CFG_ODT_RD_MAP_CS4;
4388     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS5.CFG_ODT_RD_MAP_CS5 =\
4389         LIBERO_SETTING_CFG_ODT_RD_MAP_CS5;
4390     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS6.CFG_ODT_RD_MAP_CS6 =\
4391         LIBERO_SETTING_CFG_ODT_RD_MAP_CS6;
4392     DDRCFG->MC_BASE2.CFG_ODT_RD_MAP_CS7.CFG_ODT_RD_MAP_CS7 =\
4393         LIBERO_SETTING_CFG_ODT_RD_MAP_CS7;
4394     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS0.CFG_ODT_WR_MAP_CS0 =\
4395         LIBERO_SETTING_CFG_ODT_WR_MAP_CS0;
4396     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS1.CFG_ODT_WR_MAP_CS1 =\
4397         LIBERO_SETTING_CFG_ODT_WR_MAP_CS1;
4398     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS2.CFG_ODT_WR_MAP_CS2 =\
4399         LIBERO_SETTING_CFG_ODT_WR_MAP_CS2;
4400     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS3.CFG_ODT_WR_MAP_CS3 =\
4401         LIBERO_SETTING_CFG_ODT_WR_MAP_CS3;
4402     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS4.CFG_ODT_WR_MAP_CS4 =\
4403         LIBERO_SETTING_CFG_ODT_WR_MAP_CS4;
4404     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS5.CFG_ODT_WR_MAP_CS5 =\
4405         LIBERO_SETTING_CFG_ODT_WR_MAP_CS5;
4406     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS6.CFG_ODT_WR_MAP_CS6 =\
4407         LIBERO_SETTING_CFG_ODT_WR_MAP_CS6;
4408     DDRCFG->MC_BASE2.CFG_ODT_WR_MAP_CS7.CFG_ODT_WR_MAP_CS7 =\
4409         LIBERO_SETTING_CFG_ODT_WR_MAP_CS7;
4410     DDRCFG->MC_BASE2.CFG_ODT_RD_TURN_ON.CFG_ODT_RD_TURN_ON =\
4411         LIBERO_SETTING_CFG_ODT_RD_TURN_ON;
4412     DDRCFG->MC_BASE2.CFG_ODT_WR_TURN_ON.CFG_ODT_WR_TURN_ON =\
4413         LIBERO_SETTING_CFG_ODT_WR_TURN_ON;
4414     DDRCFG->MC_BASE2.CFG_ODT_RD_TURN_OFF.CFG_ODT_RD_TURN_OFF =\
4415         LIBERO_SETTING_CFG_ODT_RD_TURN_OFF;
4416     DDRCFG->MC_BASE2.CFG_ODT_WR_TURN_OFF.CFG_ODT_WR_TURN_OFF =\
4417         LIBERO_SETTING_CFG_ODT_WR_TURN_OFF;
4418     DDRCFG->MC_BASE2.CFG_EMR3.CFG_EMR3 = LIBERO_SETTING_CFG_EMR3;
4419     DDRCFG->MC_BASE2.CFG_TWO_T.CFG_TWO_T = LIBERO_SETTING_CFG_TWO_T;
4420     DDRCFG->MC_BASE2.CFG_TWO_T_SEL_CYCLE.CFG_TWO_T_SEL_CYCLE =\
4421         LIBERO_SETTING_CFG_TWO_T_SEL_CYCLE;
4422     DDRCFG->MC_BASE2.CFG_REGDIMM.CFG_REGDIMM = LIBERO_SETTING_CFG_REGDIMM;
4423     DDRCFG->MC_BASE2.CFG_MOD.CFG_MOD = LIBERO_SETTING_CFG_MOD;
4424     DDRCFG->MC_BASE2.CFG_XS.CFG_XS = LIBERO_SETTING_CFG_XS;
4425     DDRCFG->MC_BASE2.CFG_XSDLL.CFG_XSDLL = LIBERO_SETTING_CFG_XSDLL;
4426     DDRCFG->MC_BASE2.CFG_XPR.CFG_XPR = LIBERO_SETTING_CFG_XPR;
4427     DDRCFG->MC_BASE2.CFG_AL_MODE.CFG_AL_MODE = LIBERO_SETTING_CFG_AL_MODE;
4428     DDRCFG->MC_BASE2.CFG_CWL.CFG_CWL = LIBERO_SETTING_CFG_CWL;
4429     DDRCFG->MC_BASE2.CFG_BL_MODE.CFG_BL_MODE = LIBERO_SETTING_CFG_BL_MODE;
4430     DDRCFG->MC_BASE2.CFG_TDQS.CFG_TDQS = LIBERO_SETTING_CFG_TDQS;
4431     DDRCFG->MC_BASE2.CFG_RTT_WR.CFG_RTT_WR = LIBERO_SETTING_CFG_RTT_WR;
4432     DDRCFG->MC_BASE2.CFG_LP_ASR.CFG_LP_ASR = LIBERO_SETTING_CFG_LP_ASR;
4433     DDRCFG->MC_BASE2.CFG_AUTO_SR.CFG_AUTO_SR = LIBERO_SETTING_CFG_AUTO_SR;
4434     DDRCFG->MC_BASE2.CFG_SRT.CFG_SRT = LIBERO_SETTING_CFG_SRT;
4435     DDRCFG->MC_BASE2.CFG_ADDR_MIRROR.CFG_ADDR_MIRROR =\
4436         LIBERO_SETTING_CFG_ADDR_MIRROR;
4437     DDRCFG->MC_BASE2.CFG_ZQ_CAL_TYPE.CFG_ZQ_CAL_TYPE =\
4438         LIBERO_SETTING_CFG_ZQ_CAL_TYPE;
4439     DDRCFG->MC_BASE2.CFG_ZQ_CAL_PER.CFG_ZQ_CAL_PER =\
4440         LIBERO_SETTING_CFG_ZQ_CAL_PER;
4441     DDRCFG->MC_BASE2.CFG_AUTO_ZQ_CAL_EN.CFG_AUTO_ZQ_CAL_EN =\
4442         LIBERO_SETTING_CFG_AUTO_ZQ_CAL_EN;
4443     DDRCFG->MC_BASE2.CFG_MEMORY_TYPE.CFG_MEMORY_TYPE =\
4444         LIBERO_SETTING_CFG_MEMORY_TYPE;
4445     DDRCFG->MC_BASE2.CFG_ONLY_SRANK_CMDS.CFG_ONLY_SRANK_CMDS =\
4446         LIBERO_SETTING_CFG_ONLY_SRANK_CMDS;
4447     DDRCFG->MC_BASE2.CFG_NUM_RANKS.CFG_NUM_RANKS =\
4448         LIBERO_SETTING_CFG_NUM_RANKS;
4449     DDRCFG->MC_BASE2.CFG_QUAD_RANK.CFG_QUAD_RANK =\
4450         LIBERO_SETTING_CFG_QUAD_RANK;
4451     DDRCFG->MC_BASE2.CFG_EARLY_RANK_TO_WR_START.CFG_EARLY_RANK_TO_WR_START =\
4452         LIBERO_SETTING_CFG_EARLY_RANK_TO_WR_START;
4453     DDRCFG->MC_BASE2.CFG_EARLY_RANK_TO_RD_START.CFG_EARLY_RANK_TO_RD_START =\
4454         LIBERO_SETTING_CFG_EARLY_RANK_TO_RD_START;
4455     DDRCFG->MC_BASE2.CFG_PASR_BANK.CFG_PASR_BANK =\
4456         LIBERO_SETTING_CFG_PASR_BANK;
4457     DDRCFG->MC_BASE2.CFG_PASR_SEG.CFG_PASR_SEG = LIBERO_SETTING_CFG_PASR_SEG;
4458     DDRCFG->MC_BASE2.INIT_MRR_MODE.INIT_MRR_MODE =\
4459         LIBERO_SETTING_INIT_MRR_MODE;
4460     DDRCFG->MC_BASE2.INIT_MR_W_REQ.INIT_MR_W_REQ =\
4461         LIBERO_SETTING_INIT_MR_W_REQ;
4462     DDRCFG->MC_BASE2.INIT_MR_ADDR.INIT_MR_ADDR = LIBERO_SETTING_INIT_MR_ADDR;
4463     DDRCFG->MC_BASE2.INIT_MR_WR_DATA.INIT_MR_WR_DATA =\
4464         LIBERO_SETTING_INIT_MR_WR_DATA;
4465     DDRCFG->MC_BASE2.INIT_MR_WR_MASK.INIT_MR_WR_MASK =\
4466         LIBERO_SETTING_INIT_MR_WR_MASK;
4467     DDRCFG->MC_BASE2.INIT_NOP.INIT_NOP = LIBERO_SETTING_INIT_NOP;
4468     DDRCFG->MC_BASE2.CFG_INIT_DURATION.CFG_INIT_DURATION =\
4469         LIBERO_SETTING_CFG_INIT_DURATION;
4470     DDRCFG->MC_BASE2.CFG_ZQINIT_CAL_DURATION.CFG_ZQINIT_CAL_DURATION =\
4471         LIBERO_SETTING_CFG_ZQINIT_CAL_DURATION;
4472     DDRCFG->MC_BASE2.CFG_ZQ_CAL_L_DURATION.CFG_ZQ_CAL_L_DURATION =\
4473         LIBERO_SETTING_CFG_ZQ_CAL_L_DURATION;
4474     DDRCFG->MC_BASE2.CFG_ZQ_CAL_S_DURATION.CFG_ZQ_CAL_S_DURATION =\
4475         LIBERO_SETTING_CFG_ZQ_CAL_S_DURATION;
4476     DDRCFG->MC_BASE2.CFG_ZQ_CAL_R_DURATION.CFG_ZQ_CAL_R_DURATION =\
4477         LIBERO_SETTING_CFG_ZQ_CAL_R_DURATION;
4478     DDRCFG->MC_BASE2.CFG_MRR.CFG_MRR = LIBERO_SETTING_CFG_MRR;
4479     DDRCFG->MC_BASE2.CFG_MRW.CFG_MRW = LIBERO_SETTING_CFG_MRW;
4480     DDRCFG->MC_BASE2.CFG_ODT_POWERDOWN.CFG_ODT_POWERDOWN =\
4481         LIBERO_SETTING_CFG_ODT_POWERDOWN;
4482     DDRCFG->MC_BASE2.CFG_WL.CFG_WL = LIBERO_SETTING_CFG_WL;
4483     DDRCFG->MC_BASE2.CFG_RL.CFG_RL = LIBERO_SETTING_CFG_RL;
4484     DDRCFG->MC_BASE2.CFG_CAL_READ_PERIOD.CFG_CAL_READ_PERIOD =\
4485         LIBERO_SETTING_CFG_CAL_READ_PERIOD;
4486     DDRCFG->MC_BASE2.CFG_NUM_CAL_READS.CFG_NUM_CAL_READS =\
4487         LIBERO_SETTING_CFG_NUM_CAL_READS;
4488     DDRCFG->MC_BASE2.INIT_SELF_REFRESH.INIT_SELF_REFRESH =\
4489         LIBERO_SETTING_INIT_SELF_REFRESH;
4490     DDRCFG->MC_BASE2.INIT_POWER_DOWN.INIT_POWER_DOWN =\
4491         LIBERO_SETTING_INIT_POWER_DOWN;
4492     DDRCFG->MC_BASE2.INIT_FORCE_WRITE.INIT_FORCE_WRITE =\
4493         LIBERO_SETTING_INIT_FORCE_WRITE;
4494     DDRCFG->MC_BASE2.INIT_FORCE_WRITE_CS.INIT_FORCE_WRITE_CS =\
4495         LIBERO_SETTING_INIT_FORCE_WRITE_CS;
4496     DDRCFG->MC_BASE2.CFG_CTRLR_INIT_DISABLE.CFG_CTRLR_INIT_DISABLE =\
4497         LIBERO_SETTING_CFG_CTRLR_INIT_DISABLE;
4498     DDRCFG->MC_BASE2.INIT_RDIMM_COMPLETE.INIT_RDIMM_COMPLETE =\
4499         LIBERO_SETTING_INIT_RDIMM_COMPLETE;
4500     DDRCFG->MC_BASE2.CFG_RDIMM_LAT.CFG_RDIMM_LAT =\
4501         LIBERO_SETTING_CFG_RDIMM_LAT;
4502     DDRCFG->MC_BASE2.CFG_RDIMM_BSIDE_INVERT.CFG_RDIMM_BSIDE_INVERT =\
4503         LIBERO_SETTING_CFG_RDIMM_BSIDE_INVERT;
4504     DDRCFG->MC_BASE2.CFG_LRDIMM.CFG_LRDIMM = LIBERO_SETTING_CFG_LRDIMM;
4505     DDRCFG->MC_BASE2.INIT_MEMORY_RESET_MASK.INIT_MEMORY_RESET_MASK =\
4506         LIBERO_SETTING_INIT_MEMORY_RESET_MASK;
4507     DDRCFG->MC_BASE2.CFG_RD_PREAMB_TOGGLE.CFG_RD_PREAMB_TOGGLE =\
4508         LIBERO_SETTING_CFG_RD_PREAMB_TOGGLE;
4509     DDRCFG->MC_BASE2.CFG_RD_POSTAMBLE.CFG_RD_POSTAMBLE =\
4510         LIBERO_SETTING_CFG_RD_POSTAMBLE;
4511     DDRCFG->MC_BASE2.CFG_PU_CAL.CFG_PU_CAL = LIBERO_SETTING_CFG_PU_CAL;
4512     DDRCFG->MC_BASE2.CFG_DQ_ODT.CFG_DQ_ODT = LIBERO_SETTING_CFG_DQ_ODT;
4513     DDRCFG->MC_BASE2.CFG_CA_ODT.CFG_CA_ODT = LIBERO_SETTING_CFG_CA_ODT;
4514     DDRCFG->MC_BASE2.CFG_ZQLATCH_DURATION.CFG_ZQLATCH_DURATION =\
4515         LIBERO_SETTING_CFG_ZQLATCH_DURATION;
4516     DDRCFG->MC_BASE2.INIT_CAL_SELECT.INIT_CAL_SELECT =\
4517         LIBERO_SETTING_INIT_CAL_SELECT;
4518     DDRCFG->MC_BASE2.INIT_CAL_L_R_REQ.INIT_CAL_L_R_REQ =\
4519         LIBERO_SETTING_INIT_CAL_L_R_REQ;
4520     DDRCFG->MC_BASE2.INIT_CAL_L_B_SIZE.INIT_CAL_L_B_SIZE =\
4521         LIBERO_SETTING_INIT_CAL_L_B_SIZE;
4522     DDRCFG->MC_BASE2.INIT_RWFIFO.INIT_RWFIFO = LIBERO_SETTING_INIT_RWFIFO;
4523     DDRCFG->MC_BASE2.INIT_RD_DQCAL.INIT_RD_DQCAL =\
4524         LIBERO_SETTING_INIT_RD_DQCAL;
4525     DDRCFG->MC_BASE2.INIT_START_DQSOSC.INIT_START_DQSOSC =\
4526         LIBERO_SETTING_INIT_START_DQSOSC;
4527     DDRCFG->MC_BASE2.INIT_STOP_DQSOSC.INIT_STOP_DQSOSC =\
4528         LIBERO_SETTING_INIT_STOP_DQSOSC;
4529     DDRCFG->MC_BASE2.INIT_ZQ_CAL_START.INIT_ZQ_CAL_START =\
4530         LIBERO_SETTING_INIT_ZQ_CAL_START;
4531     DDRCFG->MC_BASE2.CFG_WR_POSTAMBLE.CFG_WR_POSTAMBLE =\
4532         LIBERO_SETTING_CFG_WR_POSTAMBLE;
4533     DDRCFG->MC_BASE2.INIT_CAL_L_ADDR_0.INIT_CAL_L_ADDR_0 =\
4534         LIBERO_SETTING_INIT_CAL_L_ADDR_0;
4535     DDRCFG->MC_BASE2.INIT_CAL_L_ADDR_1.INIT_CAL_L_ADDR_1 =\
4536         LIBERO_SETTING_INIT_CAL_L_ADDR_1;
4537     DDRCFG->MC_BASE2.CFG_CTRLUPD_TRIG.CFG_CTRLUPD_TRIG =\
4538         LIBERO_SETTING_CFG_CTRLUPD_TRIG;
4539     DDRCFG->MC_BASE2.CFG_CTRLUPD_START_DELAY.CFG_CTRLUPD_START_DELAY =\
4540         LIBERO_SETTING_CFG_CTRLUPD_START_DELAY;
4541     DDRCFG->MC_BASE2.CFG_DFI_T_CTRLUPD_MAX.CFG_DFI_T_CTRLUPD_MAX =\
4542         LIBERO_SETTING_CFG_DFI_T_CTRLUPD_MAX;
4543     DDRCFG->MC_BASE2.CFG_CTRLR_BUSY_SEL.CFG_CTRLR_BUSY_SEL =\
4544         LIBERO_SETTING_CFG_CTRLR_BUSY_SEL;
4545     DDRCFG->MC_BASE2.CFG_CTRLR_BUSY_VALUE.CFG_CTRLR_BUSY_VALUE =\
4546         LIBERO_SETTING_CFG_CTRLR_BUSY_VALUE;
4547     DDRCFG->MC_BASE2.CFG_CTRLR_BUSY_TURN_OFF_DELAY.CFG_CTRLR_BUSY_TURN_OFF_DELAY =\
4548         LIBERO_SETTING_CFG_CTRLR_BUSY_TURN_OFF_DELAY;
4549     DDRCFG->MC_BASE2.CFG_CTRLR_BUSY_SLOW_RESTART_WINDOW.CFG_CTRLR_BUSY_SLOW_RESTART_WINDOW =\
4550         LIBERO_SETTING_CFG_CTRLR_BUSY_SLOW_RESTART_WINDOW;
4551     DDRCFG->MC_BASE2.CFG_CTRLR_BUSY_RESTART_HOLDOFF.CFG_CTRLR_BUSY_RESTART_HOLDOFF =\
4552         LIBERO_SETTING_CFG_CTRLR_BUSY_RESTART_HOLDOFF;
4553     DDRCFG->MC_BASE2.CFG_PARITY_RDIMM_DELAY.CFG_PARITY_RDIMM_DELAY =\
4554         LIBERO_SETTING_CFG_PARITY_RDIMM_DELAY;
4555     DDRCFG->MC_BASE2.CFG_CTRLR_BUSY_ENABLE.CFG_CTRLR_BUSY_ENABLE =\
4556         LIBERO_SETTING_CFG_CTRLR_BUSY_ENABLE;
4557     DDRCFG->MC_BASE2.CFG_ASYNC_ODT.CFG_ASYNC_ODT =\
4558         LIBERO_SETTING_CFG_ASYNC_ODT;
4559     DDRCFG->MC_BASE2.CFG_ZQ_CAL_DURATION.CFG_ZQ_CAL_DURATION =\
4560         LIBERO_SETTING_CFG_ZQ_CAL_DURATION;
4561     DDRCFG->MC_BASE2.CFG_MRRI.CFG_MRRI = LIBERO_SETTING_CFG_MRRI;
4562     DDRCFG->MC_BASE2.INIT_ODT_FORCE_EN.INIT_ODT_FORCE_EN =\
4563         LIBERO_SETTING_INIT_ODT_FORCE_EN;
4564     DDRCFG->MC_BASE2.INIT_ODT_FORCE_RANK.INIT_ODT_FORCE_RANK =\
4565         LIBERO_SETTING_INIT_ODT_FORCE_RANK;
4566     DDRCFG->MC_BASE2.CFG_PHYUPD_ACK_DELAY.CFG_PHYUPD_ACK_DELAY =\
4567         LIBERO_SETTING_CFG_PHYUPD_ACK_DELAY;
4568     DDRCFG->MC_BASE2.CFG_MIRROR_X16_BG0_BG1.CFG_MIRROR_X16_BG0_BG1 =\
4569         LIBERO_SETTING_CFG_MIRROR_X16_BG0_BG1;
4570     DDRCFG->MC_BASE2.INIT_PDA_MR_W_REQ.INIT_PDA_MR_W_REQ =\
4571         LIBERO_SETTING_INIT_PDA_MR_W_REQ;
4572     DDRCFG->MC_BASE2.INIT_PDA_NIBBLE_SELECT.INIT_PDA_NIBBLE_SELECT =\
4573         LIBERO_SETTING_INIT_PDA_NIBBLE_SELECT;
4574     DDRCFG->MC_BASE2.CFG_DRAM_CLK_DISABLE_IN_SELF_REFRESH.CFG_DRAM_CLK_DISABLE_IN_SELF_REFRESH =\
4575         LIBERO_SETTING_CFG_DRAM_CLK_DISABLE_IN_SELF_REFRESH;
4576     DDRCFG->MC_BASE2.CFG_CKSRE.CFG_CKSRE = LIBERO_SETTING_CFG_CKSRE;
4577     DDRCFG->MC_BASE2.CFG_CKSRX.CFG_CKSRX = LIBERO_SETTING_CFG_CKSRX;
4578     DDRCFG->MC_BASE2.CFG_RCD_STAB.CFG_RCD_STAB = LIBERO_SETTING_CFG_RCD_STAB;
4579     DDRCFG->MC_BASE2.CFG_DFI_T_CTRL_DELAY.CFG_DFI_T_CTRL_DELAY =\
4580         LIBERO_SETTING_CFG_DFI_T_CTRL_DELAY;
4581     DDRCFG->MC_BASE2.CFG_DFI_T_DRAM_CLK_ENABLE.CFG_DFI_T_DRAM_CLK_ENABLE =\
4582         LIBERO_SETTING_CFG_DFI_T_DRAM_CLK_ENABLE;
4583     DDRCFG->MC_BASE2.CFG_IDLE_TIME_TO_SELF_REFRESH.CFG_IDLE_TIME_TO_SELF_REFRESH =\
4584         LIBERO_SETTING_CFG_IDLE_TIME_TO_SELF_REFRESH;
4585     DDRCFG->MC_BASE2.CFG_IDLE_TIME_TO_POWER_DOWN.CFG_IDLE_TIME_TO_POWER_DOWN =\
4586         LIBERO_SETTING_CFG_IDLE_TIME_TO_POWER_DOWN;
4587     DDRCFG->MC_BASE2.CFG_BURST_RW_REFRESH_HOLDOFF.CFG_BURST_RW_REFRESH_HOLDOFF =\
4588         LIBERO_SETTING_CFG_BURST_RW_REFRESH_HOLDOFF;
4589     DDRCFG->MC_BASE2.CFG_BG_INTERLEAVE.CFG_BG_INTERLEAVE =\
4590         LIBERO_SETTING_CFG_BG_INTERLEAVE;
4591     DDRCFG->MC_BASE2.CFG_REFRESH_DURING_PHY_TRAINING.CFG_REFRESH_DURING_PHY_TRAINING =\
4592         LIBERO_SETTING_CFG_REFRESH_DURING_PHY_TRAINING;
4593     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P0.CFG_STARVE_TIMEOUT_P0 =\
4594         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P0;
4595     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P1.CFG_STARVE_TIMEOUT_P1 =\
4596         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P1;
4597     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P2.CFG_STARVE_TIMEOUT_P2 =\
4598         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P2;
4599     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P3.CFG_STARVE_TIMEOUT_P3 =\
4600         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P3;
4601     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P4.CFG_STARVE_TIMEOUT_P4 =\
4602         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P4;
4603     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P5.CFG_STARVE_TIMEOUT_P5 =\
4604         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P5;
4605     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P6.CFG_STARVE_TIMEOUT_P6 =\
4606         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P6;
4607     DDRCFG->MPFE.CFG_STARVE_TIMEOUT_P7.CFG_STARVE_TIMEOUT_P7 =\
4608         LIBERO_SETTING_CFG_STARVE_TIMEOUT_P7;
4609     DDRCFG->REORDER.CFG_REORDER_EN.CFG_REORDER_EN =\
4610         LIBERO_SETTING_CFG_REORDER_EN;
4611     DDRCFG->REORDER.CFG_REORDER_QUEUE_EN.CFG_REORDER_QUEUE_EN =\
4612         LIBERO_SETTING_CFG_REORDER_QUEUE_EN;
4613     DDRCFG->REORDER.CFG_INTRAPORT_REORDER_EN.CFG_INTRAPORT_REORDER_EN =\
4614         LIBERO_SETTING_CFG_INTRAPORT_REORDER_EN;
4615     DDRCFG->REORDER.CFG_MAINTAIN_COHERENCY.CFG_MAINTAIN_COHERENCY =\
4616         LIBERO_SETTING_CFG_MAINTAIN_COHERENCY;
4617     DDRCFG->REORDER.CFG_Q_AGE_LIMIT.CFG_Q_AGE_LIMIT =\
4618         LIBERO_SETTING_CFG_Q_AGE_LIMIT;
4619     DDRCFG->REORDER.CFG_RO_CLOSED_PAGE_POLICY.CFG_RO_CLOSED_PAGE_POLICY =\
4620         LIBERO_SETTING_CFG_RO_CLOSED_PAGE_POLICY;
4621     DDRCFG->REORDER.CFG_REORDER_RW_ONLY.CFG_REORDER_RW_ONLY =\
4622         LIBERO_SETTING_CFG_REORDER_RW_ONLY;
4623     DDRCFG->REORDER.CFG_RO_PRIORITY_EN.CFG_RO_PRIORITY_EN =\
4624         LIBERO_SETTING_CFG_RO_PRIORITY_EN;
4625     DDRCFG->RMW.CFG_DM_EN.CFG_DM_EN = LIBERO_SETTING_CFG_DM_EN;
4626     DDRCFG->RMW.CFG_RMW_EN.CFG_RMW_EN = LIBERO_SETTING_CFG_RMW_EN;
4627     DDRCFG->ECC.CFG_ECC_CORRECTION_EN.CFG_ECC_CORRECTION_EN =\
4628         LIBERO_SETTING_CFG_ECC_CORRECTION_EN;
4629     DDRCFG->ECC.CFG_ECC_BYPASS.CFG_ECC_BYPASS = LIBERO_SETTING_CFG_ECC_BYPASS;
4630     DDRCFG->ECC.INIT_WRITE_DATA_1B_ECC_ERROR_GEN.INIT_WRITE_DATA_1B_ECC_ERROR_GEN =\
4631         LIBERO_SETTING_INIT_WRITE_DATA_1B_ECC_ERROR_GEN;
4632     DDRCFG->ECC.INIT_WRITE_DATA_2B_ECC_ERROR_GEN.INIT_WRITE_DATA_2B_ECC_ERROR_GEN =\
4633         LIBERO_SETTING_INIT_WRITE_DATA_2B_ECC_ERROR_GEN;
4634     DDRCFG->ECC.CFG_ECC_1BIT_INT_THRESH.CFG_ECC_1BIT_INT_THRESH =\
4635         LIBERO_SETTING_CFG_ECC_1BIT_INT_THRESH;
4636     DDRCFG->READ_CAPT.INIT_READ_CAPTURE_ADDR.INIT_READ_CAPTURE_ADDR =\
4637         LIBERO_SETTING_INIT_READ_CAPTURE_ADDR;
4638     DDRCFG->MTA.CFG_ERROR_GROUP_SEL.CFG_ERROR_GROUP_SEL =\
4639         LIBERO_SETTING_CFG_ERROR_GROUP_SEL;
4640     DDRCFG->MTA.CFG_DATA_SEL.CFG_DATA_SEL = LIBERO_SETTING_CFG_DATA_SEL;
4641     DDRCFG->MTA.CFG_TRIG_MODE.CFG_TRIG_MODE = LIBERO_SETTING_CFG_TRIG_MODE;
4642     DDRCFG->MTA.CFG_POST_TRIG_CYCS.CFG_POST_TRIG_CYCS =\
4643         LIBERO_SETTING_CFG_POST_TRIG_CYCS;
4644     DDRCFG->MTA.CFG_TRIG_MASK.CFG_TRIG_MASK = LIBERO_SETTING_CFG_TRIG_MASK;
4645     DDRCFG->MTA.CFG_EN_MASK.CFG_EN_MASK = LIBERO_SETTING_CFG_EN_MASK;
4646     DDRCFG->MTA.MTC_ACQ_ADDR.MTC_ACQ_ADDR = LIBERO_SETTING_MTC_ACQ_ADDR;
4647     DDRCFG->MTA.CFG_TRIG_MT_ADDR_0.CFG_TRIG_MT_ADDR_0 =\
4648         LIBERO_SETTING_CFG_TRIG_MT_ADDR_0;
4649     DDRCFG->MTA.CFG_TRIG_MT_ADDR_1.CFG_TRIG_MT_ADDR_1 =\
4650         LIBERO_SETTING_CFG_TRIG_MT_ADDR_1;
4651     DDRCFG->MTA.CFG_TRIG_ERR_MASK_0.CFG_TRIG_ERR_MASK_0 =\
4652         LIBERO_SETTING_CFG_TRIG_ERR_MASK_0;
4653     DDRCFG->MTA.CFG_TRIG_ERR_MASK_1.CFG_TRIG_ERR_MASK_1 =\
4654         LIBERO_SETTING_CFG_TRIG_ERR_MASK_1;
4655     DDRCFG->MTA.CFG_TRIG_ERR_MASK_2.CFG_TRIG_ERR_MASK_2 =\
4656         LIBERO_SETTING_CFG_TRIG_ERR_MASK_2;
4657     DDRCFG->MTA.CFG_TRIG_ERR_MASK_3.CFG_TRIG_ERR_MASK_3 =\
4658         LIBERO_SETTING_CFG_TRIG_ERR_MASK_3;
4659     DDRCFG->MTA.CFG_TRIG_ERR_MASK_4.CFG_TRIG_ERR_MASK_4 =\
4660         LIBERO_SETTING_CFG_TRIG_ERR_MASK_4;
4661     DDRCFG->MTA.MTC_ACQ_WR_DATA_0.MTC_ACQ_WR_DATA_0 =\
4662         LIBERO_SETTING_MTC_ACQ_WR_DATA_0;
4663     DDRCFG->MTA.MTC_ACQ_WR_DATA_1.MTC_ACQ_WR_DATA_1 =\
4664         LIBERO_SETTING_MTC_ACQ_WR_DATA_1;
4665     DDRCFG->MTA.MTC_ACQ_WR_DATA_2.MTC_ACQ_WR_DATA_2 =\
4666         LIBERO_SETTING_MTC_ACQ_WR_DATA_2;
4667     DDRCFG->MTA.CFG_PRE_TRIG_CYCS.CFG_PRE_TRIG_CYCS =\
4668         LIBERO_SETTING_CFG_PRE_TRIG_CYCS;
4669     DDRCFG->MTA.CFG_DATA_SEL_FIRST_ERROR.CFG_DATA_SEL_FIRST_ERROR =\
4670         LIBERO_SETTING_CFG_DATA_SEL_FIRST_ERROR;
4671     DDRCFG->DYN_WIDTH_ADJ.CFG_DQ_WIDTH.CFG_DQ_WIDTH =\
4672         LIBERO_SETTING_CFG_DQ_WIDTH;
4673     DDRCFG->DYN_WIDTH_ADJ.CFG_ACTIVE_DQ_SEL.CFG_ACTIVE_DQ_SEL =\
4674         LIBERO_SETTING_CFG_ACTIVE_DQ_SEL;
4675     DDRCFG->CA_PAR_ERR.INIT_CA_PARITY_ERROR_GEN_REQ.INIT_CA_PARITY_ERROR_GEN_REQ =\
        LIBERO_SETTING_INIT_CA_PARITY_ERROR_GEN_REQ;
    DDRCFG->CA_PAR_ERR.INIT_CA_PARITY_ERROR_GEN_CMD.INIT_CA_PARITY_ERROR_GEN_CMD =\
        LIBERO_SETTING_INIT_CA_PARITY_ERROR_GEN_CMD;
    DDRCFG->DFI.CFG_DFI_T_RDDATA_EN.CFG_DFI_T_RDDATA_EN =\
        LIBERO_SETTING_CFG_DFI_T_RDDATA_EN;
    DDRCFG->DFI.CFG_DFI_T_PHY_RDLAT.CFG_DFI_T_PHY_RDLAT =\
        LIBERO_SETTING_CFG_DFI_T_PHY_RDLAT;
    DDRCFG->DFI.CFG_DFI_T_PHY_WRLAT.CFG_DFI_T_PHY_WRLAT =\
        LIBERO_SETTING_CFG_DFI_T_PHY_WRLAT;
    DDRCFG->DFI.CFG_DFI_PHYUPD_EN.CFG_DFI_PHYUPD_EN =\
        LIBERO_SETTING_CFG_DFI_PHYUPD_EN;
    DDRCFG->DFI.INIT_DFI_LP_DATA_REQ.INIT_DFI_LP_DATA_REQ =\
        LIBERO_SETTING_INIT_DFI_LP_DATA_REQ;
    DDRCFG->DFI.INIT_DFI_LP_CTRL_REQ.INIT_DFI_LP_CTRL_REQ =\
        LIBERO_SETTING_INIT_DFI_LP_CTRL_REQ;
    DDRCFG->DFI.INIT_DFI_LP_WAKEUP.INIT_DFI_LP_WAKEUP =\
        LIBERO_SETTING_INIT_DFI_LP_WAKEUP;
    DDRCFG->DFI.INIT_DFI_DRAM_CLK_DISABLE.INIT_DFI_DRAM_CLK_DISABLE =\
        LIBERO_SETTING_INIT_DFI_DRAM_CLK_DISABLE;
    DDRCFG->DFI.CFG_DFI_DATA_BYTE_DISABLE.CFG_DFI_DATA_BYTE_DISABLE =\
        LIBERO_SETTING_CFG_DFI_DATA_BYTE_DISABLE;
    DDRCFG->DFI.CFG_DFI_LVL_SEL.CFG_DFI_LVL_SEL =\
        LIBERO_SETTING_CFG_DFI_LVL_SEL;
    DDRCFG->DFI.CFG_DFI_LVL_PERIODIC.CFG_DFI_LVL_PERIODIC =\
        LIBERO_SETTING_CFG_DFI_LVL_PERIODIC;
    DDRCFG->DFI.CFG_DFI_LVL_PATTERN.CFG_DFI_LVL_PATTERN =\
        LIBERO_SETTING_CFG_DFI_LVL_PATTERN;
    DDRCFG->DFI.PHY_DFI_INIT_START.PHY_DFI_INIT_START =\
        LIBERO_SETTING_PHY_DFI_INIT_START;
    DDRCFG->AXI_IF.CFG_AXI_START_ADDRESS_AXI1_0.CFG_AXI_START_ADDRESS_AXI1_0 =\
        LIBERO_SETTING_CFG_AXI_START_ADDRESS_AXI1_0;
    DDRCFG->AXI_IF.CFG_AXI_START_ADDRESS_AXI1_1.CFG_AXI_START_ADDRESS_AXI1_1 =\
        LIBERO_SETTING_CFG_AXI_START_ADDRESS_AXI1_1;
    DDRCFG->AXI_IF.CFG_AXI_START_ADDRESS_AXI2_0.CFG_AXI_START_ADDRESS_AXI2_0 =\
        LIBERO_SETTING_CFG_AXI_START_ADDRESS_AXI2_0;
    DDRCFG->AXI_IF.CFG_AXI_START_ADDRESS_AXI2_1.CFG_AXI_START_ADDRESS_AXI2_1 =\
        LIBERO_SETTING_CFG_AXI_START_ADDRESS_AXI2_1;
    DDRCFG->AXI_IF.CFG_AXI_END_ADDRESS_AXI1_0.CFG_AXI_END_ADDRESS_AXI1_0 =\
        LIBERO_SETTING_CFG_AXI_END_ADDRESS_AXI1_0;
    DDRCFG->AXI_IF.CFG_AXI_END_ADDRESS_AXI1_1.CFG_AXI_END_ADDRESS_AXI1_1 =\
        LIBERO_SETTING_CFG_AXI_END_ADDRESS_AXI1_1;
    DDRCFG->AXI_IF.CFG_AXI_END_ADDRESS_AXI2_0.CFG_AXI_END_ADDRESS_AXI2_0 =\
        LIBERO_SETTING_CFG_AXI_END_ADDRESS_AXI2_0;
    DDRCFG->AXI_IF.CFG_AXI_END_ADDRESS_AXI2_1.CFG_AXI_END_ADDRESS_AXI2_1 =\
        LIBERO_SETTING_CFG_AXI_END_ADDRESS_AXI2_1;
    DDRCFG->AXI_IF.CFG_MEM_START_ADDRESS_AXI1_0.CFG_MEM_START_ADDRESS_AXI1_0 =\
        LIBERO_SETTING_CFG_MEM_START_ADDRESS_AXI1_0;
    DDRCFG->AXI_IF.CFG_MEM_START_ADDRESS_AXI1_1.CFG_MEM_START_ADDRESS_AXI1_1 =\
        LIBERO_SETTING_CFG_MEM_START_ADDRESS_AXI1_1;
    DDRCFG->AXI_IF.CFG_MEM_START_ADDRESS_AXI2_0.CFG_MEM_START_ADDRESS_AXI2_0 =\
        LIBERO_SETTING_CFG_MEM_START_ADDRESS_AXI2_0;
    DDRCFG->AXI_IF.CFG_MEM_START_ADDRESS_AXI2_1.CFG_MEM_START_ADDRESS_AXI2_1 =\
        LIBERO_SETTING_CFG_MEM_START_ADDRESS_AXI2_1;
    DDRCFG->AXI_IF.CFG_ENABLE_BUS_HOLD_AXI1.CFG_ENABLE_BUS_HOLD_AXI1 =\
        LIBERO_SETTING_CFG_ENABLE_BUS_HOLD_AXI1;
    DDRCFG->AXI_IF.CFG_ENABLE_BUS_HOLD_AXI2.CFG_ENABLE_BUS_HOLD_AXI2 =\
        LIBERO_SETTING_CFG_ENABLE_BUS_HOLD_AXI2;
    DDRCFG->AXI_IF.CFG_AXI_AUTO_PCH.CFG_AXI_AUTO_PCH =\
        LIBERO_SETTING_CFG_AXI_AUTO_PCH;
    DDRCFG->csr_custom.PHY_RESET_CONTROL.PHY_RESET_CONTROL =\
        LIBERO_SETTING_PHY_RESET_CONTROL;
    DDRCFG->csr_custom.PHY_RESET_CONTROL.PHY_RESET_CONTROL =\
        (LIBERO_SETTING_PHY_RESET_CONTROL & ~0x8000UL);
    DDRCFG->csr_custom.PHY_PC_RANK.PHY_PC_RANK = LIBERO_SETTING_PHY_PC_RANK;
    DDRCFG->csr_custom.PHY_RANKS_TO_TRAIN.PHY_RANKS_TO_TRAIN =\
        LIBERO_SETTING_PHY_RANKS_TO_TRAIN;
    DDRCFG->csr_custom.PHY_WRITE_REQUEST.PHY_WRITE_REQUEST =\
        LIBERO_SETTING_PHY_WRITE_REQUEST;
    DDRCFG->csr_custom.PHY_READ_REQUEST.PHY_READ_REQUEST =\
        LIBERO_SETTING_PHY_READ_REQUEST;
    DDRCFG->csr_custom.PHY_WRITE_LEVEL_DELAY.PHY_WRITE_LEVEL_DELAY =\
        LIBERO_SETTING_PHY_WRITE_LEVEL_DELAY;
    DDRCFG->csr_custom.PHY_GATE_TRAIN_DELAY.PHY_GATE_TRAIN_DELAY =\
        LIBERO_SETTING_PHY_GATE_TRAIN_DELAY;
    DDRCFG->csr_custom.PHY_EYE_TRAIN_DELAY.PHY_EYE_TRAIN_DELAY =\
        LIBERO_SETTING_PHY_EYE_TRAIN_DELAY;
    DDRCFG->csr_custom.PHY_EYE_PAT.PHY_EYE_PAT = LIBERO_SETTING_PHY_EYE_PAT;
    DDRCFG->csr_custom.PHY_START_RECAL.PHY_START_RECAL =\
        LIBERO_SETTING_PHY_START_RECAL;
    DDRCFG->csr_custom.PHY_CLR_DFI_LVL_PERIODIC.PHY_CLR_DFI_LVL_PERIODIC =\
        LIBERO_SETTING_PHY_CLR_DFI_LVL_PERIODIC;
    DDRCFG->csr_custom.PHY_TRAIN_STEP_ENABLE.PHY_TRAIN_STEP_ENABLE =\
        LIBERO_SETTING_PHY_TRAIN_STEP_ENABLE;
    DDRCFG->csr_custom.PHY_LPDDR_DQ_CAL_PAT.PHY_LPDDR_DQ_CAL_PAT =\
        LIBERO_SETTING_PHY_LPDDR_DQ_CAL_PAT;
    DDRCFG->csr_custom.PHY_INDPNDT_TRAINING.PHY_INDPNDT_TRAINING =\
        LIBERO_SETTING_PHY_INDPNDT_TRAINING;
    DDRCFG->csr_custom.PHY_ENCODED_QUAD_CS.PHY_ENCODED_QUAD_CS =\
        LIBERO_SETTING_PHY_ENCODED_QUAD_CS;
    DDRCFG->csr_custom.PHY_HALF_CLK_DLY_ENABLE.PHY_HALF_CLK_DLY_ENABLE =\
        LIBERO_SETTING_PHY_HALF_CLK_DLY_ENABLE;

}
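
/*
 * Note on the register writes above: every write follows the same pattern -
 * the Libero-generated LIBERO_SETTING_<NAME> value is written to the DDRCFG
 * register field of the same name. A hypothetical helper macro (not part of
 * this HAL, shown only as a sketch of the pattern) could express one of these
 * writes as:
 *
 * @code
 *     #define DDRCFG_WRITE_SETTING(block, reg) \
 *         (DDRCFG->block.reg.reg = LIBERO_SETTING_##reg)
 *
 *     DDRCFG_WRITE_SETTING(DFI, CFG_DFI_T_RDDATA_EN);
 * @endcode
 *
 * The masked PHY_RESET_CONTROL write is the exception to the pattern and is
 * kept explicit above.
 */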


/**
 * setup_ddr_segments(SEG_SETUP option)
 * Sets up the segment registers - translates the DDR address as the user
 * requires.
 */
void setup_ddr_segments(SEG_SETUP option)
{
    if(option == DEFAULT_SEG_SETUP)
    {
        SEG[0].u[0].raw = (INIT_SETTING_SEG0_0 & 0x7FFFUL);
        SEG[0].u[1].raw = (INIT_SETTING_SEG0_1 & 0x7FFFUL);
        SEG[1].u[2].raw = (INIT_SETTING_SEG1_2 & 0x7FFFUL);
        SEG[1].u[3].raw = (INIT_SETTING_SEG1_3 & 0x7FFFUL);
        SEG[1].u[4].raw = (INIT_SETTING_SEG1_4 & 0x7FFFUL);
        SEG[1].u[5].raw = (INIT_SETTING_SEG1_5 & 0x7FFFUL);
    }
    else
    {
        SEG[0].u[0].raw = (LIBERO_SETTING_SEG0_0 & 0x7FFFUL);
        SEG[0].u[1].raw = (LIBERO_SETTING_SEG0_1 & 0x7FFFUL);
        SEG[1].u[2].raw = (LIBERO_SETTING_SEG1_2 & 0x7FFFUL);
        SEG[1].u[3].raw = (LIBERO_SETTING_SEG1_3 & 0x7FFFUL);
        SEG[1].u[4].raw = (LIBERO_SETTING_SEG1_4 & 0x7FFFUL);
        SEG[1].u[5].raw = (LIBERO_SETTING_SEG1_5 & 0x7FFFUL);
    }
    /*
     * Disable the DDR blocker
     * This bit is cleared at reset. Writing '1' disables the blocker function,
     * allowing the L2 cache controller to access the DDRC. Once written to
     * '1', the register cannot be written back to '0'; only an MSS reset will
     * clear it.
     */
    SEG[0].u[7].raw = 0x01U;
}
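
/*
 * Typical usage (a minimal sketch, assuming the SEG_SETUP enum also provides
 * a LIBERO_SEG_SETUP value for the non-default case):
 *
 * @code
 *     setup_ddr_segments(DEFAULT_SEG_SETUP);  // use the INIT_SETTING_SEGx_y values
 *     // ... DDR training / memory test ...
 *     setup_ddr_segments(LIBERO_SEG_SETUP);   // switch to the LIBERO_SETTING_SEGx_y values
 * @endcode
 */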

/**
 * use_software_bclk_sclk_training()
 * @param ddr_type
 * @return 1U if software bclk/sclk training is required, otherwise 0U
 */
static uint8_t use_software_bclk_sclk_training(DDR_TYPE ddr_type)
{
    uint8_t result = 0U;
    switch (ddr_type)
    {
        default:
        case DDR_OFF_MODE:
            break;
        case DDR3L:
            result = 1U;
            break;
        case DDR3:
            result = 1U;
            break;
        case DDR4:
            result = 1U;
            break;
        case LPDDR3:
            result = 1U;
            break;
        case LPDDR4:
            result = 1U;
            break;
    }
    return(result);
}
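
/*
 * For the enumerated DDR_TYPE values the switch above reduces to the
 * following (sketch only): software bclk/sclk training is used for every
 * supported DDR type and skipped only in DDR_OFF_MODE.
 *
 * @code
 *     return (ddr_type != DDR_OFF_MODE) ? 1U : 0U;
 * @endcode
 */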

/**
 * config_ddr_io_pull_up_downs_rpc_bits()
 *
 * This function overrides the RPC bits related to the weak pull-ups and
 * weak pull-downs. It also sets the override bit if the I/O is disabled.
 * The settings come from Libero.
 *
 * Note: if LIBERO_SETTING_RPC_EN_ADDCMD0_OVRT9 is not present, this indicates
 * an older Libero MSS core (pre 2.0.109).
 * From version 2.0.109 of the Libero MSS core onwards, the weak pull-up and
 * pull-down settings come from Libero, along with the setting of unused MSS
 * DDR I/O to override.
 *
 */
static void config_ddr_io_pull_up_downs_rpc_bits(DDR_TYPE ddr_type)
{
    if(ddr_type == LPDDR4) /* we will add other variants here once verified */
    {
#ifdef LIBERO_SETTING_RPC_EN_ADDCMD0_OVRT9
        /* set over-rides (associated bit set to 1 if I/O not being used) */
        CFG_DDR_SGMII_PHY->ovrt9.ovrt9   = LIBERO_SETTING_RPC_EN_ADDCMD0_OVRT9;
        CFG_DDR_SGMII_PHY->ovrt10.ovrt10 = LIBERO_SETTING_RPC_EN_ADDCMD1_OVRT10;
        CFG_DDR_SGMII_PHY->ovrt11.ovrt11 = LIBERO_SETTING_RPC_EN_ADDCMD2_OVRT11;
        CFG_DDR_SGMII_PHY->ovrt12.ovrt12 = LIBERO_SETTING_RPC_EN_DATA0_OVRT12;
        CFG_DDR_SGMII_PHY->ovrt13.ovrt13 = LIBERO_SETTING_RPC_EN_DATA1_OVRT13;
        CFG_DDR_SGMII_PHY->ovrt14.ovrt14 = LIBERO_SETTING_RPC_EN_DATA2_OVRT14;
        CFG_DDR_SGMII_PHY->ovrt15.ovrt15 = LIBERO_SETTING_RPC_EN_DATA3_OVRT15;
        CFG_DDR_SGMII_PHY->ovrt16.ovrt16 = LIBERO_SETTING_RPC_EN_ECC_OVRT16;
        /* set the required wpd state - note: associated I/O bit 1=> off, 0=> on */
        CFG_DDR_SGMII_PHY->rpc235.rpc235 = LIBERO_SETTING_RPC235_WPD_ADD_CMD0;
        CFG_DDR_SGMII_PHY->rpc236.rpc236 = LIBERO_SETTING_RPC236_WPD_ADD_CMD1;
        CFG_DDR_SGMII_PHY->rpc237.rpc237 = LIBERO_SETTING_RPC237_WPD_ADD_CMD2;
        CFG_DDR_SGMII_PHY->rpc238.rpc238 = LIBERO_SETTING_RPC238_WPD_DATA0;
        CFG_DDR_SGMII_PHY->rpc239.rpc239 = LIBERO_SETTING_RPC239_WPD_DATA1;
        CFG_DDR_SGMII_PHY->rpc240.rpc240 = LIBERO_SETTING_RPC240_WPD_DATA2;
        CFG_DDR_SGMII_PHY->rpc241.rpc241 = LIBERO_SETTING_RPC241_WPD_DATA3;
        CFG_DDR_SGMII_PHY->rpc242.rpc242 = LIBERO_SETTING_RPC242_WPD_ECC;
        /* set the required wpu state - note: associated I/O bit 1=> off, 0=> on */
        CFG_DDR_SGMII_PHY->rpc243.rpc243 = LIBERO_SETTING_RPC243_WPU_ADD_CMD0;
        CFG_DDR_SGMII_PHY->rpc244.rpc244 = LIBERO_SETTING_RPC244_WPU_ADD_CMD1;
        CFG_DDR_SGMII_PHY->rpc245.rpc245 = LIBERO_SETTING_RPC245_WPU_ADD_CMD2;
        CFG_DDR_SGMII_PHY->rpc246.rpc246 = LIBERO_SETTING_RPC246_WPU_DATA0;
        CFG_DDR_SGMII_PHY->rpc247.rpc247 = LIBERO_SETTING_RPC247_WPU_DATA1;
        CFG_DDR_SGMII_PHY->rpc248.rpc248 = LIBERO_SETTING_RPC248_WPU_DATA2;
        CFG_DDR_SGMII_PHY->rpc249.rpc249 = LIBERO_SETTING_RPC249_WPU_DATA3;
        CFG_DDR_SGMII_PHY->rpc250.rpc250 = LIBERO_SETTING_RPC250_WPU_ECC;
#endif
    }
}


/**
 * get the best sweep value
 * @param good_index
 * @return 0U if a good sweep setting was found, 1U otherwise
 */
#ifdef SWEEP_ENABLED
static uint8_t get_best_sweep(sweep_index *good_index)
{
#ifdef EXTRACT_SWEEP_RESULT
    uint8_t cmd_index;
    uint8_t bclk_sclk_index;
    uint8_t dpc_vgen_index;
    uint8_t dpc_vgen_h_index;
    uint8_t dpc_vgen_vs_index;
    uint8_t good_in_row;

    for (dpc_vgen_vs_index=0U; dpc_vgen_vs_index < MAX_NUMBER_DPC_VS_GEN_SWEEPS; dpc_vgen_vs_index++)
    {
        for (dpc_vgen_h_index=0U; dpc_vgen_h_index < MAX_NUMBER_DPC_H_GEN_SWEEPS; dpc_vgen_h_index++)
        {
            for (dpc_vgen_index=0U; dpc_vgen_index < MAX_NUMBER_DPC_V_GEN_SWEEPS; dpc_vgen_index++)
            {
                for (bclk_sclk_index=0U; bclk_sclk_index < MAX_NUMBER__BCLK_SCLK_OFFSET_SWEEPS; bclk_sclk_index++)
                {
                    good_in_row = 0U;
                    for (cmd_index=0U; cmd_index < MAX_NUMBER_ADDR_CMD_OFFSET_SWEEPS; cmd_index++)
                    {
                        if (sweep_results[dpc_vgen_vs_index][dpc_vgen_h_index][dpc_vgen_index][bclk_sclk_index][cmd_index]\
                                == CALIBRATION_PASSED)
                        {
                            good_in_row++;
                            /*
                             * look for 3 passes in a row in both the cmd and
                             * bclk_sclk directions and pick the middle one
                             */
                            if((good_in_row > 2U)&&(bclk_sclk_index>1)&&(bclk_sclk_index<MAX_NUMBER__BCLK_SCLK_OFFSET_SWEEPS-1))
                            {
                                if ((sweep_results[dpc_vgen_vs_index]\
                                        [dpc_vgen_h_index][dpc_vgen_index]\
                                        [bclk_sclk_index-1][cmd_index]\
                                                == CALIBRATION_PASSED)&&\
                                        (sweep_results[dpc_vgen_vs_index]\
                                                [dpc_vgen_h_index]\
                                                [dpc_vgen_index]\
                                                [bclk_sclk_index+1][cmd_index]\
                                                         == CALIBRATION_PASSED))
                                {
                                    good_index->dpc_vgen_vs_index = dpc_vgen_vs_index;
                                    good_index->dpc_vgen_h_index = dpc_vgen_h_index;
                                    good_index->bclk_sclk_index = bclk_sclk_index;
                                    good_index->dpc_vgen_index = dpc_vgen_index;
                                    good_index->cmd_index = cmd_index - 1U;
                                    return(0U);
                                }
                            }
                        }
                        else
                        {
                            good_in_row = 0U;
                        }
                    }
                }
            }
        }
    }
    return(1U);
#else  /* EXTRACT_SWEEP_RESULT */
    good_index->dpc_vgen_vs_index = 0U;
    good_index->dpc_vgen_h_index = 0U;
    good_index->bclk_sclk_index = 0U;
    good_index->dpc_vgen_index = 0U;
    good_index->cmd_index = 0U;
    return(0U);
#endif
}
#endif /* SWEEP_ENABLED */
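
/*
 * The selection rule used in get_best_sweep() can be illustrated in one
 * dimension: a minimal sketch (not part of the HAL) that returns the middle
 * index of the first run of three passing results in a pass/fail array.
 *
 * @code
 *     static uint8_t middle_of_first_run_of_three(const uint8_t *pass,
 *                                                 uint8_t length)
 *     {
 *         uint8_t index;
 *         uint8_t good_in_row = 0U;
 *         for (index = 0U; index < length; index++)
 *         {
 *             good_in_row = (pass[index] != 0U) ? (good_in_row + 1U) : 0U;
 *             if (good_in_row >= 3U)
 *             {
 *                 return (index - 1U);    // middle of the three passes
 *             }
 *         }
 *         return length;                  // no run of three found
 *     }
 * @endcode
 *
 * get_best_sweep() applies the same idea along the cmd offset dimension and
 * additionally requires the two neighbouring bclk_sclk offsets to have passed.
 */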


#ifdef DDR_DIAGNOSTICS /* todo: add support for diagnostics below during board bring-up */

/*-------------------------------------------------------------------------*//**
  The MSS_DDR_status() function is used to return status information to the
  user.

  TODO: Define number of request inputs

   @param option
    This option chooses the status data to be returned.

   @param return_data
    The requested data is returned here. This must be within a defined range.
    todo: Detail on the sharing of data will be system dependent.
    AMP/SMU detail to be finalized at time of writing.

  @return
    Returns 0 on success.
    TODO: Define error codes.

  Example:
    The call to MSS_DDR_status(DDR_TYPE, return_data) will return 0 if
    successful and the DDR type in the first four bytes of the ret_mem area.
    @code
    MSS_DDR_status( DDR_TYPE, ret_mem );
    @endcode
 */
uint8_t
MSS_DDR_status
(
    uint8_t option, uint32_t *return_data
)
{
    uint8_t error = 0U;

    switch (option)
    {
        case USR_OPTION_tip_register_dump:
            /* todo: WIP
             * add commands here */
            break;

        default:

            break;
    }

    return error;
}


/*-------------------------------------------------------------------------*//**
 * MSS_DDR_user_commands() handles commands from the user
 *
 * @param command
 *   User command
 * @param extra_command_data
 *   extra data from the user for the particular command
 * @param return_data
 *   data returned via the supplied pointer
 * @param return_size
 *   size in bytes of the return_data buffer
 * @return
 *   status 0 => success
 *
 *  Example:
      The call to
      MSS_DDR_user_commands(USR_CMD_INC_DELAY_LINE, &extra_data, return_data, 4U)
      will return 0 if successful and the
      requested data in the first four bytes of the return_data area.
      @code
      uint32_t extra_data = 0x01UL;
      MSS_DDR_user_commands(USR_CMD_INC_DELAY_LINE, &extra_data, return_data, 4U);
      @endcode
 */
uint8_t
MSS_DDR_user_commands
(
    uint8_t command, uint32_t *extra_command_data, uint32_t *return_data,  \
        uint32_t return_size
)
{
    uint8_t error = 0U;
    uint32_t *reg_address;

    switch (command)
    {
        case USR_CMD_GET_DDR_STATUS:
            break;
        case USR_CMD_GET_MODE_SETTING:
            break;
        case USR_CMD_GET_W_CALIBRATION:
            config_copy(return_data, &calib_data, sizeof(calib_data));
            break;
        case USR_CMD_GET_GREEN_ZONE:
            /* READ DQ WINDOW MEASUREMENT */
            /* READ DQS WINDOW MEASUREMENT */
            /* READ VREF WINDOW MAX MEASUREMENT */

            break;

        case USR_CMD_GET_REG:
            /*
             * First check if the requested address is valid
             */
            config_copy(&reg_address, extra_command_data, 4U);
            reg_address = (uint32_t *)((uint32_t)reg_address &\
                (uint32_t)(0xFFFFFFFCUL));
            if ((reg_address >=\
                &CFG_DDR_SGMII_PHY->SOFT_RESET_DDR_PHY.SOFT_RESET_DDR_PHY)\
                && (reg_address < &CFG_DDR_SGMII_PHY->SPARE_STAT.SPARE_STAT))
            {
                config_copy(return_data, reg_address, sizeof(uint32_t));
            }
            else
            {
                error = 1U;
            }
            break;

        /*
         * And set commands
         */
        case USR_CMD_SET_GREEN_ZONE_DQ:
            /* READ DQ WINDOW MEASUREMENT */
            /*
             * This procedure uses reads/writes & DQ delayline controls to
             * measure the maximum DQ offset before failure.
             */
            break;
        case USR_CMD_SET_GREEN_ZONE_DQS:
            /* READ DQS WINDOW MEASUREMENT */
            /*
             * This procedure uses reads/writes & DQS delayline controls to
             * measure the maximum DQS offset before failure.
             */
            break;
        case USR_CMD_SET_GREEN_ZONE_VREF_MAX:
            /* READ VREF WINDOW MAX MEASUREMENT */
            /*
             * This procedure uses reads/writes & VREF controller delayline
             * controls to measure the maximum VREF level.
             */
            break;
        case USR_CMD_SET_GREEN_ZONE_VREF_MIN:
            /* READ VREF WINDOW MIN MEASUREMENT */
            /*
             * This procedure uses reads/writes & VREF controller delayline
             * controls to measure the minimum VREF level.
             */
            break;
        case USR_CMD_SET_RETRAIN:
            /* Incremental, In-System Retraining Procedures */
            /*
             * This procedure adjusts the read window to re-center clock and
             * data.
             * It should be triggered when the DLL code value passes a certain
             * threshold, during a refresh cycle.
             * Added here to allow the user to trigger it.
             */
            break;
        case USR_CMD_SET_REG:
            break;

        default:
            error = 1U;
            break;
    }
    return error;
}
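
/*
 * Example use of the USR_CMD_GET_REG command (a sketch only; the register
 * address chosen here is the lower bound of the range checked above and is
 * used purely for illustration):
 *
 * @code
 *     uint32_t reg_addr =
 *         (uint32_t)&CFG_DDR_SGMII_PHY->SOFT_RESET_DDR_PHY.SOFT_RESET_DDR_PHY;
 *     uint32_t reg_value = 0U;
 *     if (MSS_DDR_user_commands(USR_CMD_GET_REG, &reg_addr, &reg_value,
 *                               sizeof(reg_value)) == 0U)
 *     {
 *         // reg_value now holds the contents of the requested register
 *     }
 * @endcode
 */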
#endif

#ifdef DEBUG_DDR_INIT
#ifdef DEBUG_DDR_DDRCFG
void debug_read_ddrcfg(void)
{
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->ADDR_MAP,\
                (sizeof(DDRCFG->ADDR_MAP)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->MC_BASE3,\
                (sizeof(DDRCFG->MC_BASE3)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->MC_BASE1,\
                (sizeof(DDRCFG->MC_BASE1)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->MC_BASE2,\
                (sizeof(DDRCFG->MC_BASE2)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->MPFE,\
                (sizeof(DDRCFG->MPFE)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->REORDER,\
                (sizeof(DDRCFG->REORDER)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->RMW,\
                (sizeof(DDRCFG->RMW)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->ECC,\
                (sizeof(DDRCFG->ECC)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->READ_CAPT,\
                (sizeof(DDRCFG->READ_CAPT)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->MTA,\
                (sizeof(DDRCFG->MTA)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->DYN_WIDTH_ADJ,\
                (sizeof(DDRCFG->DYN_WIDTH_ADJ)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->CA_PAR_ERR,\
                (sizeof(DDRCFG->CA_PAR_ERR)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->DFI,\
                (sizeof(DDRCFG->DFI)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->AXI_IF,\
                (sizeof(DDRCFG->AXI_IF)/4U));
    (void)print_reg_array(g_debug_uart,
                (uint32_t *)&DDRCFG->csr_custom,\
                (sizeof(DDRCFG->csr_custom)/4U));
    return;
}
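
/*
 * Typical usage (sketch): call after the DDR controller has been configured,
 * with g_debug_uart already initialised, to dump every DDRCFG block to the
 * debug UART.
 *
 * @code
 *     #ifdef DEBUG_DDR_INIT
 *     #ifdef DEBUG_DDR_DDRCFG
 *         debug_read_ddrcfg();
 *     #endif
 *     #endif
 * @endcode
 */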
#endif
#endif


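/*
 * REFCLK_OFFSETS[][5U]: the row index matches the DDR_TYPE value used in
 * ddr_manual_addcmd_refclk_offset(); the first five rows hold the higher
 * frequency settings, followed by the same DDR types again for the lower
 * frequency settings. Entry [0] of each row is the number of valid offsets,
 * entries [1..4] are the offsets themselves.
 */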
const uint8_t REFCLK_OFFSETS[][5U] = {
                {LIBERO_SETTING_REFCLK_DDR3_1333_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_DDR3_1333_OFFSET_0,
                LIBERO_SETTING_REFCLK_DDR3_1333_OFFSET_1,
                LIBERO_SETTING_REFCLK_DDR3_1333_OFFSET_2,
                LIBERO_SETTING_REFCLK_DDR3_1333_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_DDR3L_1333_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_DDR3L_1333_OFFSET_0,
                LIBERO_SETTING_REFCLK_DDR3L_1333_OFFSET_1,
                LIBERO_SETTING_REFCLK_DDR3L_1333_OFFSET_2,
                LIBERO_SETTING_REFCLK_DDR3L_1333_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_DDR4_1600_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_DDR4_1600_OFFSET_0,
                LIBERO_SETTING_REFCLK_DDR4_1600_OFFSET_1,
                LIBERO_SETTING_REFCLK_DDR4_1600_OFFSET_2,
                LIBERO_SETTING_REFCLK_DDR4_1600_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_LPDDR3_1600_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_LPDDR3_1600_OFFSET_0,
                LIBERO_SETTING_REFCLK_LPDDR3_1600_OFFSET_1,
                LIBERO_SETTING_REFCLK_LPDDR3_1600_OFFSET_2,
                LIBERO_SETTING_REFCLK_LPDDR3_1600_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_LPDDR4_1600_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_LPDDR4_1600_OFFSET_0,
                LIBERO_SETTING_REFCLK_LPDDR4_1600_OFFSET_1,
                LIBERO_SETTING_REFCLK_LPDDR4_1600_OFFSET_2,
                LIBERO_SETTING_REFCLK_LPDDR4_1600_OFFSET_3},

                {LIBERO_SETTING_REFCLK_DDR3_1067_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_DDR3_1067_OFFSET_0,
                LIBERO_SETTING_REFCLK_DDR3_1067_OFFSET_1,
                LIBERO_SETTING_REFCLK_DDR3_1067_OFFSET_2,
                LIBERO_SETTING_REFCLK_DDR3_1067_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_DDR3L_1067_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_DDR3L_1067_OFFSET_0,
                LIBERO_SETTING_REFCLK_DDR3L_1067_OFFSET_1,
                LIBERO_SETTING_REFCLK_DDR3L_1067_OFFSET_2,
                LIBERO_SETTING_REFCLK_DDR3L_1067_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_DDR4_1333_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_DDR4_1333_OFFSET_0,
                LIBERO_SETTING_REFCLK_DDR4_1333_OFFSET_1,
                LIBERO_SETTING_REFCLK_DDR4_1333_OFFSET_2,
                LIBERO_SETTING_REFCLK_DDR4_1333_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_LPDDR3_1333_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_LPDDR3_1333_OFFSET_0,
                LIBERO_SETTING_REFCLK_LPDDR3_1333_OFFSET_1,
                LIBERO_SETTING_REFCLK_LPDDR3_1333_OFFSET_2,
                LIBERO_SETTING_REFCLK_LPDDR3_1333_OFFSET_3},
                {
                LIBERO_SETTING_REFCLK_LPDDR4_1333_NUM_OFFSETS,
                LIBERO_SETTING_REFCLK_LPDDR4_1333_OFFSET_0,
                LIBERO_SETTING_REFCLK_LPDDR4_1333_OFFSET_1,
                LIBERO_SETTING_REFCLK_LPDDR4_1333_OFFSET_2,
                LIBERO_SETTING_REFCLK_LPDDR4_1333_OFFSET_3},
};

/**
 * ddr_manual_addcmd_refclk_offset()
 * This function determines the current sweep offset based on the DDR type.
 * @param ddr_type
 * @param refclk_sweep_index
 * @return the refclk offset to use for this sweep iteration
 */
#ifdef MANUAL_ADDCMD_TRAINIG
static uint8_t ddr_manual_addcmd_refclk_offset(DDR_TYPE ddr_type, uint8_t * refclk_sweep_index)
{
    uint8_t refclk_offset;
    uint8_t type_array_index;

    type_array_index = (uint8_t)ddr_type;
    switch (ddr_type)
    {
        case DDR3L:
        case DDR3:
            if(LIBERO_SETTING_DDR_CLK + DDR_FREQ_MARGIN < DDR_1333_MHZ)
            {
                type_array_index = type_array_index + (uint8_t)LPDDR4 + 1U;
            }
            break;
        case DDR4:
        case LPDDR3:
        case LPDDR4:
            if(LIBERO_SETTING_DDR_CLK + DDR_FREQ_MARGIN < DDR_1600_MHZ)
            {
                type_array_index = type_array_index + (uint8_t)LPDDR4 + 1U;
            }
            break;
        default:
        case DDR_OFF_MODE:
            break;
    }

    if (*refclk_sweep_index >= REFCLK_OFFSETS[type_array_index][0U])
    {
        *refclk_sweep_index = 0U;
    }

    refclk_offset = REFCLK_OFFSETS[type_array_index][*refclk_sweep_index + 1U];

    *refclk_sweep_index = (*refclk_sweep_index + 1U);

    return refclk_offset;
}
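
/*
 * Typical usage within a training retry loop (a sketch only): each call
 * returns the next refclk offset for the current DDR type and advances the
 * sweep index, wrapping once all offsets counted in REFCLK_OFFSETS[type][0]
 * have been used.
 *
 * @code
 *     uint8_t refclk_sweep_index = 0U;   // start from the first offset
 *     uint8_t refclk_offset;
 *
 *     refclk_offset = ddr_manual_addcmd_refclk_offset(DDR4,
 *                                                     &refclk_sweep_index);
 * @endcode
 */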
#endif


#endif /* DDR_SUPPORT */
