• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* SPDX-License-Identifier: BSD-3-Clause */
2 
3 //-----------------------------------------------------------------------------
4 // Include files
5 //-----------------------------------------------------------------------------
6 //#include "..\Common\pd_common.h"
7 //#include "Register.h"
8 #include "dramc_common.h"
9 #include "dramc_dv_init.h"
10 #include "dramc_int_global.h"
11 #include "x_hal_io.h"
12 #include "dramc_actiming.h"
13 
14 #include "dramc_reg_base_addr.h"
15 
16 #include "dramc_top.h"
17 
18 //#include "DramC_reg.h"
19 //#include "System_reg.h"
20 //#include "string.h"
21 
22 //-----------------------------------------------------------------------------
23 // Global variables
24 //-----------------------------------------------------------------------------
U8 u1PrintModeRegWrite = 0;	// presumably enables extra logging of MR writes — not referenced in this chunk; confirm against users

#if ENABLE_RODT_TRACKING_SAVE_MCK
// global variables for RODT tracking & ROEN
U8 u1ODT_ON;		// info of p->odt_onoff
U8 u1WDQS_ON = 0;	// info of WDQS on (ROEN=1)
U8 u1RODT_TRACK = 0;	// info of rodt tracking enable
U8 u1ROEN, u1ModeSel;	// status of ROEN, MODESEL setting
#endif
34 
//MRR DRAM->DRAMC
/*
 * LPDDR4 MRR bit swizzle tables, one set per package pinmux variant.
 * Indexed [pinmux type][channel][bit]: each 16-entry row remaps MRR
 * read-back bits between DRAM and DRAMC for that channel (direction per
 * the "MRR DRAM->DRAMC" note above). The selected variant is copied
 * into uiLPDDR4_MRR_Mapping_POP by Set_DRAM_Pinmux_Sel().
 */
const U8 uiLPDDR4_MRR_DRAM_Pinmux[PINMUX_MAX][CHANNEL_NUM][16] =
{
	{
	// for DSC (discrete LPDDR4 package)
		//CH-A
		{
			0, 1, 7, 6, 4, 5, 2, 3,
			9, 8, 11, 10, 14, 15, 13, 12
		},
		#if (CHANNEL_NUM>1)
		//CH-B
		{
			1, 0, 5, 6, 3, 2, 7, 4,
			8, 9, 11, 10, 12, 14, 13, 15
		},
		#endif
		#if (CHANNEL_NUM>2)
		//CH-C
		{
			0, 1, 7, 6, 4, 5, 2, 3,
			9, 8, 11, 10, 14, 15, 13, 12
		},
		//CH-D
		{
			1, 0, 5, 6, 3, 2, 7, 4,
			8, 9, 11, 10, 12, 14, 13, 15
		},
		#endif

	},
	{
	// for LPBK (loopback) — entries intentionally empty
		// TODO: need porting
	},
	{
	// for EMCP (embedded MCP package)
		//CH-A
		{
			1, 0, 3, 2, 4, 7, 6, 5,
			8, 9, 10, 14, 11, 15, 13, 12
		},
		#if (CHANNEL_NUM>1)
		//CH-B
		{
			0, 1, 4, 7, 3, 5, 6, 2,
			9, 8, 10, 12, 11, 14, 13, 15
		},
		#endif
		#if (CHANNEL_NUM>2)
		//CH-C
		{
			1, 0, 3, 2, 4, 7, 6, 5,
			8, 9, 10, 14, 11, 15, 13, 12
		},
		//CH-D
		{
			0, 1, 4, 7, 3, 5, 6, 2,
			9, 8, 10, 12, 11, 14, 13, 15
		},
		#endif
	},
};
98 
#if (__LP5_COMBO__)
/*
 * LPDDR5 MRR bit swizzle per channel (MRR DRAM->DRAMC).
 * Unlike the LP4 tables there is a single fixed mapping — no
 * per-package pinmux variants.
 */
const U8 uiLPDDR5_MRR_Mapping_POP[CHANNEL_NUM][16] =
{
	{
		8, 9, 10, 11, 12, 15, 14, 13,
		0, 1, 2, 3, 4, 7, 6, 5,
	},

	#if (CHANNEL_NUM>1)
	{
		8, 9, 10, 11, 12, 15, 14, 13,
		0, 1, 2, 3, 4, 7, 6, 5,
	},
	#endif
};
#endif
115 
//MRR DRAM->DRAMC
/*
 * Active LPDDR4 MRR bit swizzle per channel. Not const: initialized to
 * the EMCP values and overwritten by Set_DRAM_Pinmux_Sel() with the
 * variant matching the detected package (DSC vs EMCP).
 */
U8 uiLPDDR4_MRR_Mapping_POP[CHANNEL_NUM][16] =
{
	//CH-A
	{
		1, 0, 3, 2, 4, 7, 6, 5,
		8, 9, 10, 14, 11, 15, 13, 12
	},
	#if (CHANNEL_NUM>1)
	//CH-B
	{
		0, 1, 4, 7, 3, 5, 6, 2,
		9, 8, 10, 12, 11, 14, 13, 15
	},
	#endif
	#if (CHANNEL_NUM>2)
	//CH-C
	{
		1, 0, 3, 2, 4, 7, 6, 5,
		8, 9, 10, 14, 11, 15, 13, 12
	},
	//CH-D
	{
		0, 1, 4, 7, 3, 5, 6, 2,
		9, 8, 10, 12, 11, 14, 13, 15
	},
	#endif
};
144 
#if (fcFOR_CHIP_ID == fcMargaux)
/*
 * Set_DRAM_Pinmux_Sel - select the LP4 pinmux variant (DSC vs EMCP) and
 * load the matching per-channel MRR/O1/CA mapping tables into the
 * active *_Mapping_POP arrays.
 *
 * LP5 parts keep their own fixed mapping, so nothing is done for them.
 */
static void Set_DRAM_Pinmux_Sel(DRAMC_CTX_T *p)
{
	#if (__LP5_COMBO__)
	if (is_lp5_family(p))
		return;	// LP5 uses the uiLPDDR5_* tables; no selection needed
	#endif

#if !FOR_DV_SIMULATION_USED
	if (is_discrete_lpddr4())
		p->DRAMPinmux = PINMUX_DSC;	// discrete LPDDR4 package
	else
#endif
		p->DRAMPinmux = PINMUX_EMCP;	// default (also used for DV simulation)

	msg("[Set_DRAM_Pinmux_Sel] DRAMPinmux = %d\n", p->DRAMPinmux);

	// Overwrite the active POP mapping tables with the selected variant.
	memcpy(&uiLPDDR4_MRR_Mapping_POP, uiLPDDR4_MRR_DRAM_Pinmux[p->DRAMPinmux], sizeof(uiLPDDR4_MRR_Mapping_POP));
	memcpy(&uiLPDDR4_O1_Mapping_POP, uiLPDDR4_O1_DRAM_Pinmux[p->DRAMPinmux], sizeof(uiLPDDR4_O1_Mapping_POP));
	memcpy(&uiLPDDR4_CA_Mapping_POP, uiLPDDR4_CA_DRAM_Pinmux[p->DRAMPinmux], sizeof(uiLPDDR4_CA_Mapping_POP));
}
#endif
167 
/*
 * Set_MRR_Pinmux_Mapping - program the per-channel MRR bit-mux registers
 * so mode-register read-back bits are de-swizzled according to the
 * active mapping table (LP5 or LP4 variant).
 */
void Set_MRR_Pinmux_Mapping(DRAMC_CTX_T *p)
{
	U8 *pu1Map = NULL;
	U8 u1ChBackup;
	U32 u4BcBackup;
	DRAM_CHANNEL_T eCh;

	/* Save channel mapping and broadcast state; broadcast is turned off
	 * because each channel is programmed with its own table. */
	u1ChBackup = vGetPHY2ChannelMapping(p);
	u4BcBackup = GetDramcBroadcast();
	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);

	//LP4: Set each channel's pinmux individually, LP3: Only has 1 channel (support_channel_num == 1)
	for (eCh = CHANNEL_A; eCh < (int)p->support_channel_num; eCh++)
	{
		vSetPHY2ChannelMapping(p, eCh);

	#if (__LP5_COMBO__)
		if (is_lp5_family(p))
			pu1Map = (U8 *)uiLPDDR5_MRR_Mapping_POP[eCh];
		else
	#endif
		pu1Map = (U8 *)uiLPDDR4_MRR_Mapping_POP[eCh];

		// MRR pin mux: four registers, four bit-select fields apiece.
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX1),
			P_Fld(pu1Map[0], MRR_BIT_MUX1_MRR_BIT0_SEL) |
			P_Fld(pu1Map[1], MRR_BIT_MUX1_MRR_BIT1_SEL) |
			P_Fld(pu1Map[2], MRR_BIT_MUX1_MRR_BIT2_SEL) |
			P_Fld(pu1Map[3], MRR_BIT_MUX1_MRR_BIT3_SEL));
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX2),
			P_Fld(pu1Map[4], MRR_BIT_MUX2_MRR_BIT4_SEL) |
			P_Fld(pu1Map[5], MRR_BIT_MUX2_MRR_BIT5_SEL) |
			P_Fld(pu1Map[6], MRR_BIT_MUX2_MRR_BIT6_SEL) |
			P_Fld(pu1Map[7], MRR_BIT_MUX2_MRR_BIT7_SEL));
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX3),
			P_Fld(pu1Map[8], MRR_BIT_MUX3_MRR_BIT8_SEL) |
			P_Fld(pu1Map[9], MRR_BIT_MUX3_MRR_BIT9_SEL) |
			P_Fld(pu1Map[10], MRR_BIT_MUX3_MRR_BIT10_SEL) |
			P_Fld(pu1Map[11], MRR_BIT_MUX3_MRR_BIT11_SEL));
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_MRR_BIT_MUX4),
			P_Fld(pu1Map[12], MRR_BIT_MUX4_MRR_BIT12_SEL) |
			P_Fld(pu1Map[13], MRR_BIT_MUX4_MRR_BIT13_SEL) |
			P_Fld(pu1Map[14], MRR_BIT_MUX4_MRR_BIT14_SEL) |
			P_Fld(pu1Map[15], MRR_BIT_MUX4_MRR_BIT15_SEL));
	}

	// Restore the caller's channel mapping and broadcast mode.
	vSetPHY2ChannelMapping(p, u1ChBackup);
	DramcBroadcastOnOff(u4BcBackup);
}
208 
209 
/*
 * Set_DQO1_Pinmux_Mapping - program the per-channel DQ single-ended
 * (O1) pinmux registers from the active mapping table (LP5 or LP4).
 */
void Set_DQO1_Pinmux_Mapping(DRAMC_CTX_T *p)
{
	U8 *pu1Map = NULL;
	U8 u1ChBackup;
	U32 u4BcBackup;
	DRAM_CHANNEL_T eCh;

	/* Save channel mapping and broadcast state; broadcast is turned off
	 * because each channel is programmed with its own table. */
	u1ChBackup = vGetPHY2ChannelMapping(p);
	u4BcBackup = GetDramcBroadcast();
	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);

	//LP4: Set each channel's pinmux individually, LP3: Only has 1 channel (support_channel_num == 1)
	for (eCh = CHANNEL_A; eCh < (int)p->support_channel_num; eCh++)
	{
		vSetPHY2ChannelMapping(p, eCh);

	#if (__LP5_COMBO__)
		if (is_lp5_family(p))
			pu1Map = (U8 *)uiLPDDR5_O1_Mapping_POP[eCh];
		else
	#endif
		pu1Map = (U8 *)uiLPDDR4_O1_Mapping_POP[eCh];

		// DQ0..DQ7 pinmux selects.
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQ_SE_PINMUX_CTRL0),
			P_Fld(pu1Map[0], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ0) |
			P_Fld(pu1Map[1], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ1) |
			P_Fld(pu1Map[2], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ2) |
			P_Fld(pu1Map[3], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ3) |
			P_Fld(pu1Map[4], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ4) |
			P_Fld(pu1Map[5], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ5) |
			P_Fld(pu1Map[6], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ6) |
			P_Fld(pu1Map[7], MISC_DQ_SE_PINMUX_CTRL0_DQ_PINMUX_SEL_DQ7));
		// DQ8..DQ15 pinmux selects.
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DQ_SE_PINMUX_CTRL1),
			P_Fld(pu1Map[8], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ8) |
			P_Fld(pu1Map[9], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ9) |
			P_Fld(pu1Map[10], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ10) |
			P_Fld(pu1Map[11], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ11) |
			P_Fld(pu1Map[12], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ12) |
			P_Fld(pu1Map[13], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ13) |
			P_Fld(pu1Map[14], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ14) |
			P_Fld(pu1Map[15], MISC_DQ_SE_PINMUX_CTRL1_DQ_PINMUX_SEL_DQ15));
	}

	// Restore the caller's channel mapping and broadcast mode.
	vSetPHY2ChannelMapping(p, u1ChBackup);
	DramcBroadcastOnOff(u4BcBackup);
}
258 
259 
/*
 * SetRankInfoToConf - derive single/dual-rank info from the EMI settings
 * and publish it in the SA_RESERVE scratch register for later stages.
 */
static void SetRankInfoToConf(DRAMC_CTX_T *p)
{
#if (FOR_DV_SIMULATION_USED == 0 && SW_CHANGE_FOR_SIMULATION == 0)
	EMI_SETTINGS *pEmiSet;
	U32 u4SingleRank;

#if (!__ETT__)//preloader
	pEmiSet = (emi_setting_index == -1) ? &default_emi_setting
					    : &emi_settings[emi_setting_index];
#else//ett
	pEmiSet = &default_emi_setting;
#endif

	// EMI_CONA bit 17: 1 = dual-rank mode enabled, 0 = disabled.
	if ((pEmiSet->EMI_CONA_VAL >> 17) & 0x1)
		u4SingleRank = 0;
	else
		u4SingleRank = 1;

	vIO32WriteFldAlign(DRAMC_REG_SA_RESERVE, u4SingleRank, SA_RESERVE_SINGLE_RANK);

	jv_msg("Rank info: %d emi_setting_index: %d CONA[0x%x]\n", u4SingleRank, emi_setting_index, pEmiSet->EMI_CONA_VAL);
#endif
	return;
}
283 
/*
 * SetDramInfoToConf - record per-rank CBT mode in SA_RESERVE and flag
 * parts whose highest supported frequency is >= 2133 as 4266-capable.
 */
static void SetDramInfoToConf(DRAMC_CTX_T *p)
{
	// Per-rank CBT mode (normal vs byte mode) for later stages.
	vIO32WriteFldMulti_All(DRAMC_REG_SA_RESERVE,
		P_Fld(p->dram_cbt_mode[RANK_0], SA_RESERVE_MODE_RK0) |
		P_Fld(p->dram_cbt_mode[RANK_1], SA_RESERVE_MODE_RK1));

	if (u2DFSGetHighestFreq(p) >= 2133)
		vIO32WriteFldAlign_All(DRAMC_REG_SA_RESERVE, 1, SA_RESERVE_SUPPORT_4266);
}
295 
/*
 * UpdateDFSTbltoDDR3200 - replace the current highest-frequency entry in
 * the DFS shuffle table with DDR3200 (used for DSC DRAM), then refresh
 * the cached highest-frequency value.
 */
void UpdateDFSTbltoDDR3200(DRAMC_CTX_T *p)
{
#if(FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
	U16 u2HighestFreq = u2DFSGetHighestFreq(p);
	DRAM_PLL_FREQ_SEL_T highestfreqsel = 0;
	U8 u1ShuffleIdx = 0;

	// lookup table to find highest freq
	highestfreqsel = GetSelByFreq(p, u2HighestFreq);
	for (u1ShuffleIdx = DRAM_DFS_SHUFFLE_1; u1ShuffleIdx < DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
		if (gFreqTbl[u1ShuffleIdx].freq_sel == highestfreqsel)
			break;

	/* Fix: the original fell through with u1ShuffleIdx ==
	 * DRAM_DFS_SHUFFLE_MAX when no entry matched and then wrote
	 * gFreqTbl[DRAM_DFS_SHUFFLE_MAX] — out of bounds. Bail out instead. */
	if (u1ShuffleIdx >= DRAM_DFS_SHUFFLE_MAX)
	{
		msg("[UpdateDFSTbltoDDR3200] freq_sel %d not found in gFreqTbl, skip update\n", highestfreqsel);
		return;
	}

	gFreqTbl[u1ShuffleIdx].freq_sel = LP4_DDR3200; // for DSC DRAM

	gUpdateHighestFreq = TRUE;
	u2HighestFreq = u2DFSGetHighestFreq(p); // @Darren, Update u2FreqMax variables
	#if __ETT__
	UpdateEttDFVSTbltoDDR3200(p, u2HighestFreq); //@Darren, Update for DDR3200 ETT DVFS stress
	#endif
	msg("[UpdateDFSTbltoDDR3200] Get Highest Freq is %d\n", u2HighestFreq);
#endif
}
319 
/*
 * Global_Option_Init - one-time global setup before calibration:
 * channel/rank/FSP counts, pinmux table selection and programming,
 * global variable init, and optional TX-tracking / HQA bring-up.
 */
void Global_Option_Init(DRAMC_CTX_T *p)
{
	//SaveCurrDramCtx(p);
	vSetChannelNumber(p);
	SetRankInfoToConf(p);		// publish single/dual-rank info from EMI settings
	vSetRankNumber(p);
	vSetFSPNumber(p);
#if (fcFOR_CHIP_ID == fcMargaux)
	Set_DRAM_Pinmux_Sel(p);		// pick DSC/EMCP tables before the mapping setup below
#endif
	Set_MRR_Pinmux_Mapping(p);	// program MRR bit mux per channel
	Set_DQO1_Pinmux_Mapping(p);	// program DQ O1 (SE) pinmux per channel

	vInitGlobalVariablesByCondition(p);
	vInitMappingFreqArray(p);
#if ENABLE_TX_TRACKING
	DramcDQSOSCInit();
#endif

#ifdef FOR_HQA_TEST_USED
	HQA_measure_message_reset_all_data(p);
#endif
}
343 
#if CBT_MOVE_CA_INSTEAD_OF_CLK
/*
 * DramcCmdUIDelaySetting - apply the same UI (coarse) delay 'value' to
 * all seven CA lanes (RA0..RA6) and to both CKE delays for the current
 * shuffle, keeping the command bus and CKE aligned.
 */
void DramcCmdUIDelaySetting(DRAMC_CTX_T *p, U8 value)
{
	// One shared UI delay for every CA lane so the bus shifts as a unit.
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA7), P_Fld(value, SHU_SELPH_CA7_DLY_RA0) |
																P_Fld(value, SHU_SELPH_CA7_DLY_RA1) |
																P_Fld(value, SHU_SELPH_CA7_DLY_RA2) |
																P_Fld(value, SHU_SELPH_CA7_DLY_RA3) |
																P_Fld(value, SHU_SELPH_CA7_DLY_RA4) |
																P_Fld(value, SHU_SELPH_CA7_DLY_RA5) |
																P_Fld(value, SHU_SELPH_CA7_DLY_RA6));

	// Note: CKE UI must sync CA UI (CA and CKE delay circuit are same) @Lin-Yi
	// To avoid tXP timing margin issue
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5), value, SHU_SELPH_CA5_DLY_CKE);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA6), value, SHU_SELPH_CA6_DLY_CKE1);

	///TODO: Yirong : new calibration flow : change CS UI to 0
//	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5), P_Fld(0x1, SHU_SELPH_CA5_DLY_CS) | P_Fld(0x1, SHU_SELPH_CA5_DLY_CS1));
}
#endif
364 
/*
 * cbt_dfs_mr13_global - update the cached (global-value-only) MR13
 * FSP-OP/FSP-WR fields for every rank: 0 for CBT_LOW_FREQ, 1 otherwise.
 * No MRW is issued (JUST_TO_GLOBAL_VALUE).
 */
void cbt_dfs_mr13_global(DRAMC_CTX_T *p, U8 freq)
{
	U8 u1FspVal = (freq == CBT_LOW_FREQ) ? 0 : 1;	// FSP_0 for low, FSP_1 for high
	U8 u1RkBackup = u1GetRank(p);
	U8 u1Rk;

	for (u1Rk = 0; u1Rk < p->support_rank_num; u1Rk++)
	{
		vSetRank(p, u1Rk);
		DramcMRWriteFldAlign(p, 13, u1FspVal, MR13_FSP_OP, JUST_TO_GLOBAL_VALUE);
		DramcMRWriteFldAlign(p, 13, u1FspVal, MR13_FSP_WR, JUST_TO_GLOBAL_VALUE);
	}

	vSetRank(p, u1RkBackup);
}
390 
/*
 * cbt_switch_freq - jump the DRAMC between the low (CBT/MRW) and high
 * shuffle frequencies. A no-op in DV simulation and when the requested
 * frequency is already active.
 */
void cbt_switch_freq(DRAMC_CTX_T *p, U8 freq)
{
#if (FOR_DV_SIMULATION_USED == TRUE) // @Darren+ for DV sim
	return;
#endif

#if MR_CBT_SWITCH_FREQ
#if (fcFOR_CHIP_ID == fcMargaux)
	static U8 s_u1LastFreq = CBT_UNKNOWN_FREQ;

	// Already at the requested frequency — nothing to do.
	if (s_u1LastFreq == freq)
		return;
	s_u1LastFreq = freq;

	EnableDFSHwModeClk(p);

	// Shuffle 2 carries the low-frequency (MRW/CBT) setting, shuffle 1 the high one.
	// Darren NOTE: Dramc shu1 for MRW (DramcModeRegInit_LP4 and CBT)
	DramcDFSDirectJump_RGMode(p, (freq == CBT_LOW_FREQ) ? DRAM_DFS_SHUFFLE_2 : DRAM_DFS_SHUFFLE_1);

	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CLK_CTRL, P_Fld(0, MISC_CLK_CTRL_DVFS_CLK_MEM_SEL)
													| P_Fld(0, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_UPDATE_EN));

	DDRPhyFreqMeter();
#else
	#error Need check of the DRAM_DFS_SHUFFLE_X for your chip !!!
#endif
#endif
}
426 
427 
/*
 * DramcPowerOnSequence - LP4 power-up/reset sequence: assert DRAM reset
 * and drop CKE, wait tINIT1/tINIT2, release reset, force CLK on, wait
 * tINIT3, raise CKE, wait tINIT5, then return CLK to dynamic gating.
 * Statement order and delays are timing-critical — do not reorder.
 */
void DramcPowerOnSequence(DRAMC_CTX_T *p)
{
#ifdef DUMP_INIT_RG_LOG_TO_DE
	//CKE high
	CKEFixOnOff(p, CKE_WRITE_TO_ALL_RANK, CKE_FIXON, CKE_WRITE_TO_ALL_CHANNEL);
	return;
#endif

	#if APPLY_LP4_POWER_INIT_SEQUENCE
	//static U8 u1PowerOn=0;
	//if(u1PowerOn ==0)
	{
	//reset dram = low (assert RESETB)
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 0x0, MISC_CTRL1_R_DMDA_RRESETB_I);

	//vIO32WriteFldAlign(DRAMC_REG_RKCFG, 0, RKCFG_CKE2RANK_OPT2);

	//CKE low
	CKEFixOnOff(p, CKE_WRITE_TO_ALL_RANK, CKE_FIXOFF, CKE_WRITE_TO_ALL_CHANNEL);

	// delay tINIT1=200us(min) & tINIT2=10ns(min)
	mcDELAY_US(200);

	//reset dram = high (release RESETB; original comment wrongly said "low")
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL1), 0x1, MISC_CTRL1_R_DMDA_RRESETB_I);

	// Disable HW MIOCK control to make CLK always on
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_APHYCKCG_FIXOFF);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 1, DRAMC_PD_CTRL_TCKFIXON);

	//tINIT3=2ms(min)
	mcDELAY_MS(2);

	//CKE high
	CKEFixOnOff(p, CKE_WRITE_TO_ALL_RANK, CKE_FIXON, CKE_WRITE_TO_ALL_CHANNEL);

	// tINIT5=2us(min)
	mcDELAY_US(2);
	//u1PowerOn=1;

	//// Enable  HW MIOCK control to make CLK dynamic
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 0, DRAMC_PD_CTRL_TCKFIXON);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_DRAMC_PD_CTRL), 0, DRAMC_PD_CTRL_APHYCKCG_FIXOFF);
	msg3("APPLY_LP4_POWER_INIT_SEQUENCE\n");
	}
	#endif
}
475 
DramcModeRegInit_CATerm(DRAMC_CTX_T * p,U8 bWorkAround)476 DRAM_STATUS_T DramcModeRegInit_CATerm(DRAMC_CTX_T *p, U8 bWorkAround)
477 {
478 	static U8 CATermWA[CHANNEL_NUM] = {0};
479 	U8 u1ChannelIdx, u1RankIdx, u1RankIdxBak;
480 	U32 backup_broadcast;
481 	U8 u1MR11_Value;
482 	U8 u1MR22_Value;
483 
484 	u1ChannelIdx = vGetPHY2ChannelMapping(p);
485 
486 	if (CATermWA[u1ChannelIdx] == bWorkAround)
487 		return DRAM_OK;
488 
489 	CATermWA[u1ChannelIdx] = bWorkAround;
490 
491 	backup_broadcast = GetDramcBroadcast();
492 
493 	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
494 
495 	u1RankIdxBak = u1GetRank(p);
496 
497 	for (u1RankIdx = 0; u1RankIdx < (U32)(p->support_rank_num); u1RankIdx++)
498 	{
499 		vSetRank(p, u1RankIdx);
500 
501 		msg("[DramcModeRegInit_CATerm] CH%u RK%u bWorkAround=%d\n", u1ChannelIdx, u1RankIdx, bWorkAround);
502 		/* FSP_1: 1. For term freqs   2. Assumes "data rate >= DDR2667" are terminated */
503 		#if MRW_CHECK_ONLY
504 		mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
505 		#endif
506 		DramcMRWriteFldAlign(p, 13, 0, MR13_FSP_OP, TO_MR);
507 		DramcMRWriteFldAlign(p, 13, 1, MR13_FSP_WR, TO_MR);
508 
509 		//MR12 use previous value
510 
511 		if (p->dram_type == TYPE_LPDDR4P)
512 		{
513 			u1MR11_Value = 0x0; 				 //ODT disable
514 		}
515 		else
516 		{
517 #if ENABLE_SAMSUNG_NT_ODT
518 			if ((p->vendor_id == VENDOR_SAMSUNG) && (p->revision_id == 0x7)) // 1ynm process for NT-ODT
519 			{
520 				u1MR11_Value = 0x2; 			 //@Darren, DQ ODT:120ohm -> parallel to 60ohm
521 				u1MR11_Value |= (0x1 << 3);   //@Darren, MR11[3]=1 to enable NT-ODT for B707
522 			}
523 			else
524 #endif
525 				u1MR11_Value = 0x3; 			 //DQ ODT:80ohm
526 
527 		#if FSP1_CLKCA_TERM
528 			if (p->dram_cbt_mode[u1RankIdx] == CBT_NORMAL_MODE)
529 			{
530 				u1MR11_Value |= 0x40;			   //CA ODT:60ohm for byte mode
531 			}
532 			else
533 			{
534 				u1MR11_Value |= 0x20;			   //CA ODT:120ohm for byte mode
535 			}
536 		#endif
537 		}
538 	#if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
539 		if (gDramcDqOdtRZQAdjust >= 0)
540 			u1MR11_Value = gDramcDqOdtRZQAdjust;
541 	#endif
542 		u1MR11Value[p->dram_fsp] = u1MR11_Value;
543 		DramcModeRegWriteByRank(p, u1RankIdx, 11, u1MR11Value[p->dram_fsp]); //ODT
544 
545 		if (p->dram_type == TYPE_LPDDR4)
546 		{
547 			u1MR22_Value = 0x24;	 //SOC-ODT, ODTE-CK, ODTE-CS, Disable ODTD-CA
548 		}
549 		else  //TYPE_LPDDR4x, LP4P
550 		{
551 			u1MR22_Value = 0x3c;	 //Disable CA-CS-CLK ODT, SOC ODT=RZQ/4
552 		#if FSP1_CLKCA_TERM
553 			if (bWorkAround)
554 			{
555 				u1MR22_Value = 0x4;
556 			}
557 			else
558 			{
559 				if (u1RankIdx == RANK_0)
560 				{
561 					u1MR22_Value = 0x4; 	//Enable CA-CS-CLK ODT, SOC ODT=RZQ/4
562 				}
563 				else
564 				{
565 					u1MR22_Value = 0x2c;	 //Enable CS ODT, SOC ODT=RZQ/4
566 				}
567 			}
568 		#endif
569 		}
570 	#if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
571 		if (gDramcMR22SoCODTAdjust[u1MRFsp] >= 0)
572 		{
573 			u1MR22_Value = (u1MR22_Value & ~(0x7)) | gDramcMR22SoCODTAdjust[u1MRFsp];
574 		}
575 	#endif
576 		u1MR22Value[p->dram_fsp] = u1MR22_Value;
577 		DramcModeRegWriteByRank(p, u1RankIdx, 22, u1MR22_Value);
578 	}
579 
580 	vSetRank(p, u1RankIdxBak);
581 //	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MRS), RANK_0, MRS_MRSRK);
582 
583 	DramcBroadcastOnOff(backup_broadcast);
584 
585 	return DRAM_OK;
586 }
587 
DramcModeRegInit_LP4(DRAMC_CTX_T * p)588 DRAM_STATUS_T DramcModeRegInit_LP4(DRAMC_CTX_T *p)
589 {
590 	U32 u4RankIdx;//, u4CKE0Bak, u4CKE1Bak, u4MIOCKBak, u4AutoRefreshBak;
591 	U8 u1MRFsp= FSP_0;
592 	U8 u1ChannelIdx;
593 	U8 backup_channel, backup_rank;
594 	U8 operating_fsp = p->dram_fsp;
595 	U32 backup_broadcast;
596 	U8 u1MR11_Value;
597 	U8 u1MR22_Value;
598 	U8 u1nWR=0;
599 	U16 u2FreqMax = u2DFSGetHighestFreq(p);
600 	U8 u1set_mrsrk=0;
601 
602 	backup_broadcast = GetDramcBroadcast();
603 
604 	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
605 
606 
607 	DramcPowerOnSequence(p);
608 
609 	backup_channel = p->channel;
610 	backup_rank = p->rank;
611 
612 
613 #if VENDER_JV_LOG
614 	vPrintCalibrationBasicInfo_ForJV(p);
615 #endif
616 
617 	/* Fix nWR value to 30 (MR01[6:4] = 101B) for DDR3200
618 	 * Fix nWR value to 34 (MR01[6:4] = 110B) for DDR3733
619 	 * Other vendors: Use default MR01 for each FSP (Set in vInitGlobalVariablesByCondition() )
620 	 */
621 	{
622 		/* Clear MR01 OP[6:4] */
623 		u1MR01Value[FSP_0] &= 0x8F;
624 		u1MR01Value[FSP_1] &= 0x8F;
625 		if (u2FreqMax == 2133)
626 		{
627 			/* Set MR01 OP[6:4] to 111B = 7 */
628 			u1MR01Value[FSP_0] |= (0x7 << 4);
629 			u1MR01Value[FSP_1] |= (0x7 << 4);
630 			u1nWR = 40;
631 		}
632 		else if (u2FreqMax == 1866)
633 		{
634 			/* Set MR01 OP[6:4] to 110B = 6 */
635 			u1MR01Value[FSP_0] |= (0x6 << 4);
636 			u1MR01Value[FSP_1] |= (0x6 << 4);
637 			u1nWR = 34;
638 		}
639 		else // Freq <= 1600
640 		{
641 			/* Set MR01 OP[6:4] to 101B = 5 */
642 			u1MR01Value[FSP_0] |= (0x5 << 4);
643 			u1MR01Value[FSP_1] |= (0x5 << 4);
644 			u1nWR = 30;
645 		}
646 
647 		msg("nWR fixed to %d\n", u1nWR);
648 		reg_msg("nWR fixed to %d\n", u1nWR);
649 	}
650 
651 #ifndef DUMP_INIT_RG_LOG_TO_DE
652 	if(p->dram_fsp == FSP_1)
653 	{
654 		// @Darren, VDDQ = 600mv + CaVref default is 301mV (no impact)
655 		// Fix high freq keep FSP0 for CA term (PPR abnormal)
656 		vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld(1, CA_CMD2_RG_TX_ARCMD_OE_DIS_CA)
657 												| P_Fld(0, CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA)
658 												| P_Fld(0xff, CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA));
659 		cbt_switch_freq(p, CBT_LOW_FREQ);
660 		vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld(0, CA_CMD2_RG_TX_ARCMD_OE_DIS_CA)
661 												| P_Fld(1, CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA)
662 												| P_Fld(0xff, CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA));
663 	}
664 #endif
665 
666 	for(u1ChannelIdx=0; u1ChannelIdx<(p->support_channel_num); u1ChannelIdx++)
667 	{
668 		vSetPHY2ChannelMapping(p, u1ChannelIdx);
669 
670 		for(u4RankIdx =0; u4RankIdx < (U32)(p->support_rank_num); u4RankIdx++)
671 		{
672 			vSetRank(p, u4RankIdx);
673 
674 			msg("[ModeRegInit_LP4] CH%u RK%u\n", u1ChannelIdx, u4RankIdx);
675 			reg_msg("[ModeRegInit_LP4] CH%u RK%u\n", u1ChannelIdx, u4RankIdx);
676 		#if VENDER_JV_LOG
677 			jv_msg("\n[ModeRegInit_LP4] CH%u RK%d\n", u1ChannelIdx, u4RankIdx);
678 		#endif
679 		#if MRW_CHECK_ONLY
680 			mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
681 		#endif
682 
683 			//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MRS), u4RankIdx, MRS_MRSRK);
684 
685 			// Note : MR37 for LP4P should be set before any Mode register.
686 			// MR37 is not shadow register, just need to set by channel and rank. No need to set by FSP
687 			if(p->dram_type == TYPE_LPDDR4P)
688 			{
689 				//temp solution, need remove later
690 			#ifndef MT6779_FPGA
691 			#if (FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
692 				#if __Petrus_TO_BE_PORTING__
693 					dramc_set_vddq_voltage(p->dram_type, 600000);
694 				#endif
695 			#endif
696 			#endif
697 
698 				DramcModeRegWriteByRank(p, u4RankIdx, 37, 0x1);
699 
700 				//temp solution, need remove later
701 			#ifndef MT6779_FPGA
702 			#if (FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
703 				#if __Petrus_TO_BE_PORTING__
704 					dramc_set_vddq_voltage(p->dram_type, 400000);
705 				#endif
706 			#endif
707 			#endif
708 			}
709 
710 
711 		   // if(p->frequency<=1200)
712 			{
713 				/* FSP_0: 1. For un-term freqs	 2. Assumes "data rate < DDR2667" are un-term */
714 				u1MRFsp = FSP_0;
715 				msg3("\tFsp%d\n", u1MRFsp);
716 			#if VENDER_JV_LOG
717 				jv_msg("\tFsp%d\n", u1MRFsp);
718 			#endif
719 
720 				u1MR13Value[u4RankIdx] = 0;
721 				MRWriteFldMulti(p, 13, P_Fld(1, MR13_PRO) |
722 									   P_Fld(1, MR13_VRCG),
723 									   TO_MR);
724 
725 				//MR12 use previous value
726 				DramcModeRegWriteByRank(p, u4RankIdx, 12, u1MR12Value[u1ChannelIdx][u4RankIdx][u1MRFsp]);
727 				DramcModeRegWriteByRank(p, u4RankIdx, 1, u1MR01Value[u1MRFsp]);
728 
729 				//MR2 set Read/Write Latency
730 				if (p->freqGroup <= 400) // DDR800, DDR400
731 				{
732 					u1MR02Value[u1MRFsp] = 0x12; // the minimum of WL is 8, and the minimum of RL is 14 (Jouling and HJ)
733 				}
734 				else if ((p->freqGroup == 800) || (p->freqGroup == 600)) // DDR1600, DDR1200
735 				{
736 					u1MR02Value[u1MRFsp] = 0x12;
737 				}
738 				else if (p->freqGroup == 933) // DDR1866
739 				{
740 					u1MR02Value[u1MRFsp] = 0x1b; //RL=20, WL=10
741 				}
742 				else if (p->freqGroup == 1200) // DDR2280, DDR2400 (DDR2667 uses FSP_1)
743 				{
744 					u1MR02Value[u1MRFsp] = 0x24;
745 				}
746 
747 				DramcModeRegWriteByRank(p, u4RankIdx, 2, u1MR02Value[u1MRFsp]);
748 				//if(p->odt_onoff)
749 				u1MR11Value[u1MRFsp] = 0x0;
750 				DramcModeRegWriteByRank(p, u4RankIdx, 11, u1MR11Value[u1MRFsp]); //ODT disable
751 
752 #if ENABLE_LP4Y_DFS
753 				// For LPDDR4Y <= DDR1600 un-term
754 				if (p->freqGroup <= 800)
755 				{
756 					u1MR21Value[u1MRFsp] |= (0x1 << 5); // Low Speed CA buffer for LP4Y
757 					u1MR51Value[u1MRFsp] |= (0x3 << 1); // CLK[3]=0, WDQS[2]=1 and RDQS[1]=1 Single-End mode for LP4Y
758 				}
759 				else
760 #endif
761 				{
762 					u1MR21Value[u1MRFsp] = 0;
763 					u1MR51Value[u1MRFsp] = 0;
764 				}
765 				DramcModeRegWriteByRank(p, u4RankIdx, 21, u1MR21Value[u1MRFsp]);
766 				DramcModeRegWriteByRank(p, u4RankIdx, 51, u1MR51Value[u1MRFsp]);
767 
768 				if(p->dram_type == TYPE_LPDDR4)
769 				{
770 					u1MR22_Value = 0x20;	 //SOC-ODT, ODTE-CK, ODTE-CS, Disable ODTD-CA
771 				}
772 				else  //TYPE_LPDDR4x, LP4P
773 				{
774 					u1MR22_Value = 0x38;	 //SOC-ODT, ODTE-CK, ODTE-CS, Disable ODTD-CA
775 				}
776 			#if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
777 				if (gDramcMR22SoCODTAdjust[u1MRFsp]>=0)
778 				{
779 					u1MR22_Value = (u1MR22_Value&~(0x7))|gDramcMR22SoCODTAdjust[u1MRFsp];
780 				}
781 			#endif
782 				u1MR22Value[u1MRFsp] = u1MR22_Value;
783 				DramcModeRegWriteByRank(p, u4RankIdx, 22, u1MR22Value[u1MRFsp]);
784 
785 				//MR14 use previous value
786 				DramcModeRegWriteByRank(p, u4RankIdx, 14, u1MR14Value[u1ChannelIdx][u4RankIdx][u1MRFsp]); //MR14 VREF-DQ
787 
788 			#if CALIBRATION_SPEED_UP_DEBUG
789 				msg("CBT Vref Init: CH%d Rank%d FSP%d, Range %d Vref %d\n\n",p->channel, p->rank, u1MRFsp, u1MR12Value[u1ChannelIdx][u4RankIdx][u1MRFsp]>>6, (u1MR12Value[u1ChannelIdx][u4RankIdx][u1MRFsp] & 0x3f));
790 				msg("TX Vref Init: CH%d Rank%d FSP%d, TX Range %d Vref %d\n\n",p->channel, p->rank, u1MRFsp,u1MR14Value[u1ChannelIdx][u4RankIdx][u1MRFsp]>>6, (u1MR14Value[u1ChannelIdx][u4RankIdx][u1MRFsp] & 0x3f));
791 			#endif
792 
793 				//MR3 set write-DBI and read-DBI (Disabled during calibration, enabled after K)
794 				u1MR03Value[u1MRFsp] = (u1MR03Value[u1MRFsp]&0x3F);
795 
796 				if(p->dram_type == TYPE_LPDDR4X || p->dram_type == TYPE_LPDDR4P)
797 				{
798 					u1MR03Value[u1MRFsp] &= 0xfe;
799 				}
800 			#if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
801 				if (gDramcMR03PDDSAdjust[u1MRFsp]>=0)
802 				{
803 					u1MR03Value[u1MRFsp] = (u1MR03Value[u1MRFsp]&~(0x7<<3))|(gDramcMR03PDDSAdjust[u1MRFsp]<<3);
804 				}
805 			#endif
806 				DramcModeRegWriteByRank(p, u4RankIdx, 3, u1MR03Value[u1MRFsp]);
807 				DramcModeRegWriteByRank(p, u4RankIdx, 4, u1MR04Value[u4RankIdx]);
808 				DramcModeRegWriteByRank(p, u4RankIdx, 3, u1MR03Value[u1MRFsp]);
809 			}
810 			//else
811 			{
812 				/* FSP_1: 1. For term freqs   2. Assumes "data rate >= DDR2667" are terminated */
813 				u1MRFsp = FSP_1;
814 				msg3("\tFsp%d\n", u1MRFsp);
815 			#if VENDER_JV_LOG
816 				jv_msg("\tFsp%d\n", u1MRFsp);
817 			#endif
818 
819 				DramcMRWriteFldAlign(p, 13, 1, MR13_FSP_WR, TO_MR);
820 
821 				//MR12 use previous value
822 				DramcModeRegWriteByRank(p, u4RankIdx, 12, u1MR12Value[u1ChannelIdx][u4RankIdx][u1MRFsp]); //MR12 VREF-CA
823 				DramcModeRegWriteByRank(p, u4RankIdx, 1, u1MR01Value[u1MRFsp]);
824 
825 				//MR2 set Read/Write Latency
826 				if (p->freqGroup == 2133)
827 				{
828 					u1MR02Value[u1MRFsp] = 0x3f;
829 				}
830 				else if (p->freqGroup == 1866)
831 				{
832 					u1MR02Value[u1MRFsp] = 0x36;
833 				}
834 				else if (p->freqGroup == 1600)
835 				{
836 					u1MR02Value[u1MRFsp] = 0x2d;
837 				}
838 				else if (p->freqGroup == 1333)
839 				{
840 					u1MR02Value[u1MRFsp] = 0x24;
841 				}
842 				DramcModeRegWriteByRank(p, u4RankIdx, 2, u1MR02Value[u1MRFsp]);
843 
844 				if(p->dram_type == TYPE_LPDDR4P)
845 					u1MR11_Value = 0x0; 				 //ODT disable
846 				else
847 				{
848 #if ENABLE_SAMSUNG_NT_ODT
849 					if ((p->vendor_id == VENDOR_SAMSUNG) && (p->revision_id == 0x7)) // 1ynm process for NT-ODT
850 					{
851 						u1MR11_Value = 0x2; 			 //@Darren, DQ ODT:120ohm -> parallel to 60ohm
852 						u1MR11_Value |= (0x1 << 3);   //@Darren, MR11[3]=1 to enable NT-ODT for B707
853 					}
854 					else
855 #endif
856 						u1MR11_Value = 0x3; 			 //DQ ODT:80ohm
857 
858 				#if FSP1_CLKCA_TERM
859 					if(p->dram_cbt_mode[u4RankIdx]==CBT_NORMAL_MODE)
860 					{
861 						u1MR11_Value |= 0x40;			   //CA ODT:60ohm for normal mode die
862 					}
863 					else
864 					{
865 						u1MR11_Value |= 0x20;			   //CA ODT:120ohm for byte mode die
866 					}
867 				#endif
868 				}
869 			#if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
870 					if (gDramcDqOdtRZQAdjust>=0)
871 					{
872 						u1MR11_Value &= ~(0x7);
873 						u1MR11_Value = gDramcDqOdtRZQAdjust;
874 					}
875 			#endif
876 					u1MR11Value[u1MRFsp] = u1MR11_Value;
877 					DramcModeRegWriteByRank(p, u4RankIdx, 11, u1MR11Value[u1MRFsp]); //ODT
878 
879 				u1MR21Value[u1MRFsp] = 0;
880 				u1MR51Value[u1MRFsp] = 0;
881 				DramcModeRegWriteByRank(p, u4RankIdx, 21, u1MR21Value[u1MRFsp]);
882 				DramcModeRegWriteByRank(p, u4RankIdx, 51, u1MR51Value[u1MRFsp]);
883 
884 				if(p->dram_type == TYPE_LPDDR4)
885 				{
886 					u1MR22_Value = 0x24;	 //SOC-ODT, ODTE-CK, ODTE-CS, Disable ODTD-CA
887 				}
888 				else  //TYPE_LPDDR4x, LP4P
889 				{
890 					u1MR22_Value = 0x3c;	 //Disable CA-CS-CLK ODT, SOC ODT=RZQ/4
891 				#if FSP1_CLKCA_TERM
892 					if(u4RankIdx==RANK_0)
893 					{
894 						u1MR22_Value = 0x4; 	//Enable CA-CS-CLK ODT, SOC ODT=RZQ/4
895 					}
896 					else
897 					{
898 						u1MR22_Value = 0x2c;	 //Enable CS ODT, SOC ODT=RZQ/4
899 					}
900 				#endif
901 				}
902 			#if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
903 				if (gDramcMR22SoCODTAdjust[u1MRFsp]>=0)
904 				{
905 					u1MR22_Value = (u1MR22_Value&~(0x7))|gDramcMR22SoCODTAdjust[u1MRFsp];
906 				}
907 			#endif
908 				u1MR22Value[u1MRFsp] = u1MR22_Value;
909 				DramcModeRegWriteByRank(p, u4RankIdx, 22, u1MR22Value[u1MRFsp]);
910 
911 				//MR14 use previous value
912 				DramcModeRegWriteByRank(p, u4RankIdx, 14, u1MR14Value[u1ChannelIdx][u4RankIdx][u1MRFsp]); //MR14 VREF-DQ
913 
914 			#if CALIBRATION_SPEED_UP_DEBUG
915 				msg("CBT Vref Init: CH%d Rank%d FSP%d, Range %d Vref %d\n\n",p->channel, p->rank, u1MRFsp, u1MR12Value[u1ChannelIdx][u4RankIdx][u1MRFsp]>>6, (u1MR12Value[u1ChannelIdx][u4RankIdx][u1MRFsp] & 0x3f));
916 				msg("TX Vref Init: CH%d Rank%d FSP%d, TX Range %d Vref %d\n\n",p->channel, p->rank, u1MRFsp, u1MR14Value[u1ChannelIdx][u4RankIdx][u1MRFsp]>>6, (u1MR14Value[u1ChannelIdx][u4RankIdx][u1MRFsp] & 0x3f));
917 			#endif
918 
919 				//MR3 set write-DBI and read-DBI (Disabled during calibration, enabled after K)
920 				u1MR03Value[u1MRFsp] = (u1MR03Value[u1MRFsp]&0x3F);
921 
922 				if(p->dram_type == TYPE_LPDDR4X || p->dram_type == TYPE_LPDDR4P)
923 				{
924 					u1MR03Value[u1MRFsp] &= 0xfe;
925 				}
926 			#if APPLY_SIGNAL_WAVEFORM_SETTINGS_ADJUST
927 				if (gDramcMR03PDDSAdjust[u1MRFsp]>=0)
928 				{
929 					u1MR03Value[u1MRFsp] = (u1MR03Value[u1MRFsp]&~(0x7<<3))|(gDramcMR03PDDSAdjust[u1MRFsp]<<3);
930 				}
931 			#endif
932 				DramcModeRegWriteByRank(p, u4RankIdx, 3, u1MR03Value[u1MRFsp]);
933 				DramcModeRegWriteByRank(p, u4RankIdx, 4, u1MR04Value[u4RankIdx]);
934 				DramcModeRegWriteByRank(p, u4RankIdx, 3, u1MR03Value[u1MRFsp]);
935 			}
936 
937 
938 #if ENABLE_LP4_ZQ_CAL
939 			DramcZQCalibration(p, u4RankIdx); //ZQ calobration should be done before CBT calibration by switching to low frequency
940 #endif
941 
942 			/* freq < 1333 is assumed to be odt_off -> uses FSP_0 */
943 			//if (p->frequency < MRFSP_TERM_FREQ)
944 			if(operating_fsp == FSP_0)
945 			{
946 				DramcMRWriteFldAlign(p, 13, 0, MR13_FSP_OP, JUST_TO_GLOBAL_VALUE);
947 				DramcMRWriteFldAlign(p, 13, 0, MR13_FSP_WR, JUST_TO_GLOBAL_VALUE);
948 			}
949 			else
950 			{
951 				DramcMRWriteFldAlign(p, 13, 1, MR13_FSP_OP, JUST_TO_GLOBAL_VALUE);
952 				DramcMRWriteFldAlign(p, 13, 1, MR13_FSP_WR, JUST_TO_GLOBAL_VALUE);
953 			}
954 
955 		}
956 		vSetRank(p, backup_rank);
957 
958 #if 0
959 		for(u4RankIdx =0; u4RankIdx < (U32)(p->support_rank_num); u4RankIdx++)
960 		{
961 			DramcModeRegWriteByRank(p, u4RankIdx, 13, u1MR13Value[RANK_0]);
962 		}
963 #else
964 
965 		/* MRS two ranks simutaniously */
966 		if (p->support_rank_num == RANK_DUAL)
967 			u1set_mrsrk = 0x3;
968 		else
969 			u1set_mrsrk = RANK_0;
970 		DramcModeRegWriteByRank(p, u1set_mrsrk, 13, u1MR13Value[RANK_0]);
971 #endif
972 
973 		/* Auto-MRW related register write (Used during HW DVFS frequency switch flow)
974 		 * VRCG seems to be enabled/disabled even when switching to same FSP(but different freq) to simplify HW DVFS flow
975 		 */
976 		// 1. MR13 OP[3] = 1 : Enable "high current mode" to reduce the settling time when changing FSP(freq) during operation
977 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_MR13), P_Fld(u1MR13Value[RANK_0] | (0x1 << 3), SHU_HWSET_MR13_HWSET_MR13_OP)
978 																	| P_Fld(13, SHU_HWSET_MR13_HWSET_MR13_MRSMA));
979 		// 2. MR13 OP[3] = 1 : Enable "high current mode" after FSP(freq) switch operation for calibration
980 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_VRCG), P_Fld(u1MR13Value[RANK_0] | (0x1 << 3), SHU_HWSET_VRCG_HWSET_VRCG_OP)
981 																	| P_Fld(13, SHU_HWSET_VRCG_HWSET_VRCG_MRSMA));
982 		// 3. MR2 : Set RL/WL after FSP(freq) switch
983 		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_MR2), P_Fld(u1MR02Value[operating_fsp], SHU_HWSET_MR2_HWSET_MR2_OP)
984 																	| P_Fld(2, SHU_HWSET_MR2_HWSET_MR2_MRSMA));
985 	}
986 
987 #ifndef DUMP_INIT_RG_LOG_TO_DE
988 	//switch to high freq
989 	if(p->dram_fsp == FSP_1)
990 	{
991 		// @Darren, no effect via DDR1600 (purpose to keep @FSP0)
992 		// Fix high freq keep FSP0 for CA term (PPR abnormal)
993 		vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld(1, CA_CMD2_RG_TX_ARCMD_OE_DIS_CA)
994 												| P_Fld(0, CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA)
995 												| P_Fld(0xff, CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA));
996 		cbt_switch_freq(p, CBT_HIGH_FREQ);
997 		vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld(0, CA_CMD2_RG_TX_ARCMD_OE_DIS_CA)
998 												| P_Fld(1, CA_CMD2_RG_TX_ARCA_OE_TIE_SEL_CA)
999 												| P_Fld(0xff, CA_CMD2_RG_TX_ARCA_OE_TIE_EN_CA));
1000 	}
1001 #endif
1002 	vSetPHY2ChannelMapping(p, backup_channel);
1003 
1004 	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL0), RANK_0, SWCMD_CTRL0_MRSRK);
1005 
1006 	DramcBroadcastOnOff(backup_broadcast);
1007 
1008 #if SAMSUNG_TEST_MODE_MRS_FOR_PRELOADER
1009 	vApplyProgramSequence(p);
1010 #endif
1011 
1012 	return DRAM_OK;
1013 }
1014 
/*
 * MPLLInit - bring up the MPLL for DRAMC operation.
 *
 * Sequence (active branch): power on the PLL, drop its isolation, program
 * the frequency configuration word, enable the PLL, then hand PLL
 * ISO/EN/PWR control over to SPM and relax two PLL-SPEC bits.
 * The delays between steps are PLL settling requirements — do not reorder
 * or remove them.
 *
 * Entire body is compiled out for DV simulation builds
 * (FOR_DV_SIMULATION_USED != 0). The first inner branch (#if 0) is dead
 * legacy A60868 code kept for reference.
 */
void MPLLInit(void)
{
#if (FOR_DV_SIMULATION_USED==0)
#if 0//__A60868_TO_BE_PORTING__

#if (FOR_DV_SIMULATION_USED == 0)
	unsigned int tmp;

	DRV_WriteReg32(AP_PLL_CON0, 0x11); // CLKSQ Enable
	mcDELAY_US(100);
	DRV_WriteReg32(AP_PLL_CON0, 0x13); // CLKSQ LPF Enable
	mcDELAY_MS(1);
	DRV_WriteReg32(MPLL_PWR_CON0, 0x3); // power on MPLL
	mcDELAY_US(30);
	DRV_WriteReg32(MPLL_PWR_CON0, 0x1); // turn off ISO of	MPLL
	mcDELAY_US(1);
	tmp = DRV_Reg32(MPLL_CON1);
	DRV_WriteReg32(MPLL_CON1, tmp | 0x80000000); // Config MPLL freq
	DRV_WriteReg32(MPLL_CON0, 0x181); // enable MPLL
	mcDELAY_US(20);
#endif
#else
	unsigned int tmp;

	DRV_WriteReg32(MPLL_CON3, 0x3); // power on MPLL

	mcDELAY_US(30);

	tmp = DRV_Reg32(MPLL_CON3);
	DRV_WriteReg32(MPLL_CON3, tmp & 0xFFFFFFFD); // turn off ISO of  MPLL

	mcDELAY_US(1);

	// 0x84200000 presumably encodes the target VCO/feedback divider — TODO confirm against the PLL datasheet
	DRV_WriteReg32(MPLL_CON1, 0x84200000); // Config MPLL freq

	tmp = DRV_Reg32(MPLL_CON0);
	DRV_WriteReg32(MPLL_CON0, tmp | 0x1); // enable MPLL

	mcDELAY_US(20);

	// Hand over PLL control to SPM: clear the manual ISO/EN/PWR override bits.
	tmp = DRV_Reg32(PLLON_CON0);
	DRV_WriteReg32(PLLON_CON0, tmp & ~(0x1 << 2)); // PLL_ISO  from SPM

	tmp = DRV_Reg32(PLLON_CON0);
	DRV_WriteReg32(PLLON_CON0, tmp & ~(0x1 << 11)); // PLL_EN  from SPM

	tmp = DRV_Reg32(PLLON_CON1);
	DRV_WriteReg32(PLLON_CON1, tmp & ~(0x1 << 20)); // PLL_PWR	from SPM

	tmp = DRV_Reg32(PLLON_CON2);
	DRV_WriteReg32(PLLON_CON2, tmp & ~(0x1 << 2)); // PLL SPEC

	tmp = DRV_Reg32(PLLON_CON3);
	DRV_WriteReg32(PLLON_CON3, tmp & ~(0x1 << 2)); // PLL SPEC
#endif
#endif
}
1072 
1073 
1074 #if ENABLE_RODT_TRACKING_SAVE_MCK
/*
 * SetTxWDQSStatusOnOff - record whether WDQS (ROEN) is enabled.
 *
 * Latches the caller-supplied on/off state into the file-scope flag
 * u1WDQS_ON so the RODT-tracking/ROEN code can consult it later.
 */
void SetTxWDQSStatusOnOff(U8 u1OnOff)
{
	u1WDQS_ON = u1OnOff;
}
1079 #endif
1080 
1081 
1082 #if XRTRTR_NEW_CROSS_RK_MODE
/*
 * XRTRTR_SHU_Setting - per-shuffle rank-select strobe configuration for the
 * new cross-rank RX (rank-to-rank) mode.
 *
 * Chooses how far to back the rank-select strobe off depending on the
 * operating point, then programs the SHU_MISC_RANK_SEL_STB register:
 *   - DDR800 semi-open-loop mode: minus one MCK;
 *   - frequency >= 1600 (DDR3200 and up): minus two UI;
 *   - otherwise: no adjustment.
 */
void XRTRTR_SHU_Setting(DRAMC_CTX_T * p)
{
	U8 u1MCKMinus = 0;
	U8 u1UIMinus = 0;

	if (vGet_DDR_Loop_Mode(p) == SEMI_OPEN_LOOP_MODE)
	{
		// DDR800semi
		u1MCKMinus = 1;
	}
	else if (p->frequency >= 1600)
	{
		// DDR3200 up
		u1UIMinus = 2;
	}

	// DRAMC setting - @Darren, DV no set (double confirm)
	vIO32WriteFldMulti_All(DDRPHY_REG_SHU_MISC_RANK_SEL_STB, P_Fld(u1MCKMinus, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_MCK_MINUS)
									   | P_Fld(u1UIMinus, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_UI_MINUS)
									   | P_Fld(0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_MCK_PLUS)
									   | P_Fld(0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_UI_PLUS)
									   | P_Fld(0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_PHASE_EN)
									   | P_Fld(0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_RXDLY_TRACK)
									   | P_Fld(0x1, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_TRACK)
									   | P_Fld(0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_SERMODE) // @HJ, no use
									   | P_Fld(0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_EN_B23)
									   | P_Fld(0x1, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_EN));
}
1121 #endif
1122 
1123 #if XRTWTW_NEW_CROSS_RK_MODE
/*
 * XRTWTW_SHU_Setting - derive TX PI update windows for the new cross-rank
 * write-to-write mode from the per-rank TX OE delays.
 *
 * Reads TXDLY_OEN_DQ0/DQ1 for each rank, takes the per-byte maximum across
 * ranks, subtracts a frequency-dependent margin (2 for >= 1200, else 1,
 * clamped at 0) to get the PI update point per byte, and programs
 * SHU_NEW_XRW2W_CTRL and SHU_TX_RANKCTL accordingly.
 */
void XRTWTW_SHU_Setting(DRAMC_CTX_T * p)
{
	U8 u1Rk, u1Byte;
	U8 u1RankBackup = u1GetRank(p);
	U16 u2OENDly[2][2] = {0};
	U16 u2PIUpd[2] = {0};
	U16 u2MaxDly, u2PIUpdMax, u2RankInCtl;

	// Capture each rank's byte-0/byte-1 TX OE delay, then restore the rank.
	for (u1Rk = RANK_0; u1Rk < p->support_rank_num; u1Rk++)
	{
		vSetRank(p, u1Rk);

		u2OENDly[u1Rk][0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_OEN_DQ0); //Byte0
		u2OENDly[u1Rk][1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_OEN_DQ1); //Byte1
	}
	vSetRank(p, u1RankBackup);

	for (u1Byte = 0; u1Byte < (p->data_width / DQS_BIT_NUMBER); u1Byte++)
	{
		U16 u2Margin = (p->frequency >= 1200) ? 2 : 1;

		u2MaxDly = u2OENDly[0][u1Byte];
		if (u2OENDly[1][u1Byte] > u2MaxDly)
			u2MaxDly = u2OENDly[1][u1Byte];

		u2PIUpd[u1Byte] = (u2MaxDly > u2Margin) ? (u2MaxDly - u2Margin) : 0;
	}

	u2PIUpdMax = (u2PIUpd[1] > u2PIUpd[0]) ? u2PIUpd[1] : u2PIUpd[0];
	u2RankInCtl = (u2PIUpdMax > 1) ? (u2PIUpdMax - 1) : 0;

	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_NEW_XRW2W_CTRL), P_Fld(0x0, SHU_NEW_XRW2W_CTRL_TXPI_UPD_MODE)
										   | P_Fld(u2PIUpd[0], SHU_NEW_XRW2W_CTRL_TX_PI_UPDCTL_B0)
										   | P_Fld(u2PIUpd[1], SHU_NEW_XRW2W_CTRL_TX_PI_UPDCTL_B1));

	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_TX_RANKCTL), P_Fld(0x0, SHU_TX_RANKCTL_TXRANKINCTL_ROOT)
											| P_Fld(u2RankInCtl, SHU_TX_RANKCTL_TXRANKINCTL)
											| P_Fld(u2RankInCtl, SHU_TX_RANKCTL_TXRANKINCTL_TXDLY));
}
1159 #endif
1160 
1161 
1162 #if CMD_CKE_WORKAROUND_FIX
/*
 * CMD_CKE_Modified_txp_Setting - choose CK/CMD TX pipe bypass per operating
 * point (CMD/CKE tXP workaround).
 *
 * DIV4 mode bypasses the CK TX pipe only; frequency >= 1866 bypasses both
 * CK and CMD TX pipes. The whole body is compiled out until the A60868
 * port is done.
 */
void CMD_CKE_Modified_txp_Setting(DRAMC_CTX_T * p)
{
#if __A60868_TO_BE_PORTING__

	U8 u1CmdPipe = 0;
	U8 u1CkPipe = 0;
	U8 u1SrefPdSel = 0;

	if (vGet_Div_Mode(p) == DIV4_MODE)
	{
		u1CkPipe = 1;
		u1SrefPdSel = 1;
	}
	else if (p->frequency >= 1866)
	{
		u1CkPipe = 1;
		u1CmdPipe = 1;
	}

	vIO32WriteFldAlign(DDRPHY_SHU_MISC0, u1CkPipe, SHU_MISC0_RG_CK_TXPIPE_BYPASS_EN);
	vIO32WriteFldAlign(DDRPHY_SHU_MISC0, u1CmdPipe, SHU_MISC0_RG_CMD_TXPIPE_BYPASS_EN);
	// NOTE(review): u1SrefPdSel is computed but its consumer is disabled below —
	// confirm whether SHU_CONF0_SREF_PD_SEL should be re-enabled when porting.
	//vIO32WriteFldAlign(DRAMC_REG_SHU_CONF0, u1SrefPdSel, SHU_CONF0_SREF_PD_SEL);
#endif //__A60868_TO_BE_PORTING__
}
1185 #endif
1186 
1187 #if TX_OE_EXTEND
/*
 * UpdateTxOEN - extend the TX output-enable window (LP4).
 *
 * Programming model (dramc_conf 0x8C0):
 *  1. DQOE_OPT [11] = 1 : adjust DQSOE/DQOE length with DQOE_CNT
 *  2. DQOE_CNT [10:8] = 3 for LP4 (LP3 would use 2)
 *  3. Initial TX setting: OE = DATA - 4 UI for LP4 (DATA - 2 UI for LP3)
 *
 * After programming the count, the DQS and DQ OE timings are shifted
 * earlier by one UI for all bytes (DQ across all ranks).
 *
 * Fix: removed the unused locals u1ByteIdx/ii and the dead
 * backup_rank = u1GetRank(p) assignment (the value was never used or
 * restored), which only generated -Wunused warnings.
 */
static void UpdateTxOEN(DRAMC_CTX_T *p)
{
	U8 u1DQ_OE_CNT = 3; // LP4 value; see programming model above

	vIO32WriteFldMulti(DRAMC_REG_SHU_TX_SET0, P_Fld(1, SHU_TX_SET0_DQOE_OPT) | P_Fld(u1DQ_OE_CNT, SHU_TX_SET0_DQOE_CNT));

	// Shift DQS OE (current rank) and DQ OE (all ranks) earlier by 1 UI.
	LP4_ShiftDQS_OENUI(p, -1, ALL_BYTES);
	ShiftDQ_OENUI_AllRK(p, -1, ALL_BYTES);
}
1219 #endif
1220 
#if ENABLE_DUTY_SCAN_V2
#ifdef DDR_INIT_TIME_PROFILING
// Duty-scan calibration elapsed time, recorded only in profiling builds.
U32 gu4DutyCalibrationTime;
#endif
#endif
1226 
/*
 * vReplaceDVInit - apply DV-replacement / bring-up register overrides.
 *
 * Disables RX tracking (global and per-rank), clears CBT training latency
 * and WRFIFO mode2, then applies a long list of bring-up review settings
 * (APHY hysteresis/cap selection by frequency, latch forcing, IRQ masking,
 * DCM/clock-gating options, and DE-recommended DRAMC/DDRPHY defaults) with
 * broadcast temporarily off.
 *
 * NOTE(review): the exact write order of this register list is assumed to
 * matter (bring-up sequence); do not reorder without hardware confirmation.
 */
static void vReplaceDVInit(DRAMC_CTX_T *p)
{
	U8 u1RandIdx, backup_rank = 0;

	backup_rank = p->rank;

	//Disable RX Tracking
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ5), P_Fld(0, B1_DQ5_RG_RX_ARDQS0_DVS_EN_B1));
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ5), P_Fld(0, B0_DQ5_RG_RX_ARDQS0_DVS_EN_B0));
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B0_RXDVS0), P_Fld(0, B0_RXDVS0_R_RX_DLY_TRACK_ENA_B0)
															| P_Fld(0, B0_RXDVS0_R_RX_DLY_TRACK_CG_EN_B0 ));
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_B1_RXDVS0), P_Fld(0, B1_RXDVS0_R_RX_DLY_TRACK_ENA_B1)
															| P_Fld(0, B1_RXDVS0_R_RX_DLY_TRACK_CG_EN_B1 ));

	// Disable per-rank RX delay tracking for every supported rank.
	for(u1RandIdx = RANK_0; u1RandIdx < p->support_rank_num; u1RandIdx++)
	{
		vSetRank(p, u1RandIdx);
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_RK_B0_RXDVS2), P_Fld(0, RK_B0_RXDVS2_R_RK0_RX_DLY_FAL_TRACK_GATE_ENA_B0)
																	| P_Fld(0, RK_B0_RXDVS2_R_RK0_RX_DLY_RIS_TRACK_GATE_ENA_B0)
																	| P_Fld(0, RK_B0_RXDVS2_R_RK0_DVS_MODE_B0));
		vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_RK_B1_RXDVS2), P_Fld(0, RK_B1_RXDVS2_R_RK0_RX_DLY_FAL_TRACK_GATE_ENA_B1)
																	| P_Fld(0, RK_B1_RXDVS2_R_RK0_RX_DLY_RIS_TRACK_GATE_ENA_B1)
																	| P_Fld(0, RK_B1_RXDVS2_R_RK0_DVS_MODE_B1));
	}
	vSetRank(p, backup_rank);

	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_CBT_WLEV_CTRL1), 0, CBT_WLEV_CTRL1_CATRAINLAT);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_CTRL1), 0, SWCMD_CTRL1_WRFIFO_MODE2);


	//Bringup setting review

	{
		U32 backup_broadcast = GetDramcBroadcast();
		DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);

		U8 u1DQ_HYST_SEL=0x1, u1CA_HYST_SEL=0x1;
		U8 u1DQ_CAP_SEL=0x18, u1CA_CAP_SEL=0x18;
		//Critical
		//APHY
		// Hysteresis: enabled at or below 933, off above.
		if(p->frequency<=933)
		{
			u1DQ_HYST_SEL = 0x1;
			u1CA_HYST_SEL = 0x1;
		}
		else
		{
			u1DQ_HYST_SEL = 0x0;
			u1CA_HYST_SEL = 0x0;
		}

		// Cap selection steps down as frequency rises; >2133 keeps the 0x18 default.
		if(p->frequency<=933)
		{
			u1DQ_CAP_SEL= 0x18;
			u1CA_CAP_SEL= 0x18;
		}
		else if(p->frequency<=1200)
		{
			u1DQ_CAP_SEL= 0x14;
			u1CA_CAP_SEL= 0x14;
		}
		else if(p->frequency<=1600)
		{
			u1DQ_CAP_SEL= 0x4;
			u1CA_CAP_SEL= 0x4;
		}
		else if(p->frequency<=2133)
		{
			u1DQ_CAP_SEL= 0x2;
			u1CA_CAP_SEL= 0x2;
		}

		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ6, P_Fld(u1DQ_HYST_SEL, SHU_B0_DQ6_RG_ARPI_HYST_SEL_B0)
													| P_Fld(u1DQ_CAP_SEL, SHU_B0_DQ6_RG_ARPI_CAP_SEL_B0));
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ6, P_Fld(u1DQ_HYST_SEL, SHU_B1_DQ6_RG_ARPI_HYST_SEL_B1)
													| P_Fld(u1DQ_CAP_SEL, SHU_B1_DQ6_RG_ARPI_CAP_SEL_B1));
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_CMD6, P_Fld(u1CA_HYST_SEL, SHU_CA_CMD6_RG_ARPI_HYST_SEL_CA)
													| P_Fld(u1CA_CAP_SEL, SHU_CA_CMD6_RG_ARPI_CAP_SEL_CA));

		//Jeremy
		// PSMUX cross-latch forcing only at >= 2133; CA SMT forcing only at <= 300.
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ2,P_Fld((p->frequency>=2133), SHU_B0_DQ2_RG_ARPI_PSMUX_XLATCH_FORCE_DQS_B0)
													| P_Fld((p->frequency>=2133), SHU_B0_DQ2_RG_ARPI_PSMUX_XLATCH_FORCE_DQ_B0)
													| P_Fld(0, SHU_B0_DQ2_RG_ARPI_SMT_XLATCH_FORCE_DQS_B0)
													| P_Fld(0, SHU_B0_DQ2_RG_ARPI_SMT_XLATCH_DQ_FORCE_B0));
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ2,P_Fld((p->frequency>=2133), SHU_B1_DQ2_RG_ARPI_PSMUX_XLATCH_FORCE_DQS_B1)
													| P_Fld((p->frequency>=2133), SHU_B1_DQ2_RG_ARPI_PSMUX_XLATCH_FORCE_DQ_B1)
													| P_Fld(0, SHU_B1_DQ2_RG_ARPI_SMT_XLATCH_FORCE_DQS_B1)
													| P_Fld(0, SHU_B1_DQ2_RG_ARPI_SMT_XLATCH_DQ_FORCE_B1));
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_CMD2,P_Fld((p->frequency>=2133), SHU_CA_CMD2_RG_ARPI_PSMUX_XLATCH_FORCE_CLK_CA)
													| P_Fld((p->frequency>=2133), SHU_CA_CMD2_RG_ARPI_PSMUX_XLATCH_FORCE_CA_CA)
													| P_Fld((p->frequency<=300), SHU_CA_CMD2_RG_ARPI_SMT_XLATCH_FORCE_CLK_CA)
													| P_Fld((p->frequency<=300), SHU_CA_CMD2_RG_ARPI_SMT_XLATCH_CA_FORCE_CA));

		//disable RX PIPE for RX timing pass
		vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_RX_PIPE_CTRL, 0x0, SHU_MISC_RX_PIPE_CTRL_RX_PIPE_BYPASS_EN);

		//Disable MD32 IRQ
		vIO32Write4B_All(DDRPHY_REG_MISC_DBG_IRQ_CTRL1, 0x0);
		vIO32Write4B_All(DDRPHY_REG_MISC_DBG_IRQ_CTRL4, 0x0);
		vIO32Write4B_All(DDRPHY_REG_MISC_DBG_IRQ_CTRL7, 0x0);

		//Disable NEW RX DCM mode
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_SHU_RX_CG_CTRL, P_Fld(0, MISC_SHU_RX_CG_CTRL_RX_DCM_WAIT_DLE_EXT_DLY)
															| P_Fld(2, MISC_SHU_RX_CG_CTRL_RX_DCM_EXT_DLY)
															| P_Fld(0, MISC_SHU_RX_CG_CTRL_RX_APHY_CTRL_DCM_OPT)
															| P_Fld(0, MISC_SHU_RX_CG_CTRL_RX_DCM_OPT));

		vIO32WriteFldAlign_All(DRAMC_REG_HMR4, 0, HMR4_MR4INT_LIMITEN);
		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL1, 0, REFCTRL1_REFPEND_OPT1);
		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL3, 0, REFCTRL3_REF_DERATING_EN);

		vIO32WriteFldMulti_All(DRAMC_REG_DRAMC_IRQ_EN, P_Fld(0x3fff, DRAMC_IRQ_EN_DRAMC_IRQ_EN_RSV)
													| P_Fld(0x0, DRAMC_IRQ_EN_MR4INT_EN));
		vIO32WriteFldAlign_All(DRAMC_REG_SHU_CONF0, 0, SHU_CONF0_PBREFEN);



		vIO32WriteFldAlign_All(DDRPHY_REG_CA_TX_MCK, 0x1, CA_TX_MCK_R_DMRESET_FRPHY_OPT);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFSCTL2, 0x1, MISC_DVFSCTL2_RG_ADA_MCK8X_EN_SHUFFLE);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_IMPCAL, 0x1, MISC_IMPCAL_IMPBINARY);

		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ10, P_Fld(0x1, SHU_B0_DQ10_RG_RX_ARDQS_DQSSTB_RPST_HS_EN_B0)
													| P_Fld(0x1, SHU_B0_DQ10_RG_RX_ARDQS_DQSSTB_CG_EN_B0));
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ10, P_Fld(0x1, SHU_B1_DQ10_RG_RX_ARDQS_DQSSTB_RPST_HS_EN_B1)
													| P_Fld(0x1, SHU_B1_DQ10_RG_RX_ARDQS_DQSSTB_CG_EN_B1));

		// Ignore clock-gating on the listed RX/rank pipes (B0 then B1).
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ8, P_Fld(1, SHU_B0_DQ8_R_DMRANK_CHG_PIPE_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMRANK_PIPE_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMDQSIEN_RDSEL_TOG_PIPE_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMDQSIEN_RDSEL_PIPE_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMDQSIEN_FLAG_PIPE_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMDQSIEN_FLAG_SYNC_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMRXDLY_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMRXDVS_RDSEL_TOG_PIPE_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMRXDVS_RDSEL_PIPE_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_RMRODTEN_CG_IG_B0)
													| P_Fld(1, SHU_B0_DQ8_R_DMRANK_RXDLY_PIPE_CG_IG_B0));
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ8, P_Fld(1, SHU_B1_DQ8_R_DMRANK_CHG_PIPE_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMRANK_PIPE_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMDQSIEN_RDSEL_TOG_PIPE_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMDQSIEN_RDSEL_PIPE_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMDQSIEN_FLAG_PIPE_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMDQSIEN_FLAG_SYNC_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMRXDLY_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMRXDVS_RDSEL_TOG_PIPE_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMRXDVS_RDSEL_PIPE_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_RMRODTEN_CG_IG_B1)
													| P_Fld(1, SHU_B1_DQ8_R_DMRANK_RXDLY_PIPE_CG_IG_B1));

		vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B0_DLL2, 0x1, SHU_B0_DLL2_RG_ARDQ_REV_B0);
		vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B1_DLL2, 0x1, SHU_B1_DLL2_RG_ARDQ_REV_B1);
		vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_DLL2, 0x1, SHU_CA_DLL2_RG_ARCMD_REV);	  //Jeremy

		#if 1
		//Follow DE - DRAMC
		//vIO32WriteFldAlign_All(DRAMC_REG_DDRCOMMON0, 1, DDRCOMMON0_DISSTOP26M);
		//vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A3, 1, TEST2_A3_TEST_AID_EN);
		//vIO32WriteFldAlign_All(DRAMC_REG_TEST2_A4, 0, TEST2_A4_TESTAGENTRKSEL);
		vIO32WriteFldAlign_All(DRAMC_REG_DUMMY_RD, 0, DUMMY_RD_DQSG_DMYRD_EN);
		vIO32WriteFldAlign_All(DRAMC_REG_DRAMC_DBG_SEL1, 0x1e, DRAMC_DBG_SEL1_DEBUG_SEL_0);
		vIO32WriteFldAlign_All(DRAMC_REG_SWCMD_CTRL2, 0x20, SWCMD_CTRL2_RTSWCMD_AGE);
		vIO32WriteFldAlign_All(DRAMC_REG_RTMRW_CTRL0, 0x20, RTMRW_CTRL0_RTMRW_AGE);


		vIO32WriteFldMulti_All(DRAMC_REG_DLLFRZ_CTRL, P_Fld(0, DLLFRZ_CTRL_DLLFRZ) | P_Fld(0, DLLFRZ_CTRL_DLLFRZ_MON_PBREF_OPT));
		vIO32WriteFldMulti_All(DRAMC_REG_MPC_CTRL, P_Fld(1, MPC_CTRL_RTSWCMD_HPRI_EN) | P_Fld(1, MPC_CTRL_RTMRW_HPRI_EN));
		vIO32WriteFldMulti_All(DRAMC_REG_HW_MRR_FUN, P_Fld(0, HW_MRR_FUN_R2MRRHPRICTL) | P_Fld(0, HW_MRR_FUN_TR2MRR_ENA));
		vIO32WriteFldMulti_All(DRAMC_REG_ACTIMING_CTRL, P_Fld(1, ACTIMING_CTRL_REFNA_OPT) | P_Fld(1, ACTIMING_CTRL_SEQCLKRUN3));
		vIO32WriteFldAlign_All(DRAMC_REG_CKECTRL, 1, CKECTRL_RUNTIMEMRRCKEFIX);
		vIO32WriteFldMulti_All(DRAMC_REG_DVFS_CTRL0, P_Fld(0, DVFS_CTRL0_DVFS_SYNC_MASK) | P_Fld(1, DVFS_CTRL0_R_DVFS_SREF_OPT));
		vIO32WriteFldAlign_All(DRAMC_REG_DVFS_TIMING_CTRL1, 1, DVFS_TIMING_CTRL1_SHU_PERIOD_GO_ZERO_CNT);
		vIO32WriteFldMulti_All(DRAMC_REG_HMR4, P_Fld(1, HMR4_REFRCNT_OPT)
											| P_Fld(0, HMR4_REFR_PERIOD_OPT)
											| P_Fld(1, HMR4_SPDR_MR4_OPT)//Resume from S0, trigger HW MR4
											| P_Fld(0, HMR4_HMR4_TOG_OPT));
		vIO32WriteFldAlign_All(DRAMC_REG_RX_SET0, 0, RX_SET0_SMRR_UPD_OLD);
		vIO32WriteFldAlign_All(DRAMC_REG_DRAMCTRL, 1, DRAMCTRL_SHORTQ_OPT);
		vIO32WriteFldAlign_All(DRAMC_REG_MISCTL0, 1, MISCTL0_REFP_ARBMASK_PBR2PBR_PA_DIS);
#if ENABLE_EARLY_BG_CMD==0
		vIO32WriteFldAlign_All(DRAMC_REG_PERFCTL0, 0, PERFCTL0_EBG_EN);
#endif
		vIO32WriteFldMulti_All(DRAMC_REG_CLKAR, P_Fld(1, CLKAR_REQQUECLKRUN) | P_Fld(0x7fff, CLKAR_REQQUE_PACG_DIS));
		vIO32WriteFldMulti_All(DRAMC_REG_REFCTRL0, P_Fld(0, REFCTRL0_PBREF_BK_REFA_ENA) | P_Fld(0, REFCTRL0_PBREF_BK_REFA_NUM));
		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL1, 0, REFCTRL1_REF_OVERHEAD_SLOW_REFPB_ENA);
		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL1, 0, REFCTRL1_REFPB2AB_IGZQCS);
		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL1, 1, REFCTRL1_REFPENDINGINT_OPT1); // @Darren, sync MP settings from Derping
		vIO32WriteFldAlign_All(DRAMC_REG_REF_BOUNCE1,5, REF_BOUNCE1_REFRATE_DEBOUNCE_TH);
		vIO32WriteFldAlign_All(DRAMC_REG_REFPEND2, 8, REFPEND2_MPENDREFCNT_TH8);
		vIO32WriteFldAlign_All(DRAMC_REG_SCSMCTRL, 0, SCSMCTRL_SC_PG_MAN_DIS);
		vIO32WriteFldMulti_All(DRAMC_REG_SCSMCTRL_CG, P_Fld(1, SCSMCTRL_CG_SCSM_CGAR)
													| P_Fld(1, SCSMCTRL_CG_SCARB_SM_CGAR));
		vIO32WriteFldAlign_All(DRAMC_REG_RTSWCMD_CNT, 0x30, RTSWCMD_CNT_RTSWCMD_CNT);
		vIO32WriteFldAlign_All(DRAMC_REG_DRAMC_IRQ_EN, 0x3fff, DRAMC_IRQ_EN_DRAMC_IRQ_EN_RSV);
		vIO32WriteFldAlign_All(DRAMC_REG_SHU_DCM_CTRL0, 1, SHU_DCM_CTRL0_DDRPHY_CLK_EN_OPT);
		vIO32WriteFldMulti_All(DRAMC_REG_SHU_HMR4_DVFS_CTRL0, P_Fld(0x1ff, SHU_HMR4_DVFS_CTRL0_REFRCNT) | P_Fld(0, SHU_HMR4_DVFS_CTRL0_FSPCHG_PRDCNT));
		vIO32WriteFldAlign_All(DRAMC_REG_SHU_HWSET_VRCG, 11, SHU_HWSET_VRCG_VRCGDIS_PRDCNT);
		vIO32WriteFldAlign_All(DRAMC_REG_SHU_MISC, 2, SHU_MISC_REQQUE_MAXCNT);


		//Follow DE - DDRPHY
		vIO32WriteFldMulti_All(DDRPHY_REG_B0_DLL_ARPI4, P_Fld(1, B0_DLL_ARPI4_RG_ARPI_BYPASS_SR_DQS_B0) | P_Fld(1, B0_DLL_ARPI4_RG_ARPI_BYPASS_SR_DQ_B0));
		vIO32WriteFldMulti_All(DDRPHY_REG_B1_DLL_ARPI4, P_Fld(1, B1_DLL_ARPI4_RG_ARPI_BYPASS_SR_DQS_B1) | P_Fld(1, B1_DLL_ARPI4_RG_ARPI_BYPASS_SR_DQ_B1));
		vIO32WriteFldMulti_All(DDRPHY_REG_CA_DLL_ARPI4, P_Fld(1, CA_DLL_ARPI4_RG_ARPI_BYPASS_SR_CLK_CA) | P_Fld(1, CA_DLL_ARPI4_RG_ARPI_BYPASS_SR_CA_CA));
		vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD11, P_Fld(0xa, CA_CMD11_RG_RRESETB_DRVN) | P_Fld(0xa, CA_CMD11_RG_RRESETB_DRVP));
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CG_CTRL2, 0x1f, MISC_CG_CTRL2_RG_MEM_DCM_IDLE_FSEL);

#if 1 // Darren- for DDR400 open loop mode disable
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CG_CTRL9, P_Fld(0, MISC_CG_CTRL9_RG_MCK4X_O_FB_CK_CG_OFF)
														| P_Fld(0, MISC_CG_CTRL9_RG_CG_DDR400_MCK4X_O_OFF)
														| P_Fld(0, MISC_CG_CTRL9_RG_MCK4X_O_OPENLOOP_MODE_EN)
														| P_Fld(0, MISC_CG_CTRL9_RG_MCK4X_Q_FB_CK_CG_OFF)
														| P_Fld(0, MISC_CG_CTRL9_RG_CG_DDR400_MCK4X_Q_OFF)
														| P_Fld(0, MISC_CG_CTRL9_RG_MCK4X_Q_OPENLOOP_MODE_EN)
														| P_Fld(0, MISC_CG_CTRL9_RG_MCK4X_I_FB_CK_CG_OFF)
														| P_Fld(0, MISC_CG_CTRL9_RG_CG_DDR400_MCK4X_I_OFF)
														| P_Fld(0, MISC_CG_CTRL9_RG_MCK4X_I_OPENLOOP_MODE_EN)
														| P_Fld(0, MISC_CG_CTRL9_RG_M_CK_OPENLOOP_MODE_EN));
#endif
		//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFSCTL, 1, MISC_DVFSCTL_R_SHUFFLE_PI_RESET_ENABLE);
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL2, P_Fld(1, MISC_DVFSCTL2_RG_ADA_MCK8X_EN_SHUFFLE)
														| P_Fld(0, MISC_DVFSCTL2_RG_DLL_SHUFFLE)); // Darren-

		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL3, P_Fld(0x10, MISC_DVFSCTL3_RG_CNT_PHY_ST_DELAY_AFT_CHG_TO_BCLK)
														| P_Fld(1, MISC_DVFSCTL3_RG_DVFS_MEM_CK_SEL_SOURCE)
														| P_Fld(3, MISC_DVFSCTL3_RG_DVFS_MEM_CK_SEL_DESTI)
														| P_Fld(1, MISC_DVFSCTL3_RG_PHY_ST_DELAY_BEF_CHG_TO_BCLK)
														| P_Fld(1, MISC_DVFSCTL3_RG_PHY_ST_DELAY_AFT_CHG_TO_MCLK));

		//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_SPM_DVFS_CONTROL_SEL);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DDR_RESERVE, 0xf, MISC_DDR_RESERVE_WDT_CONF_ISO_CNT);
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_IMP_CTRL1, P_Fld(1, MISC_IMP_CTRL1_RG_RIMP_SUS_ECO_OPT) | P_Fld(1, MISC_IMP_CTRL1_IMP_ABN_LAT_CLR));
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_IMPCAL, P_Fld(1, MISC_IMPCAL_IMPCAL_BYPASS_UP_CA_DRV)
													| P_Fld(1, MISC_IMPCAL_IMPCAL_DRVUPDOPT)
													| P_Fld(1, MISC_IMPCAL_IMPBINARY)
													| P_Fld(1, MISC_IMPCAL_DQDRVSWUPD)
													| P_Fld(0, MISC_IMPCAL_DRVCGWREF));

		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DUTYSCAN1, P_Fld(1, MISC_DUTYSCAN1_EYESCAN_DQS_OPT) | P_Fld(1, MISC_DUTYSCAN1_RX_EYE_SCAN_CG_EN));
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFS_EMI_CLK, 0, MISC_DVFS_EMI_CLK_RG_DLL_SHUFFLE_DDRPHY);
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CTRL0, P_Fld(0, MISC_CTRL0_IDLE_DCM_CHB_CDC_ECO_OPT)
													| P_Fld(1, MISC_CTRL0_IMPCAL_CDC_ECO_OPT)
													| P_Fld(1, MISC_CTRL0_IMPCAL_LP_ECO_OPT));

		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CTRL4, P_Fld(0, MISC_CTRL4_R_OPT2_CG_CS)
													| P_Fld(0, MISC_CTRL4_R_OPT2_CG_CLK)
													| P_Fld(0, MISC_CTRL4_R_OPT2_CG_CMD)
													| P_Fld(0, MISC_CTRL4_R_OPT2_CG_DQSIEN)
													| P_Fld(0, MISC_CTRL4_R_OPT2_CG_DQ)
													| P_Fld(0, MISC_CTRL4_R_OPT2_CG_DQS)
													| P_Fld(0, MISC_CTRL4_R_OPT2_CG_DQM)
													| P_Fld(0, MISC_CTRL4_R_OPT2_CG_MCK)
													| P_Fld(0, MISC_CTRL4_R_OPT2_MPDIV_CG));
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CTRL6, P_Fld(1, MISC_CTRL6_RG_ADA_MCK8X_EN_SHU_OPT) | P_Fld(1, MISC_CTRL6_RG_PHDET_EN_SHU_OPT));

		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RX_AUTOK_CFG0, 1, MISC_RX_AUTOK_CFG0_RX_CAL_CG_EN);

		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ1, P_Fld(1, SHU_B0_DQ1_RG_ARPI_MIDPI_BYPASS_EN_B0)
													| P_Fld(1, SHU_B0_DQ1_RG_ARPI_MIDPI_DUMMY_EN_B0)
													| P_Fld(1, SHU_B0_DQ1_RG_ARPI_8PHASE_XLATCH_FORCE_B0));
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ1, P_Fld(1, SHU_B1_DQ1_RG_ARPI_MIDPI_BYPASS_EN_B1)
													| P_Fld(1, SHU_B1_DQ1_RG_ARPI_MIDPI_DUMMY_EN_B1)
													| P_Fld(1, SHU_B1_DQ1_RG_ARPI_8PHASE_XLATCH_FORCE_B1));

		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B0_DQ10, P_Fld(1, SHU_B0_DQ10_RG_RX_ARDQS_BW_SEL_B0)
													| P_Fld(1, SHU_B0_DQ10_RG_RX_ARDQS_DQSSTB_RPST_HS_EN_B0)	//Critical ?
													| P_Fld(1, SHU_B0_DQ10_RG_RX_ARDQS_DQSSTB_CG_EN_B0));		//Critical ?
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_B1_DQ10, P_Fld(1, SHU_B1_DQ10_RG_RX_ARDQS_BW_SEL_B1)
													| P_Fld(1, SHU_B1_DQ10_RG_RX_ARDQS_DQSSTB_RPST_HS_EN_B1)	//Critical ?
													| P_Fld(1, SHU_B1_DQ10_RG_RX_ARDQS_DQSSTB_CG_EN_B1));		//Critical ?

		{
			// RX bandwidth selects: CLK path widened at low freq, DQ/CA paths at high freq.
			U8 u1DQ_BW_SEL_B0=0, u1DQ_BW_SEL_B1=0, u1CA_BW_SEL_CA=0, u1CLK_BW_SEL_CA=0;
			if (p->frequency <= 1200)
			{
				u1CLK_BW_SEL_CA = 1;
			}
			if (p->frequency >= 2133)
			{
				u1DQ_BW_SEL_B0 = 1;
				u1DQ_BW_SEL_B1 = 1;
				u1CA_BW_SEL_CA = 1;
			}
			vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B0_DQ11, u1DQ_BW_SEL_B0, SHU_B0_DQ11_RG_RX_ARDQ_BW_SEL_B0);
			vIO32WriteFldAlign_All(DDRPHY_REG_SHU_B1_DQ11, u1DQ_BW_SEL_B1, SHU_B1_DQ11_RG_RX_ARDQ_BW_SEL_B1);
			vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD11, u1CA_BW_SEL_CA, SHU_CA_CMD11_RG_RX_ARCA_BW_SEL_CA);
			vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD10, u1CLK_BW_SEL_CA, SHU_CA_CMD10_RG_RX_ARCLK_BW_SEL_CA);
		}

		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_CMD1, P_Fld(1, SHU_CA_CMD1_RG_ARPI_MIDPI_BYPASS_EN_CA) | P_Fld(1, SHU_CA_CMD1_RG_ARPI_MIDPI_DUMMY_EN_CA));
		//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD10, 1, SHU_CA_CMD10_RG_RX_ARCLK_DLY_LAT_EN_CA);
		vIO32WriteFldMulti_All(DDRPHY_REG_SHU_CA_CMD8, P_Fld(1, SHU_CA_CMD8_R_DMRANK_CHG_PIPE_CG_IG_CA)
													| P_Fld(1, SHU_CA_CMD8_R_DMRANK_PIPE_CG_IG_CA)
													| P_Fld(1, SHU_CA_CMD8_R_DMDQSIEN_RDSEL_TOG_PIPE_CG_IG_CA)
													| P_Fld(1, SHU_CA_CMD8_R_DMDQSIEN_RDSEL_PIPE_CG_IG_CA)
													| P_Fld(1, SHU_CA_CMD8_R_DMDQSIEN_FLAG_PIPE_CG_IG_CA)
													| P_Fld(1, SHU_CA_CMD8_R_DMDQSIEN_FLAG_SYNC_CG_IG_CA)
													| P_Fld(1, SHU_CA_CMD8_R_RMRX_TOPHY_CG_IG_CA)
													| P_Fld(1, SHU_CA_CMD8_R_RMRODTEN_CG_IG_CA));
		vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD12, 0, SHU_CA_CMD12_RG_RIMP_REV);


		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_SHU_IMPEDAMCE_UPD_DIS1, P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_CMD1_ODTN_UPD_DIS)
																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_CMD1_DRVN_UPD_DIS)
																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_CMD1_DRVP_UPD_DIS)
																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_CS_ODTN_UPD_DIS)
																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_CS_DRVN_UPD_DIS)
																	| P_Fld(1, MISC_SHU_IMPEDAMCE_UPD_DIS1_CS_DRVP_UPD_DIS));

		//Darren-vIO32WriteFldMulti_All(DDRPHY_REG_MISC_SHU_DVFSDLL, P_Fld(67, MISC_SHU_DVFSDLL_R_2ND_DLL_IDLE) | P_Fld(43, MISC_SHU_DVFSDLL_R_DLL_IDLE));

		//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_IMPCAL1, 0, SHU_MISC_IMPCAL1_IMPCALCNT);
		//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING2, 0, SHU_MISC_DRVING2_DIS_IMPCAL_ODT_EN);
		//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING6, 7, SHU_MISC_DRVING6_IMP_TXDLY_CMD);

		// NOTE(review): repeats the MISC_SHU_RX_CG_CTRL write from earlier with
		// identical values — presumably intentional ordering; confirm before removing.
		vIO32WriteFldMulti_All(DDRPHY_REG_MISC_SHU_RX_CG_CTRL, P_Fld(0, MISC_SHU_RX_CG_CTRL_RX_DCM_WAIT_DLE_EXT_DLY)
															| P_Fld(2, MISC_SHU_RX_CG_CTRL_RX_DCM_EXT_DLY)
															| P_Fld(0, MISC_SHU_RX_CG_CTRL_RX_APHY_CTRL_DCM_OPT)
															| P_Fld(0, MISC_SHU_RX_CG_CTRL_RX_DCM_OPT));
		#endif
		DramcBroadcastOnOff(backup_broadcast);
	}
}
1549 
1550 
/*
 * vApplyConfigBeforeCalibration - one-time DRAMC/PHY configuration applied
 * before any calibration flow runs.
 *
 * Disables all HW tracking engines (gating, RX delay, TX/DQSOSC, MR4 thermal
 * read, DBI) so calibration observes raw, untracked behavior, and programs
 * ZQ calibration timing/interleave. Whole body is compiled only when
 * __A60868_TO_BE_PORTING__ is set.
 *
 * @param p  DRAMC context; rank selection is restored before returning.
 */
void vApplyConfigBeforeCalibration(DRAMC_CTX_T *p)
{
#if __A60868_TO_BE_PORTING__

	U8 read_xrtw2w, shu_index;
	U8 u1RankIdx, u1RankIdxBak;
	u1RankIdxBak = u1GetRank(p);	// restored at end via vSetRank()

	//Clk free run {Move to Init_DRAM() and only call once}
#if (SW_CHANGE_FOR_SIMULATION == 0)
	EnableDramcPhyDCM(p, 0);
#endif

	//Set LP3/LP4 Rank0/1 CA/TX delay chain to 0
#if (FOR_DV_SIMULATION_USED == 0)
	//CA0~9 per bit delay line -> CHA_CA0 CHA_CA3 CHA_B0_DQ6 CHA_B0_DQ7 CHA_B0_DQ2 CHA_B0_DQ5 CHA_B0_DQ4 CHA_B0_DQ1 CHA_B0_DQ0 CHA_B0_DQ3
	vResetDelayChainBeforeCalibration(p);
#endif

	//MR4 refresh cnt set to 0x1ff (2ms update)
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_CONF3, 0x1ff, SHU_CONF3_REFRCNT);

	//The counter for Read MR4 cannot be reset after SREF if DRAMC no power down.
	vIO32WriteFldAlign_All(DRAMC_REG_SPCMDCTRL, 1, SPCMDCTRL_SRFMR4_CNTKEEP_B);

	//---- ZQ CS init --------
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_SCINTV, 0x1B, SHU_SCINTV_TZQLAT); //ZQ Calibration Time, unit: 38.46ns, tZQCAL min is 1 us. need to set larger than 0x1b
	//for(shu_index = DRAM_DFS_SHUFFLE_1; shu_index < DRAM_DFS_SHUFFLE_MAX; shu_index++)
		//vIO32WriteFldAlign_All(DRAMC_REG_SHU_CONF3 + SHU_GRP_DRAMC_OFFSET*shu_index, 0x1ff, SHU_CONF3_ZQCSCNT); //Every refresh number to issue ZQCS commands, only for DDR3/LPDDR2/LPDDR3/LPDDR4
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_CONF3, 0x1ff, SHU_CONF3_ZQCSCNT); //Every refresh number to issue ZQCS commands, only for DDR3/LPDDR2/LPDDR3/LPDDR4
	//vIO32WriteFldAlign_All(DRAMC_REG_SHU2_CONF3, 0x1ff, SHU_CONF3_ZQCSCNT); //Every refresh number to issue ZQCS commands, only for DDR3/LPDDR2/LPDDR3/LPDDR4
	//vIO32WriteFldAlign_All(DRAMC_REG_SHU3_CONF3, 0x1ff, SHU_CONF3_ZQCSCNT); //Every refresh number to issue ZQCS commands, only for DDR3/LPDDR2/LPDDR3/LPDDR4
	vIO32WriteFldAlign_All(DRAMC_REG_DRAMCTRL, 0, DRAMCTRL_ZQCALL);  // HW send ZQ command for both rank, disable it due to some dram only have 1 ZQ pin for two rank.

	//Dual channel ZQCS interlace,	0: disable, 1: enable
	if (p->support_channel_num == CHANNEL_SINGLE)
	{
		//single channel, ZQCSDUAL=0, ZQCSMASK=0
		vIO32WriteFldMulti(DRAMC_REG_ZQCS, P_Fld(0, ZQCS_ZQCSDUAL) | P_Fld(0x0, ZQCS_ZQCSMASK));
	}
	else if (p->support_channel_num == CHANNEL_DUAL)
	{
		// HW ZQ command is channel interleaving since 2 channel share the same ZQ pin.
		#ifdef ZQCS_ENABLE_LP4
		// dual channel, ZQCSDUAL =1, and CHA ZQCSMASK=0, CHB ZQCSMASK=1

		vIO32WriteFldMulti_All(DRAMC_REG_ZQCS, P_Fld(1, ZQCS_ZQCSDUAL) | \
											   P_Fld(0, ZQCS_ZQCSMASK_OPT) | \
											   P_Fld(0, ZQCS_ZQMASK_CGAR) | \
											   P_Fld(0, ZQCS_ZQCS_MASK_SEL_CGAR));

		/* DRAMC CHA(CHN0):ZQCSMASK=1, DRAMC CHB(CHN1):ZQCSMASK=0.
		 * ZQCSMASK setting: (Ch A, Ch B) = (1,0) or (0,1)
		 * if CHA.ZQCSMASK=1, and then set CHA.ZQCALDISB=1 first, else set CHB.ZQCALDISB=1 first
		 */
		vIO32WriteFldAlign(DRAMC_REG_ZQCS + (CHANNEL_A << POS_BANK_NUM), 1, ZQCS_ZQCSMASK);
		vIO32WriteFldAlign(DRAMC_REG_ZQCS + SHIFT_TO_CHB_ADDR, 0, ZQCS_ZQCSMASK);

		// DRAMC CHA(CHN0):ZQCS_ZQCS_MASK_SEL=0, DRAMC CHB(CHN1):ZQCS_ZQCS_MASK_SEL=0.
		vIO32WriteFldAlign_All(DRAMC_REG_ZQCS, 0, ZQCS_ZQCS_MASK_SEL);
		#endif
	}

	// Disable LP3 HW ZQ
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SPCMDCTRL), 0, SPCMDCTRL_ZQCSDISB);   //LP3 ZQCSDISB=0
	// Disable LP4 HW ZQ
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SPCMDCTRL), 0, SPCMDCTRL_ZQCALDISB);  //LP4 ZQCALDISB=0
	// ---- End of ZQ CS init -----

	//Disable write-DBI of DRAMC (Avoids pre-defined data pattern being modified)
	DramcWriteDBIOnOff(p, DBI_OFF);
	//Disable read-DBI of DRAMC (Avoids pre-defined data pattern being modified)
	DramcReadDBIOnOff(p, DBI_OFF);
	//disable MR4 read, REFRDIS=1
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SPCMDCTRL), 1, SPCMDCTRL_REFRDIS);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_DQSOSCR), 0x1, DQSOSCR_DQSOSCRDIS);  //MR18, MR19 Disable
	//for(shu_index = DRAM_DFS_SHUFFLE_1; shu_index < DRAM_DFS_SHUFFLE_MAX; shu_index++)
		//vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SHU_SCINTV) + SHU_GRP_DRAMC_OFFSET*shu_index, 0x1, SHU_SCINTV_DQSOSCENDIS);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SHU_SCINTV), 0x1, SHU_SCINTV_DQSOSCENDIS);
	//vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SHU2_SCINTV), 0x1, SHU2_SCINTV_DQSOSCENDIS);
	//vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_SHU3_SCINTV), 0x1, SHU3_SCINTV_DQSOSCENDIS);
	// Turn off all dummy-read engines so no background traffic disturbs calibration
	vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DRAMC_REG_DUMMY_RD), P_Fld(0x0, DUMMY_RD_DUMMY_RD_EN)
											| P_Fld(0x0, DUMMY_RD_SREF_DMYRD_EN)
											| P_Fld(0x0, DUMMY_RD_DQSG_DMYRD_EN)
											| P_Fld(0x0, DUMMY_RD_DMY_RD_DBG));

	// Disable HW gating tracking first, 0x1c0[31], need to disable both UI and PI tracking or the gating delay reg won't be valid.
	DramcHWGatingOnOff(p, 0);

	// Disable gating debug
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_STBCAL2), 0, STBCAL2_STB_GERRSTOP);

	// Per-rank: stop RX DVS tracking and force RX delay from registers
	for (u1RankIdx = RANK_0; u1RankIdx < RANK_MAX; u1RankIdx++)
	{
		vSetRank(p, u1RankIdx);

		// Disable RX delay tracking
		vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_R0_B0_RXDVS2), 0x0, R0_B0_RXDVS2_R_RK0_RX_DLY_RIS_TRACK_GATE_ENA_B0);
		vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_R0_B1_RXDVS2), 0x0, R0_B1_RXDVS2_R_RK0_RX_DLY_RIS_TRACK_GATE_ENA_B1);

		vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_R0_B0_RXDVS2), 0x0, R0_B0_RXDVS2_R_RK0_RX_DLY_FAL_TRACK_GATE_ENA_B0);
		vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_R0_B1_RXDVS2), 0x0, R0_B1_RXDVS2_R_RK0_RX_DLY_FAL_TRACK_GATE_ENA_B1);

		//RX delay mux, delay value from reg.
		vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_R0_B0_RXDVS2), 0x0, R0_B0_RXDVS2_R_RK0_DVS_MODE_B0);
		vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_R0_B1_RXDVS2), 0x0, R0_B1_RXDVS2_R_RK0_DVS_MODE_B1);
		vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_R0_CA_RXDVS2), 0x0, R0_CA_RXDVS2_R_RK0_DVS_MODE_CA);
	}
	vSetRank(p, u1RankIdxBak);

	// Set to all-bank refresh
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL0), 0, REFCTRL0_PBREFEN);

	// set MRSRK to 0, MPCRKEN always set 1 (Derping)
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_MRS), 0, MRS_MRSRK);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DRAMC_REG_MPC_OPTION), 1, MPC_OPTION_MPCRKEN);

	//RG mode
	vIO32WriteFldAlign_All(DDRPHY_B0_DQ6, 0x1, B0_DQ6_RG_RX_ARDQ_BIAS_PS_B0);
	vIO32WriteFldAlign_All(DDRPHY_B1_DQ6, 0x1, B1_DQ6_RG_RX_ARDQ_BIAS_PS_B1);
	vIO32WriteFldAlign_All(DDRPHY_CA_CMD6, 0x1, CA_CMD6_RG_RX_ARCMD_BIAS_PS);

#if ENABLE_RX_TRACKING
	DramcRxInputDelayTrackingInit_byFreq(p);
#endif

#ifdef LOOPBACK_TEST
#ifdef LPBK_INTERNAL_EN
	DramcLoopbackTest_settings(p, 0);	//0: internal loopback test 1: external loopback test
#else
	DramcLoopbackTest_settings(p, 1);	//0: internal loopback test 1: external loopback test
#endif
#endif

#if ENABLE_TMRRI_NEW_MODE
	SetCKE2RankIndependent(p);
#endif

#ifdef DUMMY_READ_FOR_TRACKING
	vIO32WriteFldAlign_All(DRAMC_REG_DUMMY_RD, 1, DUMMY_RD_DMY_RD_RX_TRACK);
#endif

	// Disable dynamic rank swap
	vIO32WriteFldAlign_All(DRAMC_REG_DRSCTRL, 1, DRSCTRL_DRSDIS);

#ifdef IMPEDANCE_TRACKING_ENABLE
	// set correct setting to control IMPCAL HW Tracking in shuffle RG
	// if p->freq >= 1333, enable IMP HW tracking(SHU_DRVING1_DIS_IMPCAL_HW=0), else SHU_DRVING1_DIS_IMPCAL_HW = 1
	U8 u1DisImpHw;
	U32 u4TermFreq;

#if (__LP5_COMBO__ == TRUE)
	if (TRUE == is_lp5_family(p))
		u4TermFreq = LP5_MRFSP_TERM_FREQ;
	else
#endif
		u4TermFreq = LP4_MRFSP_TERM_FREQ;

	// Terminated (high) frequencies use HW impedance tracking; unterminated do not
	u1DisImpHw = (p->frequency >= u4TermFreq)? 0: 1;
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING1, u1DisImpHw, SHU_MISC_DRVING1_DIS_IMPCAL_HW);
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING1, u1DisImpHw, SHU_MISC_DRVING1_DIS_IMP_ODTN_TRACK);
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_MISC_DRVING2, u1DisImpHw, SHU_MISC_DRVING2_DIS_IMPCAL_ODT_EN);
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD12, u1DisImpHw, SHU_CA_CMD12_RG_RIMP_UNTERM_EN);
#endif


#if SUPPORT_SAVE_TIME_FOR_CALIBRATION && RX_DELAY_PRE_CAL
	s2RxDelayPreCal = PASS_RANGE_NA; // reset variable for fast k test
#endif
#endif
}
1721 
/*
 * DramcInit_DutyCalibration - run the clock-duty calibration step of init.
 *
 * Broadcast mode is forced OFF around DramcNewDutyCalibration() (duty must be
 * tuned per channel) and restored afterwards. Optionally measures elapsed
 * time into gu4DutyCalibrationTime when DDR_INIT_TIME_PROFILING is defined.
 * No-op unless ENABLE_DUTY_SCAN_V2 is set.
 */
static void DramcInit_DutyCalibration(DRAMC_CTX_T *p)
{
#if ENABLE_DUTY_SCAN_V2
	U32 u4backup_broadcast= GetDramcBroadcast();	// restore caller's broadcast state on exit
#ifdef DDR_INIT_TIME_PROFILING
	U32 u4low_tick0, u4high_tick0, u4low_tick1, u4high_tick1;
#if __ETT__
	u4low_tick0 = GPT_GetTickCount(&u4high_tick0);
#else
	u4low_tick0 = get_timer(0);
#endif
#endif

	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
#ifndef DUMP_INIT_RG_LOG_TO_DE
	// Skip the actual calibration in RG-dump mode or when MDL is not in normal use
	if (Get_MDL_Used_Flag()==NORMAL_USED)
	{
		DramcNewDutyCalibration(p);
	}
#endif
	DramcBroadcastOnOff(u4backup_broadcast);

#ifdef DDR_INIT_TIME_PROFILING
#if __ETT__
	u4low_tick1 = GPT_GetTickCount(&u4high_tick1);
	// 76/1000000 converts GPT ticks to ms here — presumably a 13MHz-derived tick; TODO confirm
	gu4DutyCalibrationTime = ((u4low_tick1 - u4low_tick0) * 76) / 1000000;
#else
	u4low_tick1 = get_timer(u4low_tick0);
	gu4DutyCalibrationTime = u4low_tick1;
#endif
#endif
#endif
}
1755 
/*
 * SV_BroadcastOn_DramcInit - apply the SV (simulation-verified) assistance
 * settings for the selected frequency, then the static ANA/DIG PHY setup.
 *
 * Runs with broadcast ON so all channels get identical programming; broadcast
 * is switched OFF again before returning (caller relies on this).
 * LP4 path dispatches on p->frequency thresholds (>=2133 / >=1333 terminated,
 * >400 unterminated, else DDR800 semi-open); LP5 path dispatches on
 * p->freq_sel when __LP5_COMBO__ is built in.
 */
static void SV_BroadcastOn_DramcInit(DRAMC_CTX_T *p)
{


	//CInit_ConfigFromTBA();
	DramcBroadcastOnOff(DRAMC_BROADCAST_ON);

	//if(LPDDR4_EN_S && DramcConfig->freq_sel == LP4_DDR1600)
	if(!is_lp5_family(p))
	{
		if(p->frequency>=2133)	//Term
		{
			msg("sv_algorithm_assistance_LP4_4266 \n");
			sv_algorithm_assistance_LP4_4266(p);
		}
		else if(p->frequency>=1333)  //Term
		{
			msg("sv_algorithm_assistance_LP4_3733 \n");
			sv_algorithm_assistance_LP4_3733(p);
		}
		else if(p->frequency>400) //Unterm
		{
			msg("sv_algorithm_assistance_LP4_1600 \n");
			sv_algorithm_assistance_LP4_1600(p);
		}
		else /*if(p->frequency==400)*/	//DDR800 Semi-Open
		{
			//msg("CInit_golden_mini_freq_related_vseq_LP4_1600 \n");
			//CInit_golden_mini_freq_related_vseq_LP4_1600(p);
			//CInit_golden_mini_freq_related_vseq_LP4_1600_SHU1(DramcConfig);
			msg("sv_algorithm_assistance_LP4_800 \n");
			sv_algorithm_assistance_LP4_800(p);
		}
		/*else //DDR250 Open Loop (DV random seed not ready)
		{
			msg("sv_algorithm_assistance_LP4_250 \n");
			sv_algorithm_assistance_LP4_250(p);
		}*/
	}
	// NOTE: this #if-guarded else pairs with the if(!is_lp5_family(p)) above
	#if __LP5_COMBO__
	else
	{
		if(p->freq_sel==LP5_DDR4266)
		{
			msg("CInit_golden_mini_freq_related_vseq_LP5_4266 \n");
			CInit_golden_mini_freq_related_vseq_LP5_4266(p);
		}
		else if(p->freq_sel==LP5_DDR5500)
		{
			msg("CInit_golden_mini_freq_related_vseq_LP5_5500 \n");
			CInit_golden_mini_freq_related_vseq_LP5_5500(p);
		}
		else
		{
			msg("CInit_golden_mini_freq_related_vseq_LP5_3200 \n");
			CInit_golden_mini_freq_related_vseq_LP5_3200(p);
			CInit_golden_mini_freq_related_vseq_LP5_3200_SHU1(p);
		}
	}
	#endif

	// Static PHY bring-up: release reset, analog init, digital static settings
	RESETB_PULL_DN(p);
	ANA_init(p);
	DIG_STATIC_SETTING(p);
	DIG_CONFIG_SHUF(p,0,0); //temp ch0 group 0

	if(!is_lp5_family(p))
	{
		LP4_UpdateInitialSettings(p);
	}
	else
	{
		LP5_UpdateInitialSettings(p);
	}
	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
}
/*
 * DramcInit - top-level DRAM controller initialization.
 *
 * Sequence: SV-assisted PHY/controller init (broadcast handled inside),
 * reset calibration bookkeeping, force clock free-run, zero delay chains,
 * then per-rank0 init (8-phase cal, duty cal, LP4 mode-register init),
 * frequency meter, and AC timing update. Optionally applies canned
 * calibration results when BYPASS_CALIBRATION is set.
 *
 * @return DRAM_OK always (no failure path in this build).
 */
DRAM_STATUS_T DramcInit(DRAMC_CTX_T *p)
{
#ifdef FOR_HQA_REPORT_USED
	if (gHQALog_flag==1)
	{
		msg("[HQA] Log parsing, ");
		msg("\tDram Data rate = "); HQA_LOG_Print_Freq_String(p); msg("\n");
	}
#endif
	msg("MEM_TYPE=%d, freq_sel=%d\n", MEM_TYPE, p->freq_sel);
	SV_BroadcastOn_DramcInit(p); // @Darren, Broadcast Off after SV_BroadcastOn_DramcInit done
	#if PRINT_CALIBRATION_SUMMARY
	//default set DRAM status = NO K
	// NOTE: memset only uses the low byte of its value arg, so 0xffff fills with 0xFF
	memset(p->aru4CalResultFlag, 0xffff, sizeof(p->aru4CalResultFlag));
	memset(p->aru4CalExecuteFlag, 0, sizeof(p->aru4CalExecuteFlag));
	#if PRINT_CALIBRATION_SUMMARY_FASTK_CHECK
	memset(p->FastKResultFlag, 0xffff, sizeof(p->FastKResultFlag));
	memset(p->FastKExecuteFlag, 0, sizeof(p->FastKExecuteFlag));
	#endif
	#endif

	EnableDramcPhyDCM(p, DCM_OFF); //Let CLK always free-run
	vResetDelayChainBeforeCalibration(p);

	if(!is_lp5_family(p))
		DVFSSettings(p);

	// Rank 0 selected for the per-rank init steps below
	vSetRank(p, RANK_0);
		//LP4_DRAM_INIT(p);
		Dramc8PhaseCal(p);
		DramcInit_DutyCalibration(p);
		DramcModeRegInit_LP4(p);

	DDRPhyFreqMeter();

	DdrUpdateACTiming(p);

	memset(p->isWLevInitShift, FALSE, sizeof(p->isWLevInitShift));

	#if BYPASS_CALIBRATION
	if(p->freq_sel==LP4_DDR4266 || p->freq_sel==LP4_DDR3200)
	{
		Apply_LP4_4266_Calibraton_Result(p);
	}

	else if(p->freq_sel==LP4_DDR1600)
	{
		msg("BYPASS CALIBRATION LP4 1600 \n");
		Apply_LP4_1600_Calibraton_Result(p);
	}
	#endif

// Legacy flow kept for reference; compiled out (#if 0)
#if 0//__A60868_TO_BE_PORTING__

	U32 save_ch, dram_t; //Darren
	#if (!__ETT__ && !FOR_DV_SIMULATION_USED && SW_CHANGE_FOR_SIMULATION == 0)
	EMI_SETTINGS *emi_set; //Darren
	#endif
	U8 dram_cbt_mode;

	msg("\n[DramcInit]\n");

	vSetPHY2ChannelMapping(p, CHANNEL_A);

	//default set DRAM status = NO K
	memset(p->aru4CalResultFlag, 0xffff, sizeof(p->aru4CalResultFlag));
	memset(p->aru4CalExecuteFlag, 0, sizeof(p->aru4CalExecuteFlag));

	DramcSetting_Olympus_LP4_ByteMode(p);

	DramcInit_DutyCalibration(p);

	DramcModeRegInit_LP4(p);

	//DdrUpdateACTiming(p);

#if 0  //update refresh rate
	// for free-run clk 26MHz, 0x62 * (1/26) = 3.8ns
	vIO32WriteFldAlign_All(DRAMC_REG_DRAMC_PD_CTRL, 0x62, DRAMC_PD_CTRL_REFCNT_FR_CLK);
	// for non-fre-run clk,  reg = 3.8 ns * f / 4 / 16;
	u4RefreshRate = 38 * p->frequency / 640;
	vIO32WriteFldAlign_All(DRAMC_REG_CONF2, u4RefreshRate, CONF2_REFCNT);
#endif

#if (fcFOR_CHIP_ID == fcLafite)
	// For kernel api for check LPDDR3/4/4X (Darren), only for fcOlympus and fcElbrus.
	// For Other chip, please confirm the register is free for SW use.
	save_ch = vGetPHY2ChannelMapping(p);
	vSetPHY2ChannelMapping(p, CHANNEL_A);

	switch (p->dram_type)
	{
		case TYPE_LPDDR4:
			dram_t = 2;
			break;
		case TYPE_LPDDR4X:
			dram_t = 3;
			break;
		case TYPE_LPDDR4P:
			dram_t = 4;
			break;
		default:
			dram_t = 0;
			err("Incorrect DRAM Type!\n");
			break;
	}
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_ARBCTL), dram_t, ARBCTL_RSV_DRAM_TYPE);

	// For DRAM normal, byte and mixed mode
	if ((p->dram_cbt_mode[RANK_0] == CBT_NORMAL_MODE) && (p->dram_cbt_mode[RANK_1] == CBT_NORMAL_MODE))
		dram_cbt_mode = CBT_R0_R1_NORMAL;
	else if ((p->dram_cbt_mode[RANK_0] == CBT_BYTE_MODE1) && (p->dram_cbt_mode[RANK_1] == CBT_BYTE_MODE1))
		dram_cbt_mode = CBT_R0_R1_BYTE;
	else if ((p->dram_cbt_mode[RANK_0] == CBT_NORMAL_MODE) && (p->dram_cbt_mode[RANK_1] == CBT_BYTE_MODE1))
		dram_cbt_mode = CBT_R0_NORMAL_R1_BYTE;
	else if ((p->dram_cbt_mode[RANK_0] == CBT_BYTE_MODE1) && (p->dram_cbt_mode[RANK_1] == CBT_NORMAL_MODE))
		dram_cbt_mode = CBT_R0_BYTE_R1_NORMAL;
	else
		dram_cbt_mode = CBT_R0_R1_NORMAL;

	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_RSTMASK), dram_cbt_mode, RSTMASK_RSV_DRAM_CBT_MIXED);

	// Sagy: Keep original setting till OS kernel ready, if ready, remove it
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_ARBCTL), (p->dram_cbt_mode[RANK_0] | p->dram_cbt_mode[RANK_1]), ARBCTL_RSV_DRAM_CBT);

	vSetPHY2ChannelMapping(p, save_ch);
#endif

	msg3("[DramcInit] Done\n");
#endif//__A60868_TO_BE_PORTING__
	return DRAM_OK;
}
1964 
1965 #if ENABLE_TMRRI_NEW_MODE
/*
 * SetCKE2RankIndependent - select per-rank-independent CKE control.
 *
 * NOTE(review): the inner #if tests the same macro as the #if guarding this
 * whole function, so the #else (dependent-CKE) branch can never compile here;
 * possibly a different macro was intended — confirm before cleanup.
 */
void SetCKE2RankIndependent(DRAMC_CTX_T *p)
{
	#if ENABLE_TMRRI_NEW_MODE//Newly added CKE control mode API
	msg("SET_CKE_2_RANK_INDEPENDENT_RUN_TIME: ON\n");
	vCKERankCtrl(p, CKE_RANK_INDEPENDENT);
	#else //Legacy individual CKE control register settings
	msg("SET_CKE_2_RANK_INDEPENDENT_RUN_TIME: OFF\n");
	vCKERankCtrl(p, CKE_RANK_DEPENDENT);
	#endif
}
1976 #endif
1977 
1978 
1979 #if ENABLE_WRITE_DBI
/*
 * EnableDRAMModeRegWriteDBIAfterCalibration - program the DRAM-side write-DBI
 * mode register for every channel/rank/FSP once calibration is complete.
 *
 * For each FSP the target set point is selected via MR13.FSP_WR before the
 * write-DBI on/off setting (p->DBI_W_onoff[fsp]) is written. The channel and
 * rank selections active on entry are restored before returning.
 */
void EnableDRAMModeRegWriteDBIAfterCalibration(DRAMC_CTX_T *p)
{
	U8 u1ChBak = p->channel;	// selection state to restore on exit
	U8 u1RkBak = p->rank;
	U8 u1Ch, u1Rk, u1Fsp;

	for (u1Ch = CHANNEL_A; u1Ch < p->support_channel_num; u1Ch++)
	{
		vSetPHY2ChannelMapping(p, u1Ch);

		for (u1Rk = RANK_0; u1Rk < p->support_rank_num; u1Rk++)
		{
			vSetRank(p, u1Rk);

			for (u1Fsp = FSP_0; u1Fsp < p->support_fsp_num; u1Fsp++)
			{
				// Point MR writes at this FSP, then apply its write-DBI policy
				DramcMRWriteFldAlign(p, 13, u1Fsp, MR13_FSP_WR, TO_MR);
				SetDramModeRegForWriteDBIOnOff(p, u1Fsp, p->DBI_W_onoff[u1Fsp]);
			}
		}
	}

	vSetRank(p, u1RkBak);
	vSetPHY2ChannelMapping(p, u1ChBak);
}
2005 #endif
2006 
2007 #if ENABLE_READ_DBI
/*
 * EnableDRAMModeRegReadDBIAfterCalibration - program the DRAM-side read-DBI
 * mode register for every channel/rank/FSP once calibration is complete.
 *
 * For each FSP the target set point is selected via MR13.FSP_WR before the
 * read-DBI on/off setting (p->DBI_R_onoff[fsp]) is written, then the RD2MRR/
 * MRR2RD handling knobs are enabled (DVT item). Channel and rank selections
 * active on entry are restored before returning.
 *
 * Fix: removed the unused local `S8 u1ShuffleIdx` (declared, never read).
 */
void EnableDRAMModeRegReadDBIAfterCalibration(DRAMC_CTX_T *p)
{
	U8 channel_idx, rank_idx;
	U8 ch_backup, rank_backup, u1FSPIdx = 0;

	ch_backup = p->channel;		// restored on exit
	rank_backup = p->rank;

	for (channel_idx = CHANNEL_A; channel_idx < p->support_channel_num; channel_idx++)
	{
		vSetPHY2ChannelMapping(p, channel_idx);
		for (rank_idx = RANK_0; rank_idx < p->support_rank_num; rank_idx++)
		{
			vSetRank(p, rank_idx);
			for (u1FSPIdx = FSP_0; u1FSPIdx < p->support_fsp_num; u1FSPIdx++)
			{
				// Point MR writes at this FSP, then apply its read-DBI policy
				DramcMRWriteFldAlign(p, 13, u1FSPIdx, MR13_FSP_WR, TO_MR);
				SetDramModeRegForReadDBIOnOff(p, u1FSPIdx, p->DBI_R_onoff[u1FSPIdx]);
			}
		}
	}

	//[Ei_ger] DVT item RD2MRR & MRR2RD
	vIO32WriteFldMulti_All(DRAMC_REG_HW_MRR_FUN, P_Fld(0x1, HW_MRR_FUN_TR2MRR_ENA)
										   | P_Fld(0x1, HW_MRR_FUN_R2MRRHPRICTL)
										   | P_Fld(0x1, HW_MRR_FUN_MANTMRR_EN));

	vSetRank(p, rank_backup);
	vSetPHY2ChannelMapping(p, ch_backup);
}
2039 #endif
2040 
2041 
/*
 * SetMr13VrcgToNormalOperationShuffle - clear the VRCG bit (bit 3) in the
 * MR13 operand that the DVFS HW issues at shuffle, for the currently
 * selected shuffle register set only.
 */
static void SetMr13VrcgToNormalOperationShuffle(DRAMC_CTX_T *p)//Only set DRAM_DFS_SHUFFLE_1
{
	//DVFS MRW: read back HW MR13 operand, drop bit 3 (VRCG), write to all
	U32 u4OpVal = u4IO32ReadFldAlign(DRAMC_REG_SHU_HWSET_VRCG, SHU_HWSET_VRCG_HWSET_VRCG_OP);

	u4OpVal &= ~(0x1 << 3);
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_HWSET_VRCG, u4OpVal, SHU_HWSET_VRCG_HWSET_VRCG_OP);
}
2052 
/*
 * SetMr13VrcgToNormalOperation - write MR13.VRCG = 0 (normal operation) to
 * every rank of every supported channel, restoring the channel/rank
 * selection that was active on entry.
 */
void SetMr13VrcgToNormalOperation(DRAMC_CTX_T *p)
{
	DRAM_CHANNEL_T eChBak = vGetPHY2ChannelMapping(p);	// restore on exit
	DRAM_RANK_T eRkBak = u1GetRank(p);
	U8 u1Ch, u1Rk;

#if MRW_CHECK_ONLY
	mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
#endif

	for (u1Ch = CHANNEL_A; u1Ch < p->support_channel_num; u1Ch++)
	{
		vSetPHY2ChannelMapping(p, u1Ch);

		//To DRAM: MR13[3] = 0
		for (u1Rk = 0; u1Rk < p->support_rank_num; u1Rk++)
		{
			vSetRank(p, u1Rk);
			DramcMRWriteFldAlign(p, 13, 0, MR13_VRCG, TO_MR);
		}
	}

	vSetPHY2ChannelMapping(p, (U8)eChBak);
	vSetRank(p, (U8)eRkBak);
}
2080 
/*
 * DramcShuTrackingDcmEnBySRAM - enable the per-shuffle tracking/DCM settings
 * in every DFS shuffle level, writing the SRAM-held shuffle copies through
 * the APB debug-access window.
 *
 * Non-A60868 path: backs up the SRAM-DMA control registers, then for each
 * SRAM shuffle level maps that level into the conf SHU1 window (debug mode)
 * and applies TX/RDSEL/gating tracking, RX DCM, latch-enable, WDQS and DCM
 * shuffle settings; the final iteration (u1ShuffleIdx==DRAM_DFS_SRAM_MAX)
 * restores the registers and writes the live conf SHU0 instead.
 * Ordering of the DMA0/DMA1 writes is part of the HW access protocol —
 * do not reorder.
 */
static void DramcShuTrackingDcmEnBySRAM(DRAMC_CTX_T *p)
{
#if (fcFOR_CHIP_ID == fcA60868)
	U8 u1ShuffleIdx, ShuRGAccessIdxBak;
	U32 u4DramcShuOffset = 0;
	U32 u4DDRPhyShuOffset = 0;

	ShuRGAccessIdxBak = p->ShuRGAccessIdx;	// restored after the loop
	msg("\n==[DramcShuTrackingDcmEnBySRAM]==\n");
	for (u1ShuffleIdx = 0; u1ShuffleIdx <= 1; u1ShuffleIdx++) //fill SHU1 of conf while (u1ShuffleIdx==DRAM_DFS_SRAM_MAX)
	{
		//Aceess DMA SRAM by APB bus use debug mode by conf SHU3
		u4DramcShuOffset = 0;
		u4DDRPhyShuOffset = 0;
		p->ShuRGAccessIdx = u1ShuffleIdx;
		#ifdef HW_GATING
		//DramcHWGatingOnOff(p, 1, u4DramcShuOffset); // Enable HW gating tracking
		#endif

		#if ENABLE_TX_TRACKING
		Enable_TX_Tracking(p, u4DramcShuOffset);
		#endif

		#if RDSEL_TRACKING_EN
		Enable_RDSEL_Tracking(p, u4DramcShuOffset);
		#endif

		#ifdef HW_GATING
		Enable_Gating_Tracking(p, u4DDRPhyShuOffset); // Enable HW gating tracking
		#endif
	}
	p->ShuRGAccessIdx = ShuRGAccessIdxBak;
#else
	DRAM_DFS_FREQUENCY_TABLE_T *pFreqTable = p->pDFSTable; // from dramc conf shu0
	U8 u1ShuffleIdx;
	U32 u4DramcShuOffset = 0;
	U32 u4DDRPhyShuOffset = 0;
	U16 u2Freq = 0;

	// SRAM-DMA control registers saved/restored around the debug-mode access
	U32 u4RegBackupAddress[] =
	{
		(DDRPHY_REG_MISC_SRAM_DMA0),
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHB_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHB_ADDR),
#if (CHANNEL_NUM==4)
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHC_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHD_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHC_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHD_ADDR),
#endif
	};

	//Backup regs
	DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));

	#if (ENABLE_TX_TRACKING && TX_RETRY_ENABLE)
	Enable_and_Trigger_TX_Retry(p);
	#endif

	//Aceess DMA SRAM by APB bus use debug mode by conf SHU3
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x0, MISC_SRAM_DMA0_APB_SLV_SEL);//before setting
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, 0x1, MISC_SRAM_DMA1_R_APB_DMA_DBG_ACCESS);

	for (u1ShuffleIdx = 0; u1ShuffleIdx <= DRAM_DFS_SRAM_MAX; u1ShuffleIdx++) //fill SHU1 of conf while (u1ShuffleIdx==DRAM_DFS_SRAM_MAX)
	{
		if (u1ShuffleIdx == DRAM_DFS_SRAM_MAX)
		{
			//for SHU0 restore to SRAM
			vSetDFSTable(p, pFreqTable);//Restore DFS table
			//Restore regs, or SHU0 RG cannot be set
			DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
			p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;//Since access conf SHU0
		}
		else
		{
			//Aceess DMA SRAM by APB bus use debug mode by conf SHU1
			vSetDFSTable(p, get_FreqTbl_by_shuffleIndex(p, u1ShuffleIdx));//Update DFS table
			u2Freq = GetFreqBySel(p, p->pDFSTable->freq_sel);
			vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x0, MISC_SRAM_DMA0_APB_SLV_SEL);//before setting
			vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, u1ShuffleIdx, MISC_SRAM_DMA1_R_APB_DMA_DBG_LEVEL);
			//APB bus use debug mode by conf SHU1
			vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x1, MISC_SRAM_DMA0_APB_SLV_SEL);//Trigger DEBUG MODE
			p->ShuRGAccessIdx = DRAM_DFS_REG_SHU1;
		}
		// add your function
		// For example: EnableDramcPhyDCMShuffle(p, enable_dcm, u4DramcShuOffset, u4DDRPhyShuOffset, u1ShuffleIdx);
#if ENABLE_TX_TRACKING
		Enable_TX_Tracking(p, u4DramcShuOffset);
#endif
#if RDSEL_TRACKING_EN
		Enable_RDSEL_Tracking(p, u4DramcShuOffset);
#endif
#ifdef HW_GATING
		Enable_Gating_Tracking(p, u4DDRPhyShuOffset); // Enable HW gating tracking
#endif
#if ENABLE_RX_DCM_DPHY
		EnableRxDcmDPhy(p, u4DDRPhyShuOffset, u2Freq);
#endif
		Enable_ClkTxRxLatchEn(p, u4DDRPhyShuOffset); // for new xrank mode
#if ENABLE_TX_WDQS // @Darren, To avoid unexpected DQS toggle during calibration
		Enable_TxWDQS(p, u4DDRPhyShuOffset, u2Freq);
#endif

#if (SW_CHANGE_FOR_SIMULATION == 0)
#if APPLY_LOWPOWER_GOLDEN_SETTINGS
		int enable_dcm = (doe_get_config("dramc_dcm")) ? 0 : 1;
		EnableDramcPhyDCMShuffle(p, enable_dcm, u4DramcShuOffset, u4DDRPhyShuOffset);
#else
		EnableDramcPhyDCMShuffle(p, 0, u4DramcShuOffset, u4DDRPhyShuOffset);
#endif
#endif
		SetMr13VrcgToNormalOperationShuffle(p);
		p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
	}
#endif
}
2198 
2199 #if (ENABLE_PER_BANK_REFRESH == 1)
/*
 * DramcEnablePerBankRefresh - enable/disable per-bank refresh.
 *
 * @param en  true: enable per-bank refresh in the mode selected at compile
 *            time by PER_BANK_REFRESH_USE_MODE (0 original, 1 hybrid,
 *            2 always per-bank); false: leave REFCTRL0 untouched and only
 *            clear the shuffle PBREFEN.
 * When IMP_TRACKING_PB_TO_AB_REFRESH_WA is set, the shuffle PBREFEN is
 * forced 0 regardless of `en` (impedance-tracking workaround).
 */
void DramcEnablePerBankRefresh(DRAMC_CTX_T *p, bool en)
{
	if (en)
	{
		vIO32WriteFldMulti_All(DRAMC_REG_REFCTRL0, P_Fld(1, REFCTRL0_PBREF_BK_REFA_ENA) | P_Fld(2, REFCTRL0_PBREF_BK_REFA_NUM));

	#if PER_BANK_REFRESH_USE_MODE==0
			vIO32WriteFldMulti_All(DRAMC_REG_REFCTRL0, P_Fld(0, REFCTRL0_KEEP_PBREF) | P_Fld(0, REFCTRL0_KEEP_PBREF_OPT)); //Original mode
			msg("\tPER_BANK_REFRESH: Original Mode\n");
	#endif

	#if PER_BANK_REFRESH_USE_MODE==1
			vIO32WriteFldMulti_All(DRAMC_REG_REFCTRL0, P_Fld(0, REFCTRL0_KEEP_PBREF) | P_Fld(1, REFCTRL0_KEEP_PBREF_OPT)); //Hybrid mode
			msg("\tPER_BANK_REFRESH: Hybrid Mode\n");
	#endif

	#if PER_BANK_REFRESH_USE_MODE==2
			vIO32WriteFldMulti_All(DRAMC_REG_REFCTRL0, P_Fld(1, REFCTRL0_KEEP_PBREF) | P_Fld(0, REFCTRL0_KEEP_PBREF_OPT)); //Always per-bank mode
			msg("\tPER_BANK_REFRESH: Always Per-Bank Mode\n");
	#endif

		// Ignore ZQCS when switching per-bank -> all-bank refresh
		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL1, 1, REFCTRL1_REFPB2AB_IGZQCS);
	}

	#if IMP_TRACKING_PB_TO_AB_REFRESH_WA
	// disable all shu pb-ref
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_CONF0, 0, SHU_CONF0_PBREFEN);
	#else
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_CONF0, en, SHU_CONF0_PBREFEN);
	#endif
}
2231 #endif
2232 
2233 #ifdef TEMP_SENSOR_ENABLE
/*
 * DramcHMR4_Presetting - per-channel setup of the HW MR4 (thermal) read
 * engine: refresh-period options, DVFS refresh count, byte-mode matching
 * the DRAM's CBT mode, and a clear of the refresh-rate monitor record.
 * Restores the channel selection that was active on entry.
 */
void DramcHMR4_Presetting(DRAMC_CTX_T *p)
{
	U8 u1ChBak = p->channel;	// restore on exit
	U8 u1ChIdx;

	for (u1ChIdx = CHANNEL_A; u1ChIdx < p->support_channel_num; u1ChIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChIdx);

		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HMR4), 1, HMR4_REFR_PERIOD_OPT);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HMR4), 0, HMR4_REFRCNT_OPT);	// 0: 3.9us * cnt, 1: 15.6us * cnt
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_HMR4_DVFS_CTRL0), 0x80, SHU_HMR4_DVFS_CTRL0_REFRCNT);

		// Byte-mode MR4 readout only when the DRAM runs CBT byte mode
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HMR4),
						   (vGet_Dram_CBT_Mode(p) == CBT_BYTE_MODE1) ? 1 : 0,
						   HMR4_HMR4_BYTEMODE_EN);

		// Pulse MON_CLR (0 -> 1 -> 0) to clear the refresh-rate record
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL1), 0, REFCTRL1_REFRATE_MON_CLR);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL1), 1, REFCTRL1_REFRATE_MON_CLR);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_REFCTRL1), 0, REFCTRL1_REFRATE_MON_CLR);
	}

	vSetPHY2ChannelMapping(p, u1ChBak);
}
2264 #endif
2265 
/*
 * SwitchHMR4 - turn the HW MR4 (thermal/refresh-rate) read engine on or off.
 *
 * Programs the MR4 interrupt thresholds (9 for LP5 builds, 5 for LP4 —
 * presumably matching the families' refresh-rate encodings; confirm against
 * the datasheet), selects whether both ranks are polled, and finally gates
 * the engine via HMR4_REFRDIS (note the inversion: en=true -> REFRDIS=0).
 *
 * @param en  true: enable HW MR4 reads (both ranks when dual-rank);
 *            false: disable (rank0-only polling left configured for
 *            manual/SW MRR use).
 */
static void SwitchHMR4(DRAMC_CTX_T *p, bool en)
{
#ifdef __LP5_COMBO__
	if (is_lp5_family(p))
	{
		vIO32WriteFldAlign_All(DRAMC_REG_REF_BOUNCE2, 9, REF_BOUNCE2_PRE_MR4INT_TH);

		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL2, 9, REFCTRL2_MR4INT_TH);

	}
	else
#endif
	{
		vIO32WriteFldAlign_All(DRAMC_REG_REF_BOUNCE2, 5, REF_BOUNCE2_PRE_MR4INT_TH);

		vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL2, 5, REFCTRL2_MR4INT_TH);
	}

	// TOG_OPT, 0: Read rank0 only, 1: read both rank0 and rank1
	if (en && p->support_rank_num == RANK_DUAL)
		vIO32WriteFldAlign_All(DRAMC_REG_HMR4, 1, HMR4_HMR4_TOG_OPT);	// Read both rank0 and rank1
	else
		vIO32WriteFldAlign_All(DRAMC_REG_HMR4, 0, HMR4_HMR4_TOG_OPT);	// Read rank0 only (need for manual/SW MRR)

	// REFRDIS is active-high "disable", hence !en
	vIO32WriteFldAlign_All(DRAMC_REG_HMR4, !en, HMR4_REFRDIS);

#if 0	// Reading HMR4 repeatedly for debugging
	while(1)
	{
		msg("@@ --------------------\n");
		msg("@@ MISC_STATUSA_REFRESH_RATE: %d\n",
			u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MISC_STATUSA), MISC_STATUSA_REFRESH_RATE));
		msg("@@ MIN: %d, MAX: %d\n",
			u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON), HW_REFRATE_MON_REFRESH_RATE_MIN_MON),
			u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON), HW_REFRATE_MON_REFRESH_RATE_MAX_MON));

		// if HMR4_HMR4_TOG_OPT == 1
		{
			msg("@@ 		MIN    MAX\n");
			msg("@@ RK0_B0:  %d 	%d\n",
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON3), HW_REFRATE_MON3_REFRESH_RATE_MIN_MON_RK0_B0),
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON3), HW_REFRATE_MON3_REFRESH_RATE_MAX_MON_RK0_B0));
			msg("@@ RK1_B0:  %d 	%d\n",
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON3), HW_REFRATE_MON3_REFRESH_RATE_MIN_MON_RK1_B0),
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON3), HW_REFRATE_MON3_REFRESH_RATE_MAX_MON_RK1_B0));
			msg("@@ RK0_B1:  %d 	%d\n",
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON2), HW_REFRATE_MON2_REFRESH_RATE_MIN_MON_RK0_B1),
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON2), HW_REFRATE_MON2_REFRESH_RATE_MAX_MON_RK0_B1));
			msg("@@ RK1_B1:  %d 	%d\n",
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON2), HW_REFRATE_MON2_REFRESH_RATE_MIN_MON_RK1_B1),
				u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_HW_REFRATE_MON2), HW_REFRATE_MON2_REFRESH_RATE_MAX_MON_RK1_B1));
		}

		msg("@@ Wait to measure!!\n\n");
		Sleep(500);
	}
#endif
}
2324 
2325 #if ENABLE_REFRESH_RATE_DEBOUNCE
/*
 * DramcRefreshRateDeBounceEnable - configure the MR4 refresh-rate de-bounce
 * filter in one REF_BOUNCE1 write. Note DEBOUNCE_DIS=0xff1f sets all disable
 * bits, so the filter is effectively parameterized but left disabled here.
 */
static void DramcRefreshRateDeBounceEnable(DRAMC_CTX_T *p)
{
	vIO32WriteFldMulti_All(DRAMC_REG_REF_BOUNCE1,
			P_Fld(0xff1f, REF_BOUNCE1_REFRATE_DEBOUNCE_DIS) |	//all bits set 1 to disable debounce function
			P_Fld(0, REF_BOUNCE1_REFRATE_DEBOUNCE_OPT) |
			P_Fld(5, REF_BOUNCE1_REFRATE_DEBOUNCE_TH) |	// MR4 value >= 0.5X refresh rate, then de-bounce count active
			P_Fld(0x4, REF_BOUNCE1_REFRATE_DEBOUNCE_COUNT));	// when De-bounce counter >= this count, then dramc apply new dram's MR4 value
}
2333 #endif
2334 
2335 #if DRAMC_MODIFIED_REFRESH_MODE
/*
 * DramcModifiedRefreshMode - program the modified-refresh pending-count
 * thresholds (TH0..TH7) and enable the modified refresh pending options,
 * with the shuffle pending-refresh count set to 4.
 */
void DramcModifiedRefreshMode(DRAMC_CTX_T *p)
{
	// Pending-refresh count thresholds, one per refresh-rate step
	vIO32WriteFldMulti_All(DRAMC_REG_REFPEND1, P_Fld(3, REFPEND1_MPENDREFCNT_TH7)
											| P_Fld(3, REFPEND1_MPENDREFCNT_TH6)
											| P_Fld(3, REFPEND1_MPENDREFCNT_TH5)
											| P_Fld(5, REFPEND1_MPENDREFCNT_TH4)
											| P_Fld(5, REFPEND1_MPENDREFCNT_TH3)
											| P_Fld(4, REFPEND1_MPENDREFCNT_TH2)
											| P_Fld(2, REFPEND1_MPENDREFCNT_TH1)
											| P_Fld(2, REFPEND1_MPENDREFCNT_TH0));

	// Enable both pending-refresh options in a single RMW
	vIO32WriteFldMulti_All(DRAMC_REG_REFCTRL1, P_Fld(1, REFCTRL1_REFPEND_OPT2) | P_Fld(1, REFCTRL1_REFPEND_OPT1));

	vIO32WriteFldAlign_All(DRAMC_REG_SHU_REF0, 4, SHU_REF0_MPENDREF_CNT);
}
2349 #endif
2350 
2351 #if DRAMC_CKE_DEBOUNCE
/*
 * DramcCKEDebounce - set the CKE de-bounce counter on every rank.
 *
 * Only applied at data rates where p->frequency >= 1866; for each supported
 * rank the per-rank SHURK_CKE_CTRL de-bounce count is written (all channels),
 * then the originally-selected rank is restored.
 */
void DramcCKEDebounce(DRAMC_CTX_T *p)
{
	const U8 u1DebounceCnt = 15;
	U8 u1RankBak;
	U8 u1RankIdx;

	if (p->frequency < 1866)
	{
		return; // de-bounce only needed at high data rates
	}

	u1RankBak = p->rank;
	for (u1RankIdx = 0; u1RankIdx < p->support_rank_num; u1RankIdx++)
	{
		vSetRank(p, u1RankIdx);
		vIO32WriteFldAlign_All(DRAMC_REG_SHURK_CKE_CTRL, u1DebounceCnt, SHURK_CKE_CTRL_CKE_DBE_CNT);
		msg("CKE Debounce cnt = %d\n", u1DebounceCnt);
	}
	vSetRank(p, u1RankBak); // restore caller's rank selection
}
2368 #endif
2369 
2370 //1.Some RG setting will need to be DCM on, since not consider S0 2.ENABLE_RX_DCM_DPHY should be 1
//1.Some RG setting will need to be DCM on, since not consider S0 2.ENABLE_RX_DCM_DPHY should be 1
/*
 * S0_DCMOffWA - workaround applied when entering S0 with DCM disabled.
 *
 * Clears every *_OFF_DISABLE clock-gating override in MISC_CG_CTRL0 (all
 * channels), i.e. allows the listed clock domains to be gated off.
 */
static void S0_DCMOffWA(DRAMC_CTX_T *p)//For S0 + DCM off
{
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CG_CTRL0,
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_RX_COMB1_OFF_DISABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_RX_COMB0_OFF_DISABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_RX_CMD_OFF_DISABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_COMB1_OFF_DISABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_COMB0_OFF_DISABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_CMD_OFF_DISABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_COMB_OFF_DISABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_PHY_OFF_DIABLE) |
			P_Fld(0x0, MISC_CG_CTRL0_RG_CG_DRAMC_OFF_DISABLE));
}
2384 
/*
 * DramcRunTimeConfig - apply post-calibration runtime configuration.
 *
 * Called once after calibration completes.  Each compile-time feature gate
 * below enables its corresponding runtime engine (tracking, DCM, ZQCS,
 * impedance tracking, temperature sensor, ...) and logs ON/OFF so the boot
 * log documents the effective build configuration.  Statement order follows
 * the original bring-up sequence and should not be rearranged.
 */
void DramcRunTimeConfig(DRAMC_CTX_T *p)
{
#if (fcFOR_CHIP_ID == fcA60868)
	u1EnterRuntime = 1;
#endif

	msg("[DramcRunTimeConfig]\n");

	SetDramInfoToConf(p);

#if defined(DPM_CONTROL_AFTERK) && ((DRAMC_DFS_MODE%2) != 0) // for MD32 RG/PST mode
	DPMInit(p);
	msg("DPM_CONTROL_AFTERK: ON\n");
#else
	msg("!!! DPM_CONTROL_AFTERK: OFF\n");
	msg("!!! DPM could not control APHY\n");
#endif

#if ENABLE_PER_BANK_REFRESH
	#if IMP_TRACKING_PB_TO_AB_REFRESH_WA
	// enable pb-ref for current shu
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_CONF0, 0x1, SHU_CONF0_PBREFEN);
	#endif
	msg("PER_BANK_REFRESH: ON\n");
#else
	msg("PER_BANK_REFRESH: OFF\n");
#endif

///TODO:KIWI
#if __A60868_TO_BE_PORTING__

#if ENABLE_DDR800_OPEN_LOOP_MODE_OPTION
	if (vGet_DDR800_Mode(p) == DDR800_SEMI_LOOP)
	{
		EnableDllCg(p, ENABLE); //open CG to save power
	}
#endif

#endif //__A60868_TO_BE_PORTING__

#if REFRESH_OVERHEAD_REDUCTION
	vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL1, 0x1, REFCTRL1_REF_OVERHEAD_SLOW_REFPB_ENA);
	msg("REFRESH_OVERHEAD_REDUCTION: ON\n");
#else
	msg("REFRESH_OVERHEAD_REDUCTION: OFF\n");
#endif

#if CMD_PICG_NEW_MODE
	msg("CMD_PICG_NEW_MODE: ON\n");
#else
	msg("CMD_PICG_NEW_MODE: OFF\n");
#endif

#if XRTWTW_NEW_CROSS_RK_MODE
	if (p->support_rank_num == RANK_DUAL)
	{
		//ENABLE_XRTWTW_Setting(p); // @Darren, DV codes is included
		msg("XRTWTW_NEW_MODE: ON\n");
	}
#else
	msg("XRTWTW_NEW_MODE: OFF\n");
#endif

#if XRTRTR_NEW_CROSS_RK_MODE
	if (p->support_rank_num == RANK_DUAL)
	{
		//ENABLE_XRTRTR_Setting(p); // @Darren, DV codes is included
		msg("XRTRTR_NEW_MODE: ON\n");
	}
#else
	msg("XRTRTR_NEW_MODE: OFF\n");
#endif

#if ENABLE_TX_TRACKING
	msg("TX_TRACKING: ON\n");
#else
	msg("TX_TRACKING: OFF\n");
#endif

#if RDSEL_TRACKING_EN
	msg("RDSEL_TRACKING: ON\n");
#else
	msg("RDSEL_TRACKING: OFF\n");
#endif

#if TDQSCK_PRECALCULATION_FOR_DVFS
	msg("DQS Precalculation for DVFS: ");
	/* Maoauo: Enable DQS precalculation for LP4, disable for LP3(same as Kibo) */
	DramcDQSPrecalculation_enable(p);
	msg("ON\n");
#else
	msg("DQS Precalculation for DVFS: OFF\n");
#endif

#if ENABLE_RX_TRACKING
	DramcRxInputDelayTrackingInit_Common(p);
	DramcRxInputDelayTrackingHW(p);
	msg("RX_TRACKING: ON\n");
#else
	msg("RX_TRACKING: OFF\n");
#endif

#if (ENABLE_RX_TRACKING && RX_DLY_TRACK_ONLY_FOR_DEBUG && defined(DUMMY_READ_FOR_TRACKING))
	msg("RX_DLY_TRACK_DBG: ON\n");
	DramcRxDlyTrackDebug(p);
#endif

/* HW gating - Disabled by default(in preloader) to save power (DE: HJ Huang) */
#if (defined(HW_GATING))
	msg("HW_GATING DBG: ON\n");
	DramcHWGatingDebugOnOff(p, ENABLE);
#else
	msg("HW_GATING DBG: OFF\n");
	DramcHWGatingDebugOnOff(p, DISABLE);
#endif

#ifdef ZQCS_ENABLE_LP4
	// if CHA.ZQCSMASK=1, and then set CHA.ZQCALDISB=1 first, else set CHB.ZQCALDISB=1 first
#if (fcFOR_CHIP_ID == fcPetrus)
	// Petrus: release ZQ calibration per channel-pair with a 1us gap between pairs
	vIO32WriteFldAlign(DRAMC_REG_ZQ_SET1 + (CHANNEL_A << POS_BANK_NUM), 1, ZQ_SET1_ZQCALDISB);
	vIO32WriteFldAlign(DRAMC_REG_ZQ_SET1 + (CHANNEL_D << POS_BANK_NUM), 1, ZQ_SET1_ZQCALDISB);

	mcDELAY_US(1);

	vIO32WriteFldAlign(DRAMC_REG_ZQ_SET1 + (CHANNEL_B << POS_BANK_NUM), 1, ZQ_SET1_ZQCALDISB);
	vIO32WriteFldAlign(DRAMC_REG_ZQ_SET1 + (CHANNEL_C << POS_BANK_NUM), 1, ZQ_SET1_ZQCALDISB);
#elif (fcFOR_CHIP_ID == fcMargaux)
	vIO32WriteFldAlign_All(DRAMC_REG_ZQ_SET1, 1, ZQ_SET1_ZQCALDISB);// LP3 and LP4 are different, be careful.
#endif
	msg("ZQCS_ENABLE_LP4: ON\n");
#else
	vIO32WriteFldAlign_All(DRAMC_REG_ZQ_SET1, 0, ZQ_SET1_ZQCALDISB);// LP3 and LP4 are different, be careful.
	msg("ZQCS_ENABLE_LP4: OFF\n");
#endif

///TODO:JEREMY
#if 0
#ifdef DUMMY_READ_FOR_DQS_GATING_RETRY
	DummyReadForDqsGatingRetryNonShuffle(p, 1);
	msg("DUMMY_READ_FOR_DQS_GATING_RETRY: ON\n");
#else
	DummyReadForDqsGatingRetryNonShuffle(p, 0);
	msg("DUMMY_READ_FOR_DQS_GATING_RETRY: OFF\n");
#endif
#endif

#if RX_PICG_NEW_MODE
	msg("RX_PICG_NEW_MODE: ON\n");
#else
	msg("RX_PICG_NEW_MODE: OFF\n");
#endif

#if TX_PICG_NEW_MODE
	TXPICGNewModeEnable(p);
	msg("TX_PICG_NEW_MODE: ON\n");
#else
	msg("TX_PICG_NEW_MODE: OFF\n");
#endif

#if ENABLE_RX_DCM_DPHY
	msg("ENABLE_RX_DCM_DPHY: ON\n");
#else
	msg("ENABLE_RX_DCM_DPHY: OFF\n");
#endif

#if (SW_CHANGE_FOR_SIMULATION == 0)
#if APPLY_LOWPOWER_GOLDEN_SETTINGS
	int enable_dcm = (doe_get_config("dramc_dcm"))? 0: 1;
	const char *str = (enable_dcm == 1)? ("ON") : ("OFF");
//	  EnableDramcPhyDCM(p, enable_dcm);
	EnableDramcPhyDCMNonShuffle(p, enable_dcm);
	msg("LOWPOWER_GOLDEN_SETTINGS(DCM): %s\n", str);

	if(enable_dcm == 0)
	{
		S0_DCMOffWA(p);//For S0 + DCM off
	}

#else
//	  EnableDramcPhyDCM(p, DCM_OFF);
	EnableDramcPhyDCMNonShuffle(p, 0);
	msg("LOWPOWER_GOLDEN_SETTINGS(DCM): OFF\n");

	S0_DCMOffWA(p);//For S0 + DCM off
#endif
#endif

//DumpShuRG(p);



#if 1
	DramcShuTrackingDcmEnBySRAM(p);
#endif


//Dummy read should NOT be enabled before gating tracking
#ifdef DUMMY_READ_FOR_TRACKING
	DramcDummyReadForTrackingEnable(p);
#else
	msg("DUMMY_READ_FOR_TRACKING: OFF\n");
#endif


#ifdef SPM_CONTROL_AFTERK
	DVFS_PRE_config(p);
	TransferToSPMControl(p);
	msg("SPM_CONTROL_AFTERK: ON\n");
#else
	msg("!!! SPM_CONTROL_AFTERK: OFF\n");
	msg("!!! SPM could not control APHY\n");
#endif

// when time profiling multi times, SW impedance tracking will fail when trakcing enable.
// ignor SW impedance tracking when doing time profling
#ifndef DDR_INIT_TIME_PROFILING
#ifdef IMPEDANCE_TRACKING_ENABLE
	if (p->dram_type == TYPE_LPDDR4 || p->dram_type == TYPE_LPDDR4X)
	{
		DramcImpedanceTrackingEnable(p);
		msg("IMPEDANCE_TRACKING: ON\n");

#ifdef IMPEDANCE_HW_SAVING
		DramcImpedanceHWSaving(p);
#endif
	}
#else
	msg("IMPEDANCE_TRACKING: OFF\n");
#endif
#endif

	//0x1c0[31]
	//vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_DQSCAL0), 0, DQSCAL0_STBCALEN);

#ifdef TEMP_SENSOR_ENABLE
	SwitchHMR4(p, ON);
	msg("TEMP_SENSOR: ON\n");
#else
	SwitchHMR4(p, OFF);
	msg("TEMP_SENSOR: OFF\n");
#endif

#ifdef HW_SAVE_FOR_SR
	msg("HW_SAVE_FOR_SR: ON, no implementation\n");
#else
	msg("HW_SAVE_FOR_SR: OFF\n");
#endif

#ifdef CLK_FREE_FUN_FOR_DRAMC_PSEL
	ClkFreeRunForDramcPsel(p);
	msg("CLK_FREE_FUN_FOR_DRAMC_PSEL: ON\n");
#else
	msg("CLK_FREE_FUN_FOR_DRAMC_PSEL: OFF\n");
#endif

#if PA_IMPROVEMENT_FOR_DRAMC_ACTIVE_POWER
	DramcPAImprove(p);
	msg("PA_IMPROVEMENT_FOR_DRAMC_ACTIVE_POWER: ON\n");
#else
	msg("PA_IMPROVEMENT_FOR_DRAMC_ACTIVE_POWER: OFF\n");
#endif

#if ENABLE_RODT_TRACKING
	msg("Read ODT Tracking: ON\n");
#else
	msg("Read ODT Tracking: OFF\n");
#endif

#if ENABLE_REFRESH_RATE_DEBOUNCE
	msg("Refresh Rate DeBounce: ON\n");
	DramcRefreshRateDeBounceEnable(p);
#endif

#if ENABLE_DVFS_BYPASS_MR13_FSP
	DFSBypassMR13HwSet(p);
#endif


#if (CHECK_GOLDEN_SETTING == TRUE)
	DRAM_STATUS_T stResult = CheckGoldenSetting(p);
	msg("End of run time ==>Golden setting check: %s\n", (stResult == DRAM_OK)? ("OK") : ("NG"));
#endif

#if DFS_NOQUEUE_FLUSH_WA
	EnableDFSNoQueueFlush(p);
	msg("DFS_NO_QUEUE_FLUSH: ON\n");
#else
	msg("DFS_NO_QUEUE_FLUSH: OFF\n");
#endif

#if DFS_NOQUEUE_FLUSH_LATENCY_CNT
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_FSM_CFG, 1, LPIF_FSM_CFG_DBG_LATENCY_CNT_EN);
	// MD32 clock is 208M
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_SSPM_MCLK_DIV, P_Fld(0, SSPM_MCLK_DIV_MCLK_SRC)
		| P_Fld(0, SSPM_MCLK_DIV_MCLK_DIV));
	msg("DFS_NO_QUEUE_FLUSH_LATENCY_CNT: ON\n");
#else
	msg("DFS_NO_QUEUE_FLUSH_LATENCY_CNT: OFF\n");
#endif

#if ENABLE_DFS_RUNTIME_MRW
	DFSRuntimeFspMRW(p);
	msg("ENABLE_DFS_RUNTIME_MRW: ON\n");
#else
	msg("ENABLE_DFS_RUNTIME_MRW: OFF\n");
#endif

	//CheckRxPICGNewModeSetting(p);
	vIO32WriteFldAlign_All(DRAMC_REG_REFCTRL0, 0x0, REFCTRL0_REFDIS); //After k, auto refresh should be enable

#if DDR_RESERVE_NEW_MODE
	msg("DDR_RESERVE_NEW_MODE: ON\n");
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DDR_RESERVE, P_Fld(1, MISC_DDR_RESERVE_WDT_LITE_EN) | P_Fld(0, MISC_DDR_RESERVE_WDT_SM_CLR));
#else
	msg("DDR_RESERVE_NEW_MODE: OFF\n");
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DDR_RESERVE, P_Fld(0, MISC_DDR_RESERVE_WDT_LITE_EN) | P_Fld(1, MISC_DDR_RESERVE_WDT_SM_CLR));
#endif

#if MR_CBT_SWITCH_FREQ
	msg("MR_CBT_SWITCH_FREQ: ON\n");
#else
	msg("MR_CBT_SWITCH_FREQ: OFF\n");
#endif

	msg("=========================\n");
}
2711 
2712 #if 0  //no use?
/*
 * DramcTest_DualSch_stress - dual-scheduler MRW/MRR stress loop.
 *
 * Dead code (enclosing #if 0): enables the dual scheduler, then performs 10
 * iterations of MR12 write (0x14) followed by MR12 read-back on the current
 * rank.  The read-back value is fetched but not checked.
 */
void DramcTest_DualSch_stress(DRAMC_CTX_T *p)
{
	U32 count = 0;
	U16 u2Value = 0;

#if MRW_CHECK_ONLY
	mcSHOW_MRW_MSG("\n==[MR Dump] %s==\n", __func__);
#endif

	//vIO32WriteFldAlign_All(DRAMC_REG_PERFCTL0, 1, PERFCTL0_DUALSCHEN);
	vIO32WriteFldAlign_All(DRAMC_REG_SHU_SCHEDULER, 1, SHU_SCHEDULER_DUALSCHEN);

	while (count < 10)
	{
		count++;

		// Write a fixed VREF-CA value, then read it back (result unchecked)
		u1MR12Value[p->channel][p->rank][p->dram_fsp] = 0x14;
		DramcModeRegWriteByRank(p, p->rank, 12, u1MR12Value[p->channel][p->rank][p->dram_fsp]);
		DramcModeRegReadByRank(p, p->rank, 12, &u2Value);
		//msg("MR12 = 0x%0X\n", u1Value);
	}
}
2735 #endif
2736 
2737 #if (ENABLE_TX_TRACKING && TX_RETRY_ENABLE)
/*
 * SPMTx_Track_Retry_OnOff - arm/disarm TX-tracking retry around a DDR800 exit.
 *
 * @shu_level: target shuffle level of the current DFS switch.
 * @onoff:     ENABLE to arm the retry, anything else to tear it down.
 *
 * Remembers (in a function-static flag) that SRAM_SHU6 (DDR800) was entered;
 * on the next switch to a different shuffle the TX retry sequence is applied.
 * Ordering of the mask/enable writes and the 1us delays is intentional.
 */
void SPMTx_Track_Retry_OnOff(DRAMC_CTX_T *p, U8 shu_level, U8 onoff)
{
	// Persists across calls: set when DDR800 (SRAM_SHU6) has been entered
	static U8 gIsddr800TxRetry = 0;

	// MCK still available for DRAMC RG access from Joe comment
	if (shu_level == SRAM_SHU6)
	{
		gIsddr800TxRetry = 1;
	}

	if ((gIsddr800TxRetry == 1) && (shu_level != SRAM_SHU6)) //Need to do tx retry when DDR800 -> DDr1200
	{
		if (onoff == ENABLE)
		{
			msg("TX track retry: ENABLE! (DDR800 to DDR1200)\n");
			// Block EMI first, then enable the retry engine
			vIO32WriteFldAlign_All(DRAMC_REG_TX_RETRY_SET0, 1, TX_RETRY_SET0_XSR_TX_RETRY_BLOCK_ALE_MASK);
			mcDELAY_US(1);
			#if TX_RETRY_CONTROL_BY_SPM
			vIO32WriteFldAlign(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 1, LPIF_LOW_POWER_CFG_1_TX_TRACKING_RETRY_EN);
			#else //control by DRAMC
			vIO32WriteFldAlign_All(DRAMC_REG_TX_RETRY_SET0, 1, TX_RETRY_SET0_XSR_TX_RETRY_EN);
			#endif
		}
		else //DISABLE
		{
			msg("TX track retry: DISABLE! (DDR800 to DDR1200)\n");
			#if TX_RETRY_CONTROL_BY_SPM
			vIO32WriteFldAlign(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 0, LPIF_LOW_POWER_CFG_1_TX_TRACKING_RETRY_EN);
			#else //control by DRAMC
			vIO32WriteFldAlign_All(DRAMC_REG_TX_RETRY_SET0, 0, TX_RETRY_SET0_XSR_TX_RETRY_EN);
			#endif
			mcDELAY_US(1); //add 1us delay to wait emi and tx retry be done (because PPR_CTRL_TX_RETRY_SHU_RESP_OPT=1)
			vIO32WriteFldAlign_All(DRAMC_REG_TX_RETRY_SET0, 0, TX_RETRY_SET0_XSR_TX_RETRY_BLOCK_ALE_MASK); //enable block emi to let tx retry be finish
			gIsddr800TxRetry = 0;
		}
	}
}
2775 
2776 #if SW_TX_RETRY_ENABLE
/*
 * SWTx_Track_Retry_OnOff - run one software-triggered TX retry cycle.
 *
 * Sequence: clear SW_EN, raise the EMI block mask, then raise SW_EN to kick
 * off the retry.  Polls SPCMDRESP until the TX-retry-done response asserts
 * (1us between polls), then drops SW_EN and the block mask again.
 *
 * Fix: the polling log called msg3() with a value argument but no conversion
 * specifier in the format string, so the response was never printed; the
 * format now includes %d.  Local renamed u4Response -> u1Response to match
 * the file's Hungarian convention for U8.
 *
 * NOTE(review): the poll loop has no timeout - it spins forever if the done
 * response never asserts.  Presumed intentional (matches other polls here).
 */
void SWTx_Track_Retry_OnOff(DRAMC_CTX_T *p)
{
	U8 u1Response;

	msg("SW TX track retry!\n");
	// Clear SW enable, block EMI traffic, then rising edge on SW_EN starts the retry
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_RETRY_SET0), 0, TX_RETRY_SET0_XSR_TX_RETRY_SW_EN);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_RETRY_SET0), 1, TX_RETRY_SET0_XSR_TX_RETRY_BLOCK_ALE_MASK);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_RETRY_SET0), 1, TX_RETRY_SET0_XSR_TX_RETRY_SW_EN);
	do
	{
		u1Response = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_TX_RETRY_DONE_RESPONSE);
		mcDELAY_US(1);	// Wait tZQCAL(min) 1us or wait next polling
		msg3("still wait tx retry be done (response=%d)\n", u1Response);
	}while (u1Response == 0);
	// Retry finished: release SW enable and unblock EMI
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_RETRY_SET0), 0, TX_RETRY_SET0_XSR_TX_RETRY_SW_EN);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_TX_RETRY_SET0), 0, TX_RETRY_SET0_XSR_TX_RETRY_BLOCK_ALE_MASK);
}
2794 #endif
2795 #endif
2796 
2797 // The "ENABLE_RANK_NUMBER_AUTO_DETECTION" use this API
/*
 * DFSInitForCalibration - (re)initialize the controller before calibrating
 * the current shuffle/frequency.  Also used by rank-number auto-detection.
 *
 * Runs DramcInit + pre-calibration hooks, then (optionally, per build flags)
 * RX delay-cell measurement and the JMeter pre-measurement, with optional
 * time profiling around each phase.
 */
void DFSInitForCalibration(DRAMC_CTX_T *p)
{
#ifdef DDR_INIT_TIME_PROFILING
	U32 CPU_Cycle;
	time_msg("*** Data rate %d ***\n\n", p->frequency << 1);

	TimeProfileBegin();
#endif

	// Log mode-register writes during init only
	u1PrintModeRegWrite = 1;

#if MRW_BACKUP
	U8 u1RKIdx;

	// Seed the per-rank FSP bookkeeping with the current FSP
	for(u1RKIdx=0; u1RKIdx<p->support_rank_num; u1RKIdx++)
	{
		gFSPWR_Flag[u1RKIdx]=p->dram_fsp;
	}
#endif

	DramcInit(p);
	u1PrintModeRegWrite = 0;
	vBeforeCalibration(p);

#ifdef DUMP_INIT_RG_LOG_TO_DE
	// Intentional hang: freeze here so DE can dump the init register state
	while (1);
#endif


#if ENABLE_DUTY_SCAN_V2
#ifdef DDR_INIT_TIME_PROFILING
	CPU_Cycle = TimeProfileEnd();
	time_msg("	(1) DFSInitForCalibration() take %d ms\n\n", (CPU_Cycle / 1000) - gu4DutyCalibrationTime);
	time_msg("	(2) DramcNewDutyCalibration take %d ms\n\r", gu4DutyCalibrationTime);
#endif
#endif

#ifndef DUMP_INIT_RG_LOG_TO_DE
	#ifdef ENABLE_MIOCK_JMETER
	if ((Get_MDL_Used_Flag()==NORMAL_USED) && (p->frequency >= 800))
	{
		Get_RX_DelayCell(p);
	}
	#endif
#endif

// Without ETT the JMeter block below only runs at >= 1333; with ETT it always runs
#if !__ETT__
	if (p->frequency >= 1333)
#endif
	{
#ifdef DDR_INIT_TIME_PROFILING
		TimeProfileBegin();
#endif

#ifndef DUMP_INIT_RG_LOG_TO_DE
	#ifdef ENABLE_MIOCK_JMETER
	if (Get_MDL_Used_Flag()==NORMAL_USED)
	{
		PRE_MIOCK_JMETER_HQA_USED(p);
	}
	#endif
#endif

#ifdef DDR_INIT_TIME_PROFILING
	  CPU_Cycle=TimeProfileEnd();
	  time_msg("  (3) JMeter takes %d ms\n\r", CPU_Cycle / 1000);
#endif
	}

}
2868 
2869 #if 0 /* cc mark to use DV initial setting */
/*
 * DramcHWDQSGatingTracking_ModeSetting - HW DQS gating-tracking mode select.
 *
 * Dead code (enclosing #if 0, DV initial settings are used instead).  Chooses
 * between FIFO mode and the legacy mode via the
 * DramcHWDQSGatingTracking_FIFO_MODE compile switch; the two branches differ
 * in STB_SHIFT_DTCOUT_IG, the MISC_CTRL0 valid-delay fields, and the
 * DMRANK_OUTSEL values.
 */
void DramcHWDQSGatingTracking_ModeSetting(DRAMC_CTX_T *p)
{
#ifdef HW_GATING
#if DramcHWDQSGatingTracking_FIFO_MODE
	//REFUICHG=0, STB_SHIFT_DTCOUT_IG=0, DQSG_MODE=1, NARROW_IG=0
	vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL),
		P_Fld(1, MISC_STBCAL_STB_DQIEN_IG) |
		P_Fld(1, MISC_STBCAL_PICHGBLOCK_NORD) |
		P_Fld(0, MISC_STBCAL_REFUICHG) |
		P_Fld(0, MISC_STBCAL_PHYVALID_IG) |
		P_Fld(0, MISC_STBCAL_STBSTATE_OPT) |
		P_Fld(0, MISC_STBCAL_STBDLELAST_FILTER) |
		P_Fld(0, MISC_STBCAL_STBDLELAST_PULSE) |
		P_Fld(0, MISC_STBCAL_STBDLELAST_OPT) |
		P_Fld(1, MISC_STBCAL_PIMASK_RKCHG_OPT));

	vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
		P_Fld(1, MISC_STBCAL1_STBCAL_FILTER) |
		//cc mark P_Fld(1, MISC_STBCAL1_STB_FLAGCLR) |
		P_Fld(1, MISC_STBCAL1_STB_SHIFT_DTCOUT_IG));

	vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL0),
		P_Fld(1, MISC_CTRL0_R_DMDQSIEN_FIFO_EN) |
		P_Fld(0, MISC_CTRL0_R_DMVALID_DLY) |
		P_Fld(0, MISC_CTRL0_R_DMVALID_DLY_OPT) |
		P_Fld(0, MISC_CTRL0_R_DMVALID_NARROW_IG));
		//cc mark P_Fld(0, MISC_CTRL0_R_DMDQSIEN_SYNCOPT));

	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
		0, B0_DQ6_RG_RX_ARDQ_DMRANK_OUTSEL_B0);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6),
		0, B1_DQ6_RG_RX_ARDQ_DMRANK_OUTSEL_B1);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_CA_CMD6),
		0, CA_CMD6_RG_RX_ARCMD_DMRANK_OUTSEL);

#else
	//REFUICHG=0, STB_SHIFT_DTCOUT_IG=0, DQSG_MODE=1, NARROW_IG=0
	vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL),
		P_Fld(1, MISC_STBCAL_STB_DQIEN_IG) |
		P_Fld(1, MISC_STBCAL_PICHGBLOCK_NORD) |
		P_Fld(0, MISC_STBCAL_REFUICHG) |
		P_Fld(0, MISC_STBCAL_PHYVALID_IG) |
		P_Fld(0, MISC_STBCAL_STBSTATE_OPT) |
		P_Fld(0, MISC_STBCAL_STBDLELAST_FILTER) |
		P_Fld(0, MISC_STBCAL_STBDLELAST_PULSE) |
		P_Fld(0, MISC_STBCAL_STBDLELAST_OPT) |
		P_Fld(1, MISC_STBCAL_PIMASK_RKCHG_OPT));

	vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_STBCAL1),
		P_Fld(1, MISC_STBCAL1_STBCAL_FILTER) |
		//cc mark P_Fld(1, MISC_STBCAL1_STB_FLAGCLR) |
		P_Fld(0, MISC_STBCAL1_STB_SHIFT_DTCOUT_IG));


	vIO32WriteFldMulti_All(DRAMC_REG_ADDR(DDRPHY_REG_MISC_CTRL0),
		P_Fld(0, MISC_CTRL0_R_DMDQSIEN_FIFO_EN) |
		P_Fld(3, MISC_CTRL0_R_DMVALID_DLY) |
		P_Fld(1, MISC_CTRL0_R_DMVALID_DLY_OPT) |
		P_Fld(0, MISC_CTRL0_R_DMVALID_NARROW_IG));
		//cc mark P_Fld(0xf, MISC_CTRL0_R_DMDQSIEN_SYNCOPT));

	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_B0_DQ6),
		1, B0_DQ6_RG_RX_ARDQ_DMRANK_OUTSEL_B0);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_B1_DQ6),
		1, B1_DQ6_RG_RX_ARDQ_DMRANK_OUTSEL_B1);
	vIO32WriteFldAlign_All(DRAMC_REG_ADDR(DDRPHY_REG_CA_CMD6),
		1, CA_CMD6_RG_RX_ARCMD_DMRANK_OUTSEL);
#endif
#endif
}
2940 #endif
2941 
2942 #if TX_PICG_NEW_MODE
2943 #if 0
/*
 * GetTXPICGSetting - dump the current TX PICG related register fields.
 *
 * Dead code (enclosing #if 0): read-and-log only, per channel and per rank;
 * writes nothing.  Restores the caller's rank and channel selection before
 * returning.  Several locals (u4DQS_OEN_final, u4DQ_OEN_final,
 * u2DQS_OEN_Delay, u2DQ_OEN_Delay, u2Shift_Div, u1Div_ratio) are declared
 * but unused in this dump-only variant - see TXPICGSetting for the version
 * that computes and programs them.
 */
void GetTXPICGSetting(DRAMC_CTX_T * p)
{
	U32 u4DQS_OEN_final, u4DQ_OEN_final;
	U16 u2DQS_OEN_2T[2], u2DQS_OEN_05T[2], u2DQS_OEN_Delay[2];
	U16 u2DQ_OEN_2T[2], u2DQ_OEN_05T[2], u2DQ_OEN_Delay[2];
	U16 u2COMB_TX_SEL[2];
	U16 u2Shift_Div[2];
	U16 u2COMB_TX_PICG_CNT;
	U8 u1CHIdx, u1RankIdx, u1Rank_bak = u1GetRank(p), u1backup_CH = vGetPHY2ChannelMapping(p), u1Div_ratio;

	msg("****** GetTXPICGSetting DDR[%d] @@@\n", p->frequency * 2);

	for (u1CHIdx = 0; u1CHIdx < p->support_channel_num; u1CHIdx++)
	{
		vSetPHY2ChannelMapping(p, u1CHIdx);
		//Set TX DQS PICG
		//DQS0
		u2DQS_OEN_2T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS0), SHU_SELPH_DQS0_TXDLY_OEN_DQS0);//m
		u2DQS_OEN_05T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS1), SHU_SELPH_DQS1_DLY_OEN_DQS0);//n
		//DQS1
		u2DQS_OEN_2T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS0), SHU_SELPH_DQS0_TXDLY_OEN_DQS1);//m
		u2DQS_OEN_05T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS1), SHU_SELPH_DQS1_DLY_OEN_DQS1);//n

		msg("CH%d\n", u1CHIdx);
		msg("DQS0 m=%d n=%d \n", u2DQS_OEN_2T[0], u2DQS_OEN_05T[0]);
		msg("DQS1 m=%d n=%d \n", u2DQS_OEN_2T[1], u2DQS_OEN_05T[1]);



		u2COMB_TX_SEL[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_APHY_TX_PICG_CTRL), SHU_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQS_SEL_P0);
		u2COMB_TX_SEL[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_APHY_TX_PICG_CTRL), SHU_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQS_SEL_P1);
		u2COMB_TX_PICG_CNT = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_APHY_TX_PICG_CTRL), SHU_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_PICG_CNT);

		msg("TX_DQS_SEL_P0 %d \n", u2COMB_TX_SEL[0]);
		msg("TX_DQS_SEL_P1 %d \n", u2COMB_TX_SEL[1]);
		msg("COMB_TX_PICG_CNT %d \n", u2COMB_TX_PICG_CNT);

		//Set TX RK0 and RK1 DQ PICG
		for (u1RankIdx = RANK_0; u1RankIdx < p->support_rank_num; u1RankIdx++)
		{
			msg("Rank%d\n", u1RankIdx);

			vSetRank(p, u1RankIdx);
			//DQ0
			u2DQ_OEN_2T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_OEN_DQ0);//p
			u2DQ_OEN_05T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), SHURK_SELPH_DQ2_DLY_OEN_DQ0);//q
			//DQ1
			u2DQ_OEN_2T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_OEN_DQ1);//p
			u2DQ_OEN_05T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), SHURK_SELPH_DQ2_DLY_OEN_DQ1);//q

			msg("DQ0 p=%d q=%d \n", u2DQ_OEN_2T[0], u2DQ_OEN_05T[0]);
			msg("DQ1 p=%d q=%d \n", u2DQ_OEN_2T[1], u2DQ_OEN_05T[1]);

			u2COMB_TX_SEL[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_APHY_TX_PICG_CTRL), SHURK_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQ_RK_SEL_P0);
			u2COMB_TX_SEL[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_APHY_TX_PICG_CTRL), SHURK_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQ_RK_SEL_P1);

			msg("TX_DQ_RK_SEL_P0 %d \n", u2COMB_TX_SEL[0]);
			msg("TX_DQ_RK_SEL_P1 %d \n", u2COMB_TX_SEL[1]);
		}
		vSetRank(p, u1Rank_bak);
	}
	vSetPHY2ChannelMapping(p, u1backup_CH);
}
3007 #endif
3008 
3009 #define ADD_1UI_TO_APHY 1 //After A60-868/Pe-trus
/*
 * TXPICGSetting - compute and program the TX PICG gating windows.
 *
 * For each channel: reads the DQS0/DQS1 output-enable delays (2T "m" plus
 * 0.5T "n" parts), folds them into a single UI count, takes the earlier of
 * the two, adds ADD_1UI_TO_APHY, then converts back into the per-phase
 * COMB_TX_DQS_SEL selectors.  The same is repeated per rank for DQ0/DQ1 into
 * the COMB_TX_DQ_RK selectors.  Shift constants and the 2T->UI ratio depend
 * on the clock divider mode (DIV8 vs DIV4; DIV4 has no phase 1).
 * Restores the caller's rank and channel mapping before returning.
 */
void TXPICGSetting(DRAMC_CTX_T * p)
{
	U32 u4DQS_OEN_final, u4DQ_OEN_final;
	U16 u2DQS_OEN_2T[2], u2DQS_OEN_05T[2], u2DQS_OEN_Delay[2];
	U16 u2DQ_OEN_2T[2], u2DQ_OEN_05T[2], u2DQ_OEN_Delay[2];
	U16 u2COMB_TX_SEL[2];
	U16 u2Shift_DQS_Div[2];
	U16 u2Shift_DQ_Div[2];
	U16 u2COMB_TX_PICG_CNT;
	U8 u1CHIdx, u1RankIdx, u1Rank_bak = u1GetRank(p), u1backup_CH = vGetPHY2ChannelMapping(p), u1Div_ratio;

	u2COMB_TX_PICG_CNT = 3;//After Pe-trus, could detect HW OE=1 -> 0 automatically, and prolong TX picg
	if (vGet_Div_Mode(p) == DIV8_MODE)
	{
		u2Shift_DQS_Div[0] = 10;//phase 0
		u2Shift_DQS_Div[1] = 6;//phase 1
		u2Shift_DQ_Div[0] = 8;//phase 0
		u2Shift_DQ_Div[1] = 4;//phase 1
		u1Div_ratio = 3;	// 2T unit = 8 UI (<<3)
	}
	else //DIV4_MODE
	{
		u2Shift_DQS_Div[0] = 2;//phase 0
		u2Shift_DQS_Div[1] = 0;//phase 1, no use
		u2Shift_DQ_Div[0] = 0;//phase 0
		u2Shift_DQ_Div[1] = 0;//phase 1, no use
		u1Div_ratio = 2;	// 2T unit = 4 UI (<<2)
	}

	for (u1CHIdx = 0; u1CHIdx < p->support_channel_num; u1CHIdx++)
	{
		vSetPHY2ChannelMapping(p, u1CHIdx);
		//Set TX DQS PICG
		//DQS0
		u2DQS_OEN_2T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS0), SHU_SELPH_DQS0_TXDLY_OEN_DQS0);//m
		u2DQS_OEN_05T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS1), SHU_SELPH_DQS1_DLY_OEN_DQS0);//n
		u2DQS_OEN_Delay[0] = (u2DQS_OEN_2T[0] << u1Div_ratio) + u2DQS_OEN_05T[0];
		//DQS1
		u2DQS_OEN_2T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS0), SHU_SELPH_DQS0_TXDLY_OEN_DQS1);//m
		u2DQS_OEN_05T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_DQS1), SHU_SELPH_DQS1_DLY_OEN_DQS1);//n
		u2DQS_OEN_Delay[1] = (u2DQS_OEN_2T[1] << u1Div_ratio) + u2DQS_OEN_05T[1];

		u4DQS_OEN_final = (u2DQS_OEN_Delay[0] > u2DQS_OEN_Delay[1])? u2DQS_OEN_Delay[1]: u2DQS_OEN_Delay[0]; //choose minimum value
		u4DQS_OEN_final += ADD_1UI_TO_APHY;


		u2COMB_TX_SEL[0] = (u4DQS_OEN_final > u2Shift_DQS_Div[0])? ((u4DQS_OEN_final - u2Shift_DQS_Div[0]) >> u1Div_ratio): 0;

		if (vGet_Div_Mode(p) == DIV4_MODE)
			u2COMB_TX_SEL[1] = 0;
		else
			u2COMB_TX_SEL[1] = (u4DQS_OEN_final > u2Shift_DQS_Div[1])? ((u4DQS_OEN_final - u2Shift_DQS_Div[1]) >> u1Div_ratio): 0;

		vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_APHY_TX_PICG_CTRL), P_Fld(u2COMB_TX_SEL[0], SHU_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQS_SEL_P0)
											 | P_Fld(u2COMB_TX_SEL[1], SHU_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQS_SEL_P1)
											 | P_Fld(u2COMB_TX_PICG_CNT, SHU_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_PICG_CNT));
		//Set TX RK0 and RK1 DQ PICG
		for (u1RankIdx = RANK_0; u1RankIdx < p->support_rank_num; u1RankIdx++)
		{
			vSetRank(p, u1RankIdx);
			//DQ0
			u2DQ_OEN_2T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_OEN_DQ0);
			u2DQ_OEN_05T[0] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), SHURK_SELPH_DQ2_DLY_OEN_DQ0);
			u2DQ_OEN_Delay[0] = (u2DQ_OEN_2T[0] << u1Div_ratio) + u2DQ_OEN_05T[0];
			//DQ1
			u2DQ_OEN_2T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0), SHURK_SELPH_DQ0_TXDLY_OEN_DQ1);
			u2DQ_OEN_05T[1] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ2), SHURK_SELPH_DQ2_DLY_OEN_DQ1);
			u2DQ_OEN_Delay[1] = (u2DQ_OEN_2T[1] << u1Div_ratio) + u2DQ_OEN_05T[1];


			u4DQ_OEN_final = (u2DQ_OEN_Delay[0] > u2DQ_OEN_Delay[1])? u2DQ_OEN_Delay[1]: u2DQ_OEN_Delay[0]; //choose minimum value
			u4DQ_OEN_final += ADD_1UI_TO_APHY;

			u2COMB_TX_SEL[0] = (u4DQ_OEN_final > u2Shift_DQ_Div[0])? ((u4DQ_OEN_final - u2Shift_DQ_Div[0]) >> u1Div_ratio): 0;

			if (vGet_Div_Mode(p) == DIV4_MODE)
			   u2COMB_TX_SEL[1] = 0;
			else
			   u2COMB_TX_SEL[1] = (u4DQ_OEN_final > u2Shift_DQ_Div[1])? ((u4DQ_OEN_final - u2Shift_DQ_Div[1]) >> u1Div_ratio): 0;

			vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHURK_APHY_TX_PICG_CTRL), P_Fld(u2COMB_TX_SEL[0], SHURK_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQ_RK_SEL_P0)
											    | P_Fld(u2COMB_TX_SEL[1], SHURK_APHY_TX_PICG_CTRL_DDRPHY_CLK_EN_COMB_TX_DQ_RK_SEL_P1));
		}
		vSetRank(p, u1Rank_bak);
	}
	vSetPHY2ChannelMapping(p, u1backup_CH);
}
3097 #endif
3098 
3099 
3100 #if RX_PICG_NEW_MODE
/*
 * RXPICGSetting - switch the RX path to the new PICG (power-saving clock
 * gating) mode.
 *
 * Disables the legacy stable-calibration gating, enables the new
 * DQSIEN/selph-by-rank gating, and switches the APHY in-gate/in-buffer
 * enables to their new-mode option bits.  The trailing braced section
 * re-writes defaults for completeness per the DVT verification plan and is
 * intentionally kept even though it matches HW reset values.
 */
void RXPICGSetting(DRAMC_CTX_T * p)
{
	DRAM_RANK_T bkRank = u1GetRank(p);
	U8 u1RankIdx = 0;

	// Disable legacy STBCAL gating
	vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_STBCAL, 0, MISC_SHU_STBCAL_STBCALEN);
	vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_STBCAL, 0, MISC_SHU_STBCAL_STB_SELPHCALEN);

	//PI_CG_DQSIEN new mode
	vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL1, 1, MISC_STBCAL1_STBCNT_SHU_RST_EN);
	vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 1, MISC_STBCAL2_DQSIEN_SELPH_BY_RANK_EN);
	vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_STBCAL, 1, MISC_SHU_STBCAL_DQSIEN_PICG_MODE);

	//APHY control new mode
	vIO32WriteFldAlign(DDRPHY_REG_MISC_RX_IN_GATE_EN_CTRL, 1, MISC_RX_IN_GATE_EN_CTRL_RX_IN_GATE_EN_OPT);
	vIO32WriteFldAlign(DDRPHY_REG_MISC_RX_IN_BUFF_EN_CTRL, 1, MISC_RX_IN_BUFF_EN_CTRL_RX_IN_BUFF_EN_OPT);

	//Dummy code (based on DVT document Verification plan of RX PICG efficiency improvment.docx)
	//No need to set since HW setting or setting in other place
	//Pls. don't remove for the integrity
	{
		// DIV4 mode needs one extra MCK of tail latency on the gating window
		U8 u1TAIL_LAT = (vGet_Div_Mode(p) == DIV4_MODE) ? 1: 0;
		vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 0, MISC_STBCAL2_STB_STBENRST_EARLY_1T_EN);

		for (u1RankIdx = 0; u1RankIdx < p->support_rank_num; u1RankIdx++)//Should set 2 rank
		{
			vSetRank(p, u1RankIdx);
			vIO32WriteFldMulti(DDRPHY_REG_MISC_SHU_RK_DQSIEN_PICG_CTRL, P_Fld(u1TAIL_LAT, MISC_SHU_RK_DQSIEN_PICG_CTRL_DQSIEN_PICG_TAIL_EXT_LAT)
					| P_Fld(0, MISC_SHU_RK_DQSIEN_PICG_CTRL_DQSIEN_PICG_HEAD_EXT_LAT));
		}
		vSetRank(p, bkRank);

		vIO32WriteFldMulti(DDRPHY_REG_MISC_RX_IN_BUFF_EN_CTRL, P_Fld(0, MISC_RX_IN_BUFF_EN_CTRL_DIS_IN_BUFF_EN)
					| P_Fld(0, MISC_RX_IN_BUFF_EN_CTRL_FIX_IN_BUFF_EN)
					| P_Fld(0, MISC_RX_IN_BUFF_EN_CTRL_RX_IN_BUFF_EN_4BYTE_EN));

		vIO32WriteFldMulti(DDRPHY_REG_MISC_RX_IN_GATE_EN_CTRL, P_Fld(0, MISC_RX_IN_GATE_EN_CTRL_DIS_IN_GATE_EN)
					| P_Fld(0, MISC_RX_IN_GATE_EN_CTRL_FIX_IN_GATE_EN)
					| P_Fld(0, MISC_RX_IN_GATE_EN_CTRL_RX_IN_GATE_EN_4BYTE_EN));

	}
#if 0
		vIO32WriteFldAlign(DRAMC_REG_STBCAL2, 0, STBCAL2_STB_STBENRST_EARLY_1T_EN);
		vIO32WriteFldMulti(DRAMC_REG_SHU_STBCAL, P_Fld(u1TAIL_LAT, SHU_STBCAL_R1_DQSIEN_PICG_TAIL_EXT_LAT)
					| P_Fld(0, SHU_STBCAL_R1_DQSIEN_PICG_HEAD_EXT_LAT)
					| P_Fld(u1TAIL_LAT, SHU_STBCAL_R0_DQSIEN_PICG_TAIL_EXT_LAT)
					| P_Fld(0, SHU_STBCAL_R0_DQSIEN_PICG_HEAD_EXT_LAT));
		vIO32WriteFldMulti(DRAMC_REG_PHY_RX_INCTL, P_Fld(0, PHY_RX_INCTL_DIS_IN_BUFF_EN)
					| P_Fld(0, PHY_RX_INCTL_FIX_IN_BUFF_EN)
					| P_Fld(0, PHY_RX_INCTL_RX_IN_BUFF_EN_4BYTE_EN)
					| P_Fld(0, PHY_RX_INCTL_DIS_IN_GATE_EN)
					| P_Fld(0, PHY_RX_INCTL_FIX_IN_GATE_EN)
					| P_Fld(0, PHY_RX_INCTL_RX_IN_GATE_EN_4BYTE_EN));
#endif
}
3156 #endif
3157 
3158 #ifndef DPM_CONTROL_AFTERK
/*
 * dramc_exit_with_DFS_legacy_mode - hand DFS back to legacy (non-DPM) control.
 *
 * Used when the preloader exits without DPM: configures SPM for DRAM
 * self-refresh (non-ETT builds only), returns DFS control from SPM/MD32 to
 * legacy mode, and powers down both PHY PLLs.  Intended for CTP-load use.
 */
void dramc_exit_with_DFS_legacy_mode(DRAMC_CTX_T * p)
{
#if !__ETT__
	//set for SPM DRAM self refresh
	vIO32WriteFldAlign(SPM_POWERON_CONFIG_EN, 1, POWERON_CONFIG_EN_BCLK_CG_EN);
	vIO32WriteFldAlign(SPM_DRAMC_DPY_CLK_SW_CON_2, 1, SPM_DRAMC_DPY_CLK_SW_CON_2_SW_PHYPLL_MODE_SW);
	vIO32WriteFldAlign(SPM_POWER_ON_VAL0, 1, SPM_POWER_ON_VAL0_SC_PHYPLL_MODE_SW);
#endif
	//Preloader exit with legacy mode for CTP load used
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0x0, MISC_RG_DFS_CTRL_SPM_DVFS_CONTROL_SEL);
	vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 0x0, PHYPLL0_RG_RPHYPLL_EN);
	vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 0x0, CLRPLL0_RG_RCLRPLL_EN);
}
3172 #endif
3173 
3174 #if TX_PICG_NEW_MODE
/*
 * TXPICGNewModeEnable - select the new TX PICG mode.
 *
 * Clears the four ARPI clock-gating option bits in MISC_CTRL3 on all
 * channels, which switches the DQ/DQS PI clock gating to the new scheme.
 */
void TXPICGNewModeEnable(DRAMC_CTX_T * p)
{
	//Switch TX PICG to new mode
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CTRL3, P_Fld(0, MISC_CTRL3_ARPI_CG_MCK_DQ_OPT)
										 | P_Fld(0, MISC_CTRL3_ARPI_MPDIV_CG_DQ_OPT)
										 | P_Fld(0, MISC_CTRL3_ARPI_CG_DQS_OPT)
										 | P_Fld(0, MISC_CTRL3_ARPI_CG_DQ_OPT));
}
3183 #endif
3184 
3185 #if ENABLE_WRITE_DBI_Protect
/* Program the write-DBI/DMI pinmux protection masks.
 *
 * Byte0 must not swap bits between group1 (DQ0/1) and group2 (DQ2~DQ7),
 * and Byte1 must not swap bits between group1 (DQ8/9) and group2
 * (DQ10~DQ15), so ARBCTL_DBIWR_OPT_B0/B1 have to mirror the board's O1
 * pinmux table.
 *
 * @p:     DRAMC runtime context (selects the per-channel O1 mapping).
 * @onoff: written to both DBIWR_PINMUX_EN and DBIWR_IMP_EN.
 */
void ApplyWriteDBIProtect(DRAMC_CTX_T *p, U8 onoff)
{
#if __A60868_TO_BE_PORTING__
	const U8 *o1_map = (U8 *)uiLPDDR4_O1_Mapping_POP[p->channel];
	U16 lane_mask = 0;
	U8 opt_b0, opt_b1;
	int lane;

	/* For each DQ lane, bit <o1 position> of constant 0x7C7C selects
	 * whether the lane contributes to the protect mask — presumably it
	 * encodes the group1/group2 split described above; verify against
	 * the ARBCTL DBIWR documentation. */
	for (lane = 0; lane < 16; lane++)
	{
		lane_mask |= ((0x7C7C >> o1_map[lane]) & 0x1) << lane;
	}

	opt_b1 = (U8)(lane_mask >> 8) & 0xff;   /* Byte1 mask: DQ8..DQ15 */
	opt_b0 = (U8)lane_mask & 0xff;          /* Byte0 mask: DQ0..DQ7  */

	vIO32WriteFldMulti_All(DRAMC_REG_ARBCTL, P_Fld(opt_b1, ARBCTL_DBIWR_OPT_B1)
										   | P_Fld(opt_b0, ARBCTL_DBIWR_OPT_B0)
										   | P_Fld(onoff, ARBCTL_DBIWR_PINMUX_EN)
										   | P_Fld(onoff, ARBCTL_DBIWR_IMP_EN));
#endif
}
3216 #endif
3217 
3218 #if ENABLE_WRITE_DBI
/* Apply the write-DBI power-improvement option on all channels.
 *
 * Clears the per-byte DBIWR option masks and the pinmux remap enable,
 * then writes @onoff into DBIWR_IMP_EN.
 *
 * @p:     DRAMC runtime context (register-access convention; not read here).
 * @onoff: 1 to enable the improvement, 0 to disable.
 */
void ApplyWriteDBIPowerImprove(DRAMC_CTX_T *p, U8 onoff)
{

	// set DBIWR_IMP_EN = 1
	// DBIWR_OPTB0[1:0]=0, DBIWR_OPT_B0[7]=0
	// DBIWR_OPTB1[1:0]=0, DBIWR_OPT_B1[7]=0
	vIO32WriteFldMulti_All(DRAMC_REG_DBIWR_PROTECT, P_Fld(0, DBIWR_PROTECT_DBIWR_OPT_B1)
									   | P_Fld(0, DBIWR_PROTECT_DBIWR_OPT_B0)
									   | P_Fld(0, DBIWR_PROTECT_DBIWR_PINMUX_EN)
									   | P_Fld(onoff, DBIWR_PROTECT_DBIWR_IMP_EN));
}
3230 /* DDR800 mode struct declaration (declared here due Fld_wid for each register type) */
3231 /*
3232 typedef struct _DDR800Mode_T
3233 {
3234 	U8 dll_phdet_en_b0: Fld_wid(SHU_B0_DLL0_RG_ARDLL_PHDET_EN_B0_SHU);
3235 	U8 dll_phdet_en_b1: Fld_wid(SHU_B1_DLL0_RG_ARDLL_PHDET_EN_B1_SHU);
3236 	U8 dll_phdet_en_ca_cha: Fld_wid(SHU_CA_DLL0_RG_ARDLL_PHDET_EN_CA_SHU);
3237 	U8 dll_phdet_en_ca_chb: Fld_wid(SHU_CA_DLL0_RG_ARDLL_PHDET_EN_CA_SHU);
3238 	U8 phypll_ada_mck8x_en: Fld_wid(SHU_PLL22_RG_RPHYPLL_ADA_MCK8X_EN_SHU);
3239 	U8 ddr400_en_b0: Fld_wid(SHU_B0_DQ6_RG_ARPI_DDR400_EN_B0);
3240 	U8 ddr400_en_b1: Fld_wid(SHU_B1_DQ6_RG_ARPI_DDR400_EN_B1);
3241 	U8 ddr400_en_ca: Fld_wid(SHU_CA_CMD6_RG_ARPI_DDR400_EN_CA);
3242 	U8 phypll_ddr400_en: Fld_wid(SHU_PLL1_RG_RPHYPLL_DDR400_EN);
3243 	U8 ddr400_dqs_ps_b0: Fld_wid(SHU_B0_DQ9_RG_DDR400_DQS_PS_B0);
3244 	U8 ddr400_dqs_ps_b1: Fld_wid(SHU_B1_DQ9_RG_DDR400_DQS_PS_B1);
3245 	U8 ddr400_dq_ps_b0: Fld_wid(SHU_B0_DQ9_RG_DDR400_DQ_PS_B0);
3246 	U8 ddr400_dq_ps_b1: Fld_wid(SHU_B1_DQ9_RG_DDR400_DQ_PS_B1);
3247 	U8 ddr400_dqs_ps_ca: Fld_wid(SHU_CA_CMD9_RG_DDR400_DQS_PS_CA);
3248 	U8 ddr400_dq_ps_ca: Fld_wid(SHU_CA_CMD9_RG_DDR400_DQ_PS_CA);
3249 	U8 ddr400_semi_en_b0: Fld_wid(SHU_B0_DQ9_RG_DDR400_SEMI_EN_B0);
3250 	U8 ddr400_semi_en_b1: Fld_wid(SHU_B1_DQ9_RG_DDR400_SEMI_EN_B1);
3251 	U8 ddr400_semi_en_ca: Fld_wid(SHU_CA_CMD9_RG_DDR400_SEMI_EN_CA);
3252 	U8 ddr400_semi_open_en: Fld_wid(SHU_PLL0_RG_DDR400_SEMI_OPEN_EN);
3253 	U8 pll0_ada_mck8x_chb_en: Fld_wid(SHU_PLL0_ADA_MCK8X_CHB_EN);
3254 	U8 pll0_ada_mck8x_cha_en: Fld_wid(SHU_PLL0_ADA_MCK8X_CHA_EN);
3255 } DDR800Mode_T;
3256 */
3257 #endif
3258 
3259 
/* Read-ODT (RODT) and RX VREF related APHY settings.
 *
 * Enables the B0/B1 RX VREF, picks the terminated/unterminated VREF level
 * per p->odt_onoff and DRAM type and programs it for every supported rank,
 * then configures RODT enable (forced on when TX WDQS is used), optional
 * RODT tracking, the TX IO ODT disable bits, and a few APHY CG disables.
 *
 * @p: DRAMC runtime context (odt_onoff, dram_type, support_rank_num).
 */
void RODTSettings(DRAMC_CTX_T *p)
{
	U8 u1VrefLevel;
	U8 u1RkIdx, u1RkBackup;

	//VREF_EN
	vIO32WriteFldAlign(DDRPHY_REG_B0_DQ5, 1, B0_DQ5_RG_RX_ARDQ_VREF_EN_B0);
	vIO32WriteFldAlign(DDRPHY_REG_B1_DQ5, 1, B1_DQ5_RG_RX_ARDQ_VREF_EN_B1);

	//Update related setting of APHY RX and ODT: unterm VREF mode tracks ODT off
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B0_VREF, !(p->odt_onoff), SHU_B0_VREF_RG_RX_ARDQ_VREF_UNTERM_EN_B0);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B1_VREF, !(p->odt_onoff), SHU_B1_VREF_RG_RX_ARDQ_VREF_UNTERM_EN_B1);

	if (p->odt_onoff == ODT_ON)
	{
		u1VrefLevel = (p->dram_type == TYPE_LPDDR5) ? 0x46 /*term LP5*/ : 0x2c /*term LP4*/;
	}
	else
	{
		u1VrefLevel = 0x37; //unterm: same level for LP5 and LP4
	}

	//Program the selected VREF level into both bytes of every rank
	u1RkBackup = u1GetRank(p);
	for (u1RkIdx = 0; u1RkIdx < (U32)(p->support_rank_num); u1RkIdx++)
	{
		vSetRank(p, u1RkIdx);
		vIO32WriteFldMulti(DDRPHY_REG_SHU_B0_PHY_VREF_SEL,
					P_Fld(u1VrefLevel, SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_LB_B0) |
					P_Fld(u1VrefLevel, SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_UB_B0));
		vIO32WriteFldMulti(DDRPHY_REG_SHU_B1_PHY_VREF_SEL,
					P_Fld(u1VrefLevel, SHU_B1_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_LB_B1) |
					P_Fld(u1VrefLevel, SHU_B1_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_UB_B1));
	}
	vSetRank(p, u1RkBackup);

	vIO32WriteFldAlign(DDRPHY_REG_SHU_B0_VREF, 1, SHU_B0_VREF_RG_RX_ARDQ_VREF_RANK_SEL_EN_B0);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B1_VREF, 1, SHU_B1_VREF_RG_RX_ARDQ_VREF_RANK_SEL_EN_B1);

#if ENABLE_TX_WDQS
	//WDQS in use: RODT must stay enabled regardless of odt_onoff
	vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_ODTCTRL, 1, MISC_SHU_ODTCTRL_RODTEN);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B0_DQ7, 1, SHU_B0_DQ7_R_DMRODTEN_B0);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B1_DQ7, 1, SHU_B1_DQ7_R_DMRODTEN_B1);
#else
	vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_ODTCTRL, p->odt_onoff, MISC_SHU_ODTCTRL_RODTEN);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B0_DQ7, p->odt_onoff, SHU_B0_DQ7_R_DMRODTEN_B0);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B1_DQ7, p->odt_onoff, SHU_B1_DQ7_R_DMRODTEN_B1);
#endif

#if ENABLE_RODT_TRACKING
	//RODT tracking
	vIO32WriteFldMulti(DDRPHY_REG_MISC_SHU_RODTENSTB, P_Fld(1, MISC_SHU_RODTENSTB_RODTENSTB_TRACK_EN)
																	| P_Fld(1, MISC_SHU_RODTENSTB_RODTENSTB_TRACK_UDFLWCTRL)
																	| P_Fld(0, MISC_SHU_RODTENSTB_RODTENSTB_SELPH_BY_BITTIME));
#endif

	//Darren-vIO32WriteFldAlign(DDRPHY_REG_B0_DQ6, !(p->odt_onoff), B0_DQ6_RG_TX_ARDQ_ODTEN_EXT_DIS_B0);
	//Darren-vIO32WriteFldAlign(DDRPHY_REG_B1_DQ6, !(p->odt_onoff), B1_DQ6_RG_TX_ARDQ_ODTEN_EXT_DIS_B1);
	//Darren-vIO32WriteFldAlign(DDRPHY_REG_CA_CMD6, !(p->odt_onoff), CA_CMD6_RG_TX_ARCMD_ODTEN_EXT_DIS);
	//TX IO ODT disable follows ODT off
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B0_DQ13, !(p->odt_onoff), SHU_B0_DQ13_RG_TX_ARDQ_IO_ODT_DIS_B0);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B1_DQ13, !(p->odt_onoff), SHU_B1_DQ13_RG_TX_ARDQ_IO_ODT_DIS_B1);
	vIO32WriteFldAlign(DDRPHY_REG_SHU_CA_CMD13, !(p->odt_onoff), SHU_CA_CMD13_RG_TX_ARCA_IO_ODT_DIS_CA);

	//APHY CG disable
	vIO32WriteFldMulti(DDRPHY_REG_SHU_B0_DQ13, P_Fld(0, SHU_B0_DQ13_RG_TX_ARDQS_OE_ODTEN_CG_EN_B0)
											| P_Fld(0, SHU_B0_DQ13_RG_TX_ARDQM_OE_ODTEN_CG_EN_B0));
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B0_DQ14, 0, SHU_B0_DQ14_RG_TX_ARDQ_OE_ODTEN_CG_EN_B0);

	vIO32WriteFldMulti(DDRPHY_REG_SHU_B1_DQ13, P_Fld(0, SHU_B1_DQ13_RG_TX_ARDQS_OE_ODTEN_CG_EN_B1)
											| P_Fld(0, SHU_B1_DQ13_RG_TX_ARDQM_OE_ODTEN_CG_EN_B1));
	vIO32WriteFldAlign(DDRPHY_REG_SHU_B1_DQ14, 0, SHU_B1_DQ14_RG_TX_ARDQ_OE_ODTEN_CG_EN_B1);
}
3337 
3338 /* LP4 use 7UI mode (1)
3339  * LP5 lower than 4266 use 7UI mode (1)
3340  * LP5 higher than 4266 use 11UI mode (2)
3341  * LP5 higher than 4266 with better SI use 11/24UI mode (3)
3342  */
/* Select the DQS gating (DQSIEN) stable mode per the read preamble.
 *
 * Mode mapping (see comment block above this function):
 *   1 = 7UI     — LP4, and LP5 at or below 1600 (data rate 3200/4266 boundary)
 *   2 = 11UI    — LP5 read preamble 2S_2T
 *   3 = 11/24UI — LP5 read preamble XS_4T (better SI; not selected here)
 *
 * @p: DRAMC runtime context (frequency, LP5 family query).
 */
void DQSSTBSettings(DRAMC_CTX_T *p)
{
	unsigned int gating_mode = 1;	/* default: 7UI mode */

#if (__LP5_COMBO__)
	U8 preamble = LPDDR5_RPRE_4S_0T;

	/* High-speed LP5 needs the longer 2S_2T read preamble */
	if (is_lp5_family(p) && (p->frequency > 1600))
		preamble = LPDDR5_RPRE_2S_2T;

	switch (preamble)
	{
		case LPDDR5_RPRE_2S_2T:
			gating_mode = 2;
			break;
		case LPDDR5_RPRE_XS_4T:
			gating_mode = 3;
			break;
		default:
			break;	/* keep 7UI mode */
	}
#endif

	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SHU_STBCAL),
		gating_mode, MISC_SHU_STBCAL_DQSIEN_DQSSTB_MODE);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_DQ10),
		gating_mode, SHU_B0_DQ10_RG_RX_ARDQS_DQSIEN_MODE_B0);
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B1_DQ10),
		gating_mode, SHU_B1_DQ10_RG_RX_ARDQS_DQSIEN_MODE_B1);
}
3369 
/* Program the MCK8X low-power sequencing selects in MISC_LP_CTRL.
 *
 * All seven 8X-domain *_SEQ_LP_SEL / CG_SRC_*_LP_SEL fields get the same
 * value: 1 when ENABLE_REMOVE_MCK8X_UNCERT_LOWPOWER_OPTION is built in
 * (per the macro name, this removes the MCK8X uncertainty low-power
 * option — confirm against the MISC_LP_CTRL field spec), otherwise 0.
 *
 * @p: DRAMC runtime context (register-access convention; not read here).
 */
static void SetMck8xLowPwrOption(DRAMC_CTX_T *p)
{
#if ENABLE_REMOVE_MCK8X_UNCERT_LOWPOWER_OPTION
	U32 u4SeqLpSel = 1;
#else
	U32 u4SeqLpSel = 0;
#endif

	vIO32WriteFldMulti(DDRPHY_REG_MISC_LP_CTRL,
			  P_Fld(u4SeqLpSel, MISC_LP_CTRL_RG_SC_ARPI_RESETB_8X_SEQ_LP_SEL)
			| P_Fld(u4SeqLpSel, MISC_LP_CTRL_RG_ADA_MCK8X_8X_SEQ_LP_SEL)
			| P_Fld(u4SeqLpSel, MISC_LP_CTRL_RG_AD_MCK8X_8X_SEQ_LP_SEL)
			| P_Fld(u4SeqLpSel, MISC_LP_CTRL_RG_MIDPI_EN_8X_SEQ_LP_SEL)
			| P_Fld(u4SeqLpSel, MISC_LP_CTRL_RG_MIDPI_CKDIV4_EN_8X_SEQ_LP_SEL)
			| P_Fld(u4SeqLpSel, MISC_LP_CTRL_RG_MCK8X_CG_SRC_LP_SEL)
			| P_Fld(u4SeqLpSel, MISC_LP_CTRL_RG_MCK8X_CG_SRC_AND_LP_SEL));
}
3387 
/* LP4/LP4X post-init settings update.
 *
 * Applies the LPDDR4-specific corrections on top of the DV initial
 * settings: CA/CS decoupling, per-byte RX DVS disable, RX PICG new mode,
 * SW impedance save, DQS gating mode, RODT, optional CBT CA-shift, refresh
 * timing and tracking options, and a set of MP settings.
 *
 * Fix vs. original: u1RankIdx/u1RankIdxBak were declared unconditionally
 * but used only inside the CBT_MOVE_CA_INSTEAD_OF_CLK region, causing
 * unused-variable warnings (and -Werror failures) when that feature is
 * disabled; the declarations now live inside the #if block. Register-write
 * content and order are unchanged.
 *
 * @p: DRAMC runtime context.
 */
void LP4_UpdateInitialSettings(DRAMC_CTX_T *p)
{
	///TODO:
	//BRINGUP-TEST
	vIO32WriteFldAlign(DDRPHY_REG_MISC_CTRL3, 0, MISC_CTRL3_ARPI_CG_CLK_OPT);
	vIO32WriteFldAlign(DDRPHY_REG_MISC_CTRL4, 0, MISC_CTRL4_R_OPT2_CG_CLK);

	//vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld(1, CA_CMD2_RG_TX_ARCLK_OE_TIE_EN_CA) | P_Fld(0, CA_CMD2_RG_TX_ARCLK_OE_TIE_SEL_CA));
	//vIO32WriteFldMulti_All(DDRPHY_REG_CA_CMD2, P_Fld(1, CA_CMD2_RG_TX_ARCLKB_OE_TIE_EN_CA) | P_Fld(0, CA_CMD2_RG_TX_ARCLKB_OE_TIE_SEL_CA));
	//Set_MRR_Pinmux_Mapping(p); //Update MRR pinmux

	vReplaceDVInit(p);

	//Let CA and CS be independent
	vIO32WriteFldAlign(DDRPHY_REG_SHU_CA_CMD14, 0xC0, SHU_CA_CMD14_RG_TX_ARCA_MCKIO_SEL_CA); //@Darren, confirm with Alucary
	//Disable perbyte option
	vIO32WriteFldMulti(DDRPHY_REG_SHU_B0_DQ7, P_Fld(0x0, SHU_B0_DQ7_R_DMRXDVS_PBYTE_DQM_EN_B0)
											| P_Fld(0x0, SHU_B0_DQ7_R_DMRXDVS_PBYTE_FLAG_OPT_B0)
											| P_Fld(0x0, SHU_B0_DQ7_R_DMRXDVS_DQM_FLAGSEL_B0));
	vIO32WriteFldMulti(DDRPHY_REG_SHU_B1_DQ7, P_Fld(0x0, SHU_B1_DQ7_R_DMRXDVS_PBYTE_DQM_EN_B1)
											| P_Fld(0x0, SHU_B1_DQ7_R_DMRXDVS_PBYTE_FLAG_OPT_B1)
											| P_Fld(0x0, SHU_B1_DQ7_R_DMRXDVS_DQM_FLAGSEL_B1));

#if RX_PICG_NEW_MODE
	RXPICGSetting(p);
#endif

#if SIMULATION_SW_IMPED // Darren: Need porting by E2 IMP Calib DVT owner
	#if FSP1_CLKCA_TERM
		U8 u1CASwImpFreqRegion = (p->dram_fsp == FSP_0)? IMP_LOW_FREQ: IMP_HIGH_FREQ;
	#else
		U8 u1CASwImpFreqRegion = (p->frequency <= 1866)? IMP_LOW_FREQ: IMP_HIGH_FREQ;
	#endif
		U8 u1DQSwImpFreqRegion = (p->frequency <= 1866)? IMP_LOW_FREQ: IMP_HIGH_FREQ;

	if (p->dram_type == TYPE_LPDDR4X)
		DramcSwImpedanceSaveRegister(p, u1CASwImpFreqRegion, u1DQSwImpFreqRegion, DRAM_DFS_SHUFFLE_1);
#endif

	DQSSTBSettings(p);

	RODTSettings(p);

	//WDBI-OFF
	vIO32WriteFldAlign(DRAMC_REG_SHU_TX_SET0, 0x0, SHU_TX_SET0_DBIWR);

#if CBT_MOVE_CA_INSTEAD_OF_CLK
	//Rank loop locals are only needed for the CA-shift flow below
	U8 u1RankIdx, u1RankIdxBak;
	U8 u1CaPI = 0, u1CaUI = 0;

	u1CaUI = 1;
	u1CaPI = 0;

	// CA delay shift u1CaUI*UI
	DramcCmdUIDelaySetting(p, u1CaUI);
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_SHU_SELPH_CA5), P_Fld(0x1, SHU_SELPH_CA5_DLY_CS) | P_Fld(0x1, SHU_SELPH_CA5_DLY_CS1));

	// Rank0/1 u1CaPI*PI CA delay

	u1RankIdxBak = u1GetRank(p);

	for (u1RankIdx = 0; u1RankIdx < (U32)(p->support_rank_num); u1RankIdx++)
	{
		vSetRank(p, u1RankIdx);

		CBTDelayCACLK(p, u1CaPI);
	}

	vSetRank(p, u1RankIdxBak);
#endif

#if ENABLE_TPBR2PBR_REFRESH_TIMING
	vIO32WriteFldAlign(DRAMC_REG_REFCTRL1, 0x1, REFCTRL1_REF_OVERHEAD_PBR2PB_ENA); //@Derping
	vIO32WriteFldAlign(DRAMC_REG_MISCTL0, 0x1, MISCTL0_REFP_ARBMASK_PBR2PBR_ENA); //@Unique
	vIO32WriteFldAlign(DRAMC_REG_SCHEDULER_COM, 0x1, SCHEDULER_COM_PBR2PBR_OPT); //@YH
#endif

#if RDSEL_TRACKING_EN
	vIO32WriteFldAlign(DDRPHY_REG_SHU_MISC_RDSEL_TRACK, 0, SHU_MISC_RDSEL_TRACK_DMDATLAT_I); //DMDATLAT_I should be set as 0 before set datlat k value, otherwise the status flag wil be set as 1
#endif

#if ENABLE_WRITE_POST_AMBLE_1_POINT_5_TCK
	vIO32WriteFldAlign(DRAMC_REG_SHU_TX_SET0, p->dram_fsp, SHU_TX_SET0_WPST1P5T); //Set write post-amble by FSP with MR3
#else
	vIO32WriteFldAlign(DRAMC_REG_SHU_TX_SET0, 0x0, SHU_TX_SET0_WPST1P5T); //Set write post-amble by FSP with MR3
#endif

#if (!XRTRTR_NEW_CROSS_RK_MODE)
	vIO32WriteFldAlign(DDRPHY_REG_SHU_MISC_RANK_SEL_STB, 0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_EN);
#endif

	//MP Setting
	vIO32WriteFldMulti(DRAMC_REG_DUMMY_RD, P_Fld(0x1, DUMMY_RD_DMYRD_REORDER_DIS) | P_Fld(0x1, DUMMY_RD_SREF_DMYRD_EN));
	vIO32WriteFldMulti(DRAMC_REG_DRAMCTRL, P_Fld(0x0, DRAMCTRL_ALL_BLOCK_CTO_ALE_DBG_EN)
									 | P_Fld(0x1, DRAMCTRL_DVFS_BLOCK_CTO_ALE_DBG_EN)
									 | P_Fld(0x1, DRAMCTRL_SELFREF_BLOCK_CTO_ALE_DBG_EN));
	vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 1, MISC_STBCAL2_DQSGCNT_BYP_REF);
	//@Darren- enable bit11 via FMeter, vIO32WriteFldAlign(DDRPHY_REG_MISC_CG_CTRL7, 0, MISC_CG_CTRL7_CK_BFE_DCM_EN);

	//1:8 --> data rate<=1600 set 0,  data rate<=3200 set 1, else 2
	//1:4 --> data rate<= 800 set 0,  data rate<=1600 set 1, else 2
	if(p->frequency<=800)
		vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_PHY_RX_CTRL, 0, MISC_SHU_PHY_RX_CTRL_RX_IN_BUFF_EN_HEAD);
	else if(p->frequency<=1200)
		vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_PHY_RX_CTRL, 1, MISC_SHU_PHY_RX_CTRL_RX_IN_BUFF_EN_HEAD);
	else
		vIO32WriteFldAlign(DDRPHY_REG_MISC_SHU_PHY_RX_CTRL, 2, MISC_SHU_PHY_RX_CTRL_RX_IN_BUFF_EN_HEAD);
	vIO32WriteFldAlign(DDRPHY_REG_MISC_CTRL1, 1, MISC_CTRL1_R_DMARPIDQ_SW);
	vIO32WriteFldMulti(DDRPHY_REG_CA_TX_MCK, P_Fld(0xa, CA_TX_MCK_R_DMRESETB_DRVP_FRPHY) | P_Fld(0xa, CA_TX_MCK_R_DMRESETB_DRVN_FRPHY));
	vIO32WriteFldMulti(DDRPHY_REG_MISC_SHU_RANK_SEL_LAT, P_Fld(0x3, MISC_SHU_RANK_SEL_LAT_RANK_SEL_LAT_B0) |
			P_Fld(0x3, MISC_SHU_RANK_SEL_LAT_RANK_SEL_LAT_B1) | P_Fld(0x3, MISC_SHU_RANK_SEL_LAT_RANK_SEL_LAT_CA));

	SetMck8xLowPwrOption(p);
}
3502 
3503 
/* LP5 post-init settings update.
 *
 * LPDDR5 counterpart of LP4_UpdateInitialSettings: CA/CS decoupling,
 * per-byte RX DVS disable, CG workaround for PI adjustability, DQS gating
 * mode, RODT, SW impedance save, and tracking options.
 *
 * @p: DRAMC runtime context.
 */
void LP5_UpdateInitialSettings(DRAMC_CTX_T *p)
{
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CA_CMD14, 0x0, SHU_CA_CMD14_RG_TX_ARCA_MCKIO_SEL_CA); //Let CA and CS be independent
	//Set_MRR_Pinmux_Mapping(p); //Update MRR pinmux

	//Disable perbyte option
	vIO32WriteFldMulti(DDRPHY_REG_SHU_B0_DQ7, P_Fld(0x0, SHU_B0_DQ7_R_DMRXDVS_PBYTE_DQM_EN_B0)
											| P_Fld(0x0, SHU_B0_DQ7_R_DMRXDVS_PBYTE_FLAG_OPT_B0)
											| P_Fld(0x0, SHU_B0_DQ7_R_DMRXDVS_DQM_FLAGSEL_B0));
	vIO32WriteFldMulti(DDRPHY_REG_SHU_B1_DQ7, P_Fld(0x0, SHU_B1_DQ7_R_DMRXDVS_PBYTE_DQM_EN_B1)
											| P_Fld(0x0, SHU_B1_DQ7_R_DMRXDVS_PBYTE_FLAG_OPT_B1)
											| P_Fld(0x0, SHU_B1_DQ7_R_DMRXDVS_DQM_FLAGSEL_B1));

	///TODO: Temp solution. May need to resolve in init flow
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CG_CTRL5, /* Will cause PI un-adjustable */
		P_Fld(0x0, MISC_CG_CTRL5_R_CA_DLY_DCM_EN) |
		P_Fld(0x0, MISC_CG_CTRL5_R_CA_PI_DCM_EN) |
		P_Fld(0x0, MISC_CG_CTRL5_R_DQ0_DLY_DCM_EN) |
		P_Fld(0x0, MISC_CG_CTRL5_R_DQ0_PI_DCM_EN) |
		P_Fld(0x0, MISC_CG_CTRL5_R_DQ1_DLY_DCM_EN) |
		P_Fld(0x0, MISC_CG_CTRL5_R_DQ1_PI_DCM_EN));

	DQSSTBSettings(p);

	RODTSettings(p);

#if SIMULATION_SW_IMPED
	#if FSP1_CLKCA_TERM
		U8 u1CASwImpFreqRegion = (p->dram_fsp == FSP_0)? IMP_LOW_FREQ: IMP_HIGH_FREQ;
	#else
		U8 u1CASwImpFreqRegion = (p->frequency <= 1866)? IMP_LOW_FREQ: IMP_HIGH_FREQ;
	#endif
		U8 u1DQSwImpFreqRegion = (p->frequency <= 1866)? IMP_LOW_FREQ: IMP_HIGH_FREQ;

	if (p->dram_type == TYPE_LPDDR5)
		DramcSwImpedanceSaveRegister(p, u1CASwImpFreqRegion, u1DQSwImpFreqRegion, DRAM_DFS_SHUFFLE_1);
#endif

#if RDSEL_TRACKING_EN
	vIO32WriteFldAlign(DDRPHY_REG_SHU_MISC_RDSEL_TRACK, 0, SHU_MISC_RDSEL_TRACK_DMDATLAT_I); //DMDATLAT_I should be set as 0 before set datlat k value, otherwise the status flag wil be set as 1
#endif

#if (!XRTRTR_NEW_CROSS_RK_MODE)
	vIO32WriteFldAlign(DDRPHY_REG_SHU_MISC_RANK_SEL_STB, 0x0, SHU_MISC_RANK_SEL_STB_RANK_SEL_STB_EN);
#endif
	SetMck8xLowPwrOption(p);
}
3551 
3552 #define CKGEN_FMETER 0x0
3553 #define ABIST_FMETER 0x1
3554 
/* Measure DDRPHY-related clocks with the on-chip ABIST frequency meter.
 *
 * NOTE: the entire measurement flow is currently compiled out
 * ("#if 0 //temp remove for bringup"), so this function always returns 0.
 * When re-enabled it samples several ABIST meter channels (AD_MPLL_CK,
 * the RCLRPLL/RPHYPLL DIV4 clocks, and per-channel fmem_ck after the
 * idle-mask DCM), logs each reading, stores the CHB reading << 2 into
 * gddrphyfmeter_value, and returns (before_value << 16 | after_value).
 *
 * Returns: packed CHA/CHB readings when enabled; 0 while compiled out.
 */
unsigned int DDRPhyFreqMeter(void)
{
#if 0 //temp remove for bringup
#if (FOR_DV_SIMULATION_USED==0)

	unsigned int reg0=0, reg1=0;
	unsigned int before_value=0, after_value=0;
	unsigned int frq_result=0;
#if (fcFOR_CHIP_ID == fcMargaux)
	// CHB CA-manager enable bit; gates whether CHB is measured below
	unsigned int chb_mctl_ca_en = (DRV_Reg32(Channel_B_DDRPHY_AO_BASE_ADDRESS + 0xBA8) >> 19) & 0x1;
#endif

#if 1//def HJ_SIM
	/*TINFO="\n[PhyFreqMeter]"*/

	// Set bit16 of 0x70c/0x72c on CHA — presumably a meter/probe enable; confirm against the AO register map
	reg0 = DRV_Reg32(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x70c) ;
	DRV_WriteReg32	(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x70c  , reg0 | (1 << 16));
	reg0 = DRV_Reg32(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x72c) ;
	DRV_WriteReg32	(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x72c  , reg0 | (1 << 16));

	// abist_clk29: AD_MPLL_CK
	frq_result = FMeter(ABIST_FMETER, 29) ;
	msg("AD_MPLL_CK FREQ=%d\n", frq_result);
	/*TINFO="AD_MPLL_CK FREQ=%d\n", frq_result*/

	// abist_clk40: DA_MPLL_52M_DIV_CK
	//! frq_result = FMeter(ABIST_FMETER, 40) ;
	/*TINFO="DA_MPLL_52M_DIV_CK FREQ=%d\n", frq_result*/
	#if 1
	// NAO 0x50c bit8 selects which PLL's DIV4 clock is active (0: CLRPLL path)
	if((DRV_Reg32(Channel_A_DDRPHY_NAO_BASE_ADDRESS + 0x50c) & (1<<8))==0)
	{
		// abist_clk31: AD_RCLRPLL_DIV4_CK_ch02
		//frq_result = FMeter(ABIST_FMETER, 31) ;
		msg("AD_RCLRPLL_DIV4_CK_ch02 FREQ=%d\n", frq_result);
		/*TINFO="AD_RCLRPLL_DIV4_CK_ch02 FREQ=%d\n", frq_result*/
	}
	else
	{
		// abist_clk33: AD_RPHYRPLL_DIV4_CK_ch02
		frq_result = FMeter(ABIST_FMETER, 33) ;
		msg("AD_RPHYPLL_DIV4_CK_ch02 FREQ=%d\n", frq_result);
		/*TINFO="AD_RPHYPLL_DIV4_CK_ch02 FREQ=%d\n", frq_result*/
	}
	#endif
	//! ch0
	reg0 = DRV_Reg32(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x504) ;
	DRV_WriteReg32	(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x504  , reg0 | (1 << 11));

	// abistgen_clk44: fmem_ck_aft_dcm_ch0 (DRAMC CHA's clock after idle mask)
	before_value = FMeter(ABIST_FMETER, 44);
	msg("fmem_ck_aft_dcm_ch0 FREQ=%d\n", before_value);
	/*TINFO="fmem_ck_aft_dcm_ch0 FREQ=%d\n", after_value*/

#if (fcFOR_CHIP_ID == fcMargaux)
	if (chb_mctl_ca_en == 1)
#endif
	{
		reg0 = DRV_Reg32(Channel_B_DDRPHY_AO_BASE_ADDRESS + 0x504) ;
		DRV_WriteReg32	(Channel_B_DDRPHY_AO_BASE_ADDRESS + 0x504  , reg0 | (1 << 11));
	// abistgen_clk45: fmem_ck_aft_dcm_ch1 (DRAMC CHB's clock after idle mask)
	after_value = FMeter(ABIST_FMETER, 45);
	msg("fmem_ck_aft_dcm_ch1 FREQ=%d\n", after_value);
	}
	/*TINFO="fmem_ck_aft_dcm_ch1 FREQ=%d\n", after_value*/

	gddrphyfmeter_value = after_value<<2;

	// Restore bit16 of CHA 0x70c/0x72c after measuring
	reg0 = DRV_Reg32(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x70c) ;
	DRV_WriteReg32	(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x70c  , reg0 & ~(1 << 16));
	reg0 = DRV_Reg32(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x72c) ;
	DRV_WriteReg32	(Channel_A_DDRPHY_AO_BASE_ADDRESS + 0x72c  , reg0 & ~(1 << 16));


	#if (CHANNEL_NUM>2)
	// Repeat the same sequence for CHC/CHD (4-channel parts)
	reg0 = DRV_Reg32(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x70c) ;
	DRV_WriteReg32	(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x70c  , reg0 | (1 << 16));
	reg0 = DRV_Reg32(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x72c) ;
	DRV_WriteReg32	(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x72c  , reg0 | (1 << 16));
	#if 1
	if((DRV_Reg32(Channel_C_DDRPHY_NAO_BASE_ADDRESS + 0x50c) & (1<<8))==0)
	{
		// abist_clk32: AD_RCLRPLL_DIV4_CK_ch13
		//frq_result = FMeter(ABIST_FMETER, 32) ;
		msg("AD_RCLRPLL_DIV4_CK_ch13 FREQ=%d\n", frq_result);
		/*TINFO="AD_RCLRPLL_DIV4_CK_ch13 FREQ=%d\n", frq_result*/
	}
	else
	{
		// abist_clk34: AD_RPHYRPLL_DIV4_CK_ch13
		frq_result = FMeter(ABIST_FMETER, 34) ;
		msg("AD_RPHYPLL_DIV4_CK_ch13 FREQ=%d\n", frq_result);
		/*TINFO="AD_RPHYPLL_DIV4_CK_ch13 FREQ=%d\n", frq_result*/
	}
	#endif

	reg0 = DRV_Reg32(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x504) ;
	DRV_WriteReg32	(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x504  , reg0 | (1 << 11));
	reg0 = DRV_Reg32(Channel_D_DDRPHY_AO_BASE_ADDRESS + 0x504) ;
	DRV_WriteReg32	(Channel_D_DDRPHY_AO_BASE_ADDRESS + 0x504  , reg0 | (1 << 11));

	// abistgen_clk46: fmem_ck_aft_dcm_ch2 (DRAMC CHC's clock after idle mask)
	before_value = FMeter(ABIST_FMETER, 46);
	msg("fmem_ck_aft_dcm_ch2 FREQ=%d\n", before_value);
	/*TINFO="fmem_ck_aft_dcm_ch2 FREQ=%d\n", after_value*/

	// abistgen_clk47: fmem_ck_aft_dcm_ch3 (DRAMC CHC's clock after idle mask)
	after_value = FMeter(ABIST_FMETER, 47);
	msg("fmem_ck_aft_dcm_ch3 FREQ=%d\n", after_value);
	/*TINFO="fmem_ck_aft_dcm_ch3 FREQ=%d\n", after_value*/

	reg0 = DRV_Reg32(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x70c) ;
	DRV_WriteReg32	(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x70c  , reg0 & ~(1 << 16));
	reg0 = DRV_Reg32(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x72c) ;
	DRV_WriteReg32	(Channel_C_DDRPHY_AO_BASE_ADDRESS + 0x72c  , reg0 & ~(1 << 16));
	#endif

	return (before_value<<16 | after_value);
#endif //! end DSIM
#endif

#endif
	return 0;
}
3678