• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* SPDX-License-Identifier: BSD-3-Clause */
2 
3 //-----------------------------------------------------------------------------
4 // Include files
5 //-----------------------------------------------------------------------------
6 #include "dramc_common.h"
7 #include "dramc_int_global.h"
8 #include "x_hal_io.h"
9 
10 
11 #define SHUFFLE_GROUP	4				//SHU1~4
12 
13 //-----------------------------------------------------------------------------
14 // Global variables
15 //-----------------------------------------------------------------------------
16 
// Shuffle-index lookup table: filled by vInitMappingFreqArray() as an
// identity map over the shuffleIdx values present in gFreqTbl, and read
// back by get_shuffleIndex_by_Freq().
U8 _MappingFreqArray[DRAM_DFS_SRAM_MAX] = {0};

// DVFS control-path selection (0 by default; written elsewhere — not in this file).
U8 gDVFSCtrlSel = 0;
// NOTE(review): presumably backups of the DDRPHY0/1 SPM CTRL0/CTRL2 registers,
// saved/restored by DVFS code outside this view — confirm against callers.
U32 gu4Ddrphy0SPMCtrl0;
U32 gu4Ddrphy1SPMCtrl0;
U32 gu4Ddrphy0SPMCtrl2;
U32 gu4Ddrphy1SPMCtrl2;
24 
25 
/* Attach the given DFS frequency table to the DRAMC context. */
void vSetDFSTable(DRAMC_CTX_T *p, DRAM_DFS_FREQUENCY_TABLE_T *pFreqTable)
{
	p->pDFSTable = pFreqTable;
}
30 
/* Select the DFS operating point described by pFreqTable: install the
 * table into the context, apply its freq_sel to the DDRPHY, and (on
 * non-simulation builds) adjust Vcore to match the new frequency. */
void vSetDFSFreqSelByTable(DRAMC_CTX_T *p, DRAM_DFS_FREQUENCY_TABLE_T *pFreqTable)
{
	vSetDFSTable(p, pFreqTable);
	DDRPhyFreqSel(p, p->pDFSTable->freq_sel);
#if (FOR_DV_SIMULATION_USED == 0 && SW_CHANGE_FOR_SIMULATION == 0)
	vSetVcoreByFreq(p); // skipped for DV / SW simulation builds
#endif
}
39 
/* Initialize _MappingFreqArray as an identity map over the shuffleIdx
 * values listed in gFreqTbl (one entry per shuffle level).
 * The context pointer is accepted for API symmetry but not used here. */
void vInitMappingFreqArray(DRAMC_CTX_T *p)
{
	DRAM_DFS_FREQUENCY_TABLE_T *pTbl = gFreqTbl;
	U8 u1Idx;

	for (u1Idx = DRAM_DFS_SHUFFLE_1; u1Idx < DRAM_DFS_SHUFFLE_MAX; u1Idx++)
	{
		U8 u1Shu = pTbl[u1Idx].shuffleIdx;

		_MappingFreqArray[u1Shu] = u1Shu;
	}
}
52 
get_FreqTbl_by_shuffleIndex(DRAMC_CTX_T * p,U8 index)53 DRAM_DFS_FREQUENCY_TABLE_T* get_FreqTbl_by_shuffleIndex(DRAMC_CTX_T *p, U8 index)
54 {
55 	U8 u1ShuffleIdx = 0;
56 	DRAM_DFS_FREQUENCY_TABLE_T *pFreqTbl;
57 
58 	pFreqTbl = gFreqTbl;
59 
60 	for (u1ShuffleIdx = DRAM_DFS_SHUFFLE_1; u1ShuffleIdx < DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
61 	{
62 		if (pFreqTbl[u1ShuffleIdx].shuffleIdx == index)
63 		{
64 			return & pFreqTbl[u1ShuffleIdx];
65 		}
66 	}
67 	return NULL;
68 }
69 
get_shuffleIndex_by_Freq(DRAMC_CTX_T * p)70 U8 get_shuffleIndex_by_Freq(DRAMC_CTX_T *p)
71 {
72 	U8 u1ShuffleIdx = 0;
73 	DRAM_DFS_FREQUENCY_TABLE_T *pFreqTbl;
74 	DRAM_DFS_SRAM_SHU_T eCurr_shu_level = vGet_Current_ShuLevel(p);
75 
76 	pFreqTbl = gFreqTbl;
77 
78 	for (u1ShuffleIdx = DRAM_DFS_SHUFFLE_1; u1ShuffleIdx < DRAM_DFS_SHUFFLE_MAX; u1ShuffleIdx++)
79 	{
80 		if (pFreqTbl[u1ShuffleIdx].shuffleIdx == eCurr_shu_level)
81 		{
82 			return _MappingFreqArray[pFreqTbl[u1ShuffleIdx].shuffleIdx];
83 		}
84 	}
85 
86 	return 0;
87 }
88 
#if 0 //@Darren, debug codes — disabled debug helpers for dumping/copying shuffle SRAM registers
/* Debug: fill the DRAMC shuffle register range [u4StartAddr, u4EndAddr] with u4Data. */
void DramcWriteShuffleSRAMRange(DRAMC_CTX_T *p, U32 u4StartAddr, U32 u4EndAddr, u32 u4Data)
{
	U32 ii, u4tmp, u4Offset=0;

	for (ii = u4StartAddr; ii <= u4EndAddr; ii += 4)
	{
		vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0+u4Offset+SHU_GRP_DRAMC_OFFSET), u4Data);
		u4Offset += 4;
		mcDELAY_MS(1);
	}
}

/* Debug: fill the DDRPHY shuffle register range [u4StartAddr, u4EndAddr] with u4Data. */
void DdrphyWriteShuffleSRAMRange(DRAMC_CTX_T *p, U32 u4StartAddr, U32 u4EndAddr, u32 u4Data)
{
	U32 ii, u4tmp, u4Offset=0;

	for (ii = u4StartAddr; ii <= u4EndAddr; ii += 4)
	{
		vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_PHYPLL0+u4Offset+SHU_GRP_DDRPHY_OFFSET), u4Data);
		u4Offset += 4;
		mcDELAY_MS(1);
	}
}

/* Debug: dump the AO shuffle registers for every supported rank. */
void FullRGDump(DRAMC_CTX_T *p, U8 step, U32 u4ShuOffset)
{
	U8 u1RankIdx=0;

	msg("[FullRGDump] STEP%d\n", step);
	//Darren-DumpAoNonShuReg();
	for (u1RankIdx=RANK_0; u1RankIdx<p->support_rank_num; u1RankIdx++)
	{
		vSetRank(p, u1RankIdx);
		msg("[FullRGDump] RANK%d\n", u1RankIdx);
		DumpAoShuReg(u4ShuOffset, u4ShuOffset);
	}
	//Darren-DumpNaoReg();
}

/* Debug: read one register from a given SRAM shuffle level via the
 * MISC_SRAM_DMA debug-access window (DMA regs backed up and restored). */
U32 SramDebugModeRead(DRAMC_CTX_T *p, U8 sram_shu_level, U32 u4Reg)
{
	U32 u4Value=0;
	U32 u4RegBackupAddress[] =
	{
		(DDRPHY_REG_MISC_SRAM_DMA0),
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHB_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHB_ADDR),
#if (CHANNEL_NUM==4)
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHC_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHD_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHC_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHD_ADDR),
#endif
	};

	//Backup regs
	DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));

	//vIO32Write4B(DRAMC_REG_ADDR(u4Reg), u4Data); // SHU1
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x0, MISC_SRAM_DMA0_APB_SLV_SEL);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, 0x1, MISC_SRAM_DMA1_R_APB_DMA_DBG_ACCESS);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, sram_shu_level, MISC_SRAM_DMA1_R_APB_DMA_DBG_LEVEL); // SHU8
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x1, MISC_SRAM_DMA0_APB_SLV_SEL);

	p->ShuRGAccessIdx = DRAM_DFS_REG_SHU1;
	u4Value = u4IO32Read4B(DRAMC_REG_ADDR(u4Reg));// SHU1
	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
	p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
	msg("[SramDebugModeRead] RK%d Reg=0x%x, Value=0x%x\n", p->rank, u4Reg, u4Value);

	//vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x0, MISC_SRAM_DMA0_APB_SLV_SEL);
	//vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, 0x0, MISC_SRAM_DMA1_R_APB_DMA_DBG_ACCESS);

	return u4Value;
}

/* Debug: write one register of a given SRAM shuffle level via the
 * MISC_SRAM_DMA debug-access window (DMA regs backed up and restored). */
void SramDebugModeWrite(DRAMC_CTX_T *p, U8 sram_shu_level, U32 u4Reg, U32 u4Data)
{
	U32 u4RegBackupAddress[] =
	{
		(DDRPHY_REG_MISC_SRAM_DMA0),
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHB_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHB_ADDR),
#if (CHANNEL_NUM==4)
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHC_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA0 + SHIFT_TO_CHD_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHC_ADDR),
		(DDRPHY_REG_MISC_SRAM_DMA1 + SHIFT_TO_CHD_ADDR),
#endif
	};

	//Backup regs
	DramcBackupRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));

	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x0, MISC_SRAM_DMA0_APB_SLV_SEL);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, 0x1, MISC_SRAM_DMA1_R_APB_DMA_DBG_ACCESS);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, sram_shu_level, MISC_SRAM_DMA1_R_APB_DMA_DBG_LEVEL); // SHU8
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x1, MISC_SRAM_DMA0_APB_SLV_SEL);

	p->ShuRGAccessIdx = DRAM_DFS_REG_SHU1;
	vIO32Write4B(DRAMC_REG_ADDR(u4Reg), u4Data); // SHU1
	DramcRestoreRegisters(p, u4RegBackupAddress, sizeof(u4RegBackupAddress) / sizeof(U32));
	p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
	msg("[SramDebugModeWrite] RK%d Reg=0x%x, Value=0x%x\n", p->rank, u4Reg, u4Data);

	//vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0x0, MISC_SRAM_DMA0_APB_SLV_SEL);
	//vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, 0x0, MISC_SRAM_DMA1_R_APB_DMA_DBG_ACCESS);
}

/* Debug: copy DRAMC SHU0 register contents into SHU1 over the given range. */
void DramcCopyShu0toShu1(DRAMC_CTX_T *p, U32 u4StartAddr, U32 u4EndAddr)
{
	U32 ii, u4tmp, u4Offset=0;
	DRAM_DFS_REG_SHU_T ShuRGAccessIdxBackup = p->ShuRGAccessIdx; // SHU1 need use p->ShuRGAccessIdx=DRAM_DFS_REG_SHU1 for RK1

	for (ii = u4StartAddr; ii <= u4EndAddr; ii += 4)
	{
		u4tmp = u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0+u4Offset));
		mcDELAY_MS(1);
		p->ShuRGAccessIdx = DRAM_DFS_REG_SHU1;
		vIO32Write4B(DRAMC_REG_ADDR(DRAMC_REG_SHURK_SELPH_DQ0+u4Offset), u4tmp);
		p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
		u4Offset += 4;
		mcDELAY_MS(1);
	}

	p->ShuRGAccessIdx = ShuRGAccessIdxBackup;
}

/* Debug: copy DDRPHY SHU0 register contents into SHU1 over the given range. */
void DdrphyCopyShu0toShu1(DRAMC_CTX_T *p, U32 u4StartAddr, U32 u4EndAddr)
{
	U32 ii, u4tmp, u4Offset=0;
	DRAM_DFS_REG_SHU_T ShuRGAccessIdxBackup = p->ShuRGAccessIdx; // SHU1 need use p->ShuRGAccessIdx=DRAM_DFS_REG_SHU1 for RK1

	for (ii = u4StartAddr; ii <= u4EndAddr; ii += 4)
	{
		u4tmp = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_PHYPLL0+u4Offset));
		mcDELAY_MS(1);
		p->ShuRGAccessIdx = DRAM_DFS_REG_SHU1;
		vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_PHYPLL0+u4Offset), u4tmp);
		p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
		u4Offset += 4;
		mcDELAY_MS(1);
	}

	p->ShuRGAccessIdx = ShuRGAccessIdxBackup;
}
#endif
239 
240 #if ENABLE_LP4Y_WA
/* LP4Y command-bus-training workaround: toggle the CLK single-ended (SE)
 * mode bit (MR51 OP3) together with the matching PHY CA clock tie/SDN
 * controls. Only applied at MCK <= 800 (DDR1600 and below); higher
 * frequencies return without touching anything.
 *
 * u1OnOff: ENABLE to turn CLK SE mode on, DISABLE to turn it off.
 *
 * Fix vs. original: removed the unused local `U8 u1MR51 = 0;` (the
 * global u1MR51Value[] array is what is actually read/written).
 */
void CmdBusTrainingLP4YWA(DRAMC_CTX_T *p, U8 u1OnOff)
{
	if (p->frequency > 800) // skip above DDR1600
		return;

	if (u1OnOff == DISABLE)
		u1MR51Value[p->dram_fsp] &= ~(1 << 3); // disable CLK SE mode
	else
		u1MR51Value[p->dram_fsp] |= (1 << 3); // enable CLK SE mode

	// Push the updated MR51 OP to the DRAM for the current rank.
	DramcModeRegWriteByRank(p, p->rank, 51, u1MR51Value[p->dram_fsp]);

	// Mirror the SE-mode state into the PHY CA clock controls.
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD13), P_Fld(u1OnOff, SHU_CA_CMD13_RG_TX_ARCLKB_OE_TIE_SEL_CA) \
															  | P_Fld(u1OnOff, SHU_CA_CMD13_RG_TX_ARCLKB_OE_TIE_EN_CA));
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DDRPHY_REG_SHU_CA_CMD7), P_Fld(u1OnOff, SHU_CA_CMD7_R_LP4Y_SDN_MODE_CLK));
}
259 #endif
260 
261 #if ENABLE_DFS_RUNTIME_MRW
/* Publish the current DRAM FSP into the SA_RESERVE scratch field so the
 * runtime-MRW DFS flow knows which FSP is active. */
void DFSRuntimeFspMRW(DRAMC_CTX_T *p)
{
	vIO32WriteFldAlign_All(DRAMC_REG_SA_RESERVE, p->dram_fsp, SA_RESERVE_DFS_FSP_RTMRW);
}
266 
/* Stage the per-shuffle mode-register OP values into the DDRPHY MD32
 * LPIF "MR_OP_STORE" registers so the runtime-MRW DFS flow can replay
 * them when switching to SRAM shuffle level 'sram_shu_level'.
 *
 * Stored: MR13 (common), per-shuffle MR1/MR2/MR3/MR11 and MR21/MR22/MR51,
 * and per-channel/per-rank MR12/MR14 packed as four 8-bit lanes
 * (ch_start rk0, ch_start rk1, ch_end rk0, ch_end rk1).
 */
void DFSRuntimeMRW_preset(DRAMC_CTX_T *p, U8 sram_shu_level)
{
	U8 u1ChIdx = 0, u1RankIdx = 0; // used only by the #if 0 debug dump at the end
	U8 u1MR03_Value = 0;
	U8 ch_start = 0, ch_end = 0;

		// NOTE: here ch_end is the LAST channel index (inclusive), unlike
		// the exclusive CHANNEL_x+1 bounds used by the loop helpers below.
		ch_start = CHANNEL_A;
		ch_end = CHANNEL_B;
#if (CHANNEL_NUM > 2)
		ch_end = CHANNEL_D;
#endif

	//Darren-msg("[DFSRuntimeMRW_preset] FSP%d\n", p->dram_fsp);
	//! save mr13
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_15_0, u1MR13Value[RANK_0], LPIF_MR_OP_STORE_SHU_15_0_MR_OP_SET_SHU_15_0);
	//Darren-msg("\tMR13 = 0x%x\n", u1MR13Value[RANK_0]);

	// NOTE(review): if neither DBI flag is enabled, the stored MR3 OP stays 0
	// rather than u1MR03Value[fsp]; and with both enabled, the WRITE_DBI line
	// recomputes from u1MR03Value and drops the READ_DBI result — confirm
	// these build combinations are intentional.
#if ENABLE_READ_DBI
	u1MR03_Value = ((u1MR03Value[p->dram_fsp] & 0xbf) | (p->DBI_R_onoff[p->dram_fsp] << 6)); // bit6 = read DBI
#endif

#if ENABLE_WRITE_DBI
	u1MR03_Value = ((u1MR03Value[p->dram_fsp] & 0x7F) | (p->DBI_W_onoff[p->dram_fsp] << 7)); // bit7 = write DBI
#endif

	//! save shux mr1/mr2/mr3/mr11 (one 16-byte stride per shuffle level)
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_0 + (sram_shu_level << 4),
		P_Fld(u1MR01Value[p->dram_fsp], LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_0) |
		P_Fld(u1MR02Value[p->dram_fsp], LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_1) |
		P_Fld(u1MR03_Value, LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_2) |
		P_Fld(u1MR11Value[p->dram_fsp], LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_3));
		//Darren-msg("\tMR01 = 0x%x, MR02 = 0x%x, MR03 = 0x%x, MR1 = 0x%x\n", u1MR01Value[p->dram_fsp], u1MR02Value[p->dram_fsp], u1MR03Value[p->dram_fsp], u1MR11Value[p->dram_fsp]);

	//! save shux mr21/mr22/mr51
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_1 + (sram_shu_level << 4),
		P_Fld(u1MR21Value[p->dram_fsp], LPIF_MR_OP_STORE_SHU_0_1_MR_OP_SET_SHU_0_5) |
		P_Fld(u1MR22Value[p->dram_fsp], LPIF_MR_OP_STORE_SHU_0_1_MR_OP_SET_SHU_0_6) |
		P_Fld(u1MR51Value[p->dram_fsp], LPIF_MR_OP_STORE_SHU_0_1_MR_OP_SET_SHU_0_7));
		//Darren-msg("\tMR22 = 0x%x, MR51 = 0x%x\n", u1MR22Value[p->dram_fsp], u1MR51Value[p->dram_fsp]);

	//! save shux mr12 (per channel/rank, four 8-bit lanes)
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_7_0 + (sram_shu_level << 4),
		P_Fld(u1MR12Value[ch_start][RANK_0][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_0_MR_OP_SET_SHU_7_0) |
		P_Fld(u1MR12Value[ch_start][RANK_1][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_0_MR_OP_SET_SHU_7_1) |
		P_Fld(u1MR12Value[ch_end][RANK_0][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_0_MR_OP_SET_SHU_7_2) |
		P_Fld(u1MR12Value[ch_end][RANK_1][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_0_MR_OP_SET_SHU_7_3));

	//! save shux mr14 (per channel/rank, four 8-bit lanes)
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_7_1 + (sram_shu_level << 4),
		P_Fld(u1MR14Value[ch_start][RANK_0][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_1_MR_OP_SET_SHU_7_4) |
		P_Fld(u1MR14Value[ch_start][RANK_1][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_1_MR_OP_SET_SHU_7_5) |
		P_Fld(u1MR14Value[ch_end][RANK_0][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_1_MR_OP_SET_SHU_7_6) |
		P_Fld(u1MR14Value[ch_end][RANK_1][p->dram_fsp], LPIF_MR_OP_STORE_SHU_7_1_MR_OP_SET_SHU_7_7));

#if 0
	for (u1ChIdx = CHANNEL_A; u1ChIdx < p->support_channel_num; u1ChIdx++)
	{
		for (u1RankIdx = RANK_0; u1RankIdx < p->support_rank_num; u1RankIdx++)
		{
			msg("\tCH%d, RK%d, MR12 = 0x%x, MR14 = 0x%x\n", u1ChIdx, u1RankIdx,u1MR12Value[u1ChIdx][u1RankIdx][p->dram_fsp], u1MR14Value[u1ChIdx][u1RankIdx][p->dram_fsp]);
		}
	}
#endif
}
330 
/* Issue the runtime mode-register-write (RTMRW) sequence on the currently
 * mapped channel, in two bursts:
 *   1st burst: MR13, MR1, MR2, MR3, MR11, MR12 (6 MRWs)
 *   2nd burst: MR14, MR22, MR51 (+ MR21 when ENABLE_LP4Y_DFS)
 * All MRWs in a burst target the rank(s) selected by rtmrw_rank_sel.
 * Each burst is triggered via SWCMD_EN_RTMRWEN and polled to completion
 * on SPCMDRESP_RTMRW_RESPONSE.
 *
 * NOTE(review): RTMRW_LEN is written as 0x5 for a 6-MRW burst and 0x2
 * (or 0x3 with LP4Y) for a 3/4-MRW burst — presumably count-minus-one
 * encoding; confirm against the RTMRW_CTRL0 register spec.
 * NOTE(review): the response polls have no timeout — a missing ack
 * hangs here forever.
 */
static void TriggerRTMRW_SingleChannel(DRAMC_CTX_T *p, U8 rtmrw_rank_sel, U8 u1MR1, U8 u1MR2, U8 u1MR3, U8 u1MR11, U8 u1MR12, U8 u1MR13, U8 u1MR14, U8 u1MR21, U8 u1MR22, U8 u1MR51)
{
	U8 rt_response_ack = 1, rt_ack = 0;
	U8 u1MRW_1ST_Num = 0x5; // MR13, MR1, MR2, MR3, MR11, MR12
	U8 u1MRW_2ND_Num = 0x2; // MR14, 22, 51

#if ENABLE_LP4Y_DFS
	u1MRW_2ND_Num++; // for LP4Y MR21
#endif

#if 1
	//! MR13, MR1, MR2, MR3, MR11, MR12
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL0),
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW0_RK) |
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW1_RK) |
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW2_RK) |
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW3_RK) |
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW4_RK) |
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW5_RK) |
		P_Fld(u1MRW_1ST_Num, RTMRW_CTRL0_RTMRW_LEN) |
		P_Fld(0x0, RTMRW_CTRL0_RTMRW_AGE) |
		P_Fld(0x3, RTMRW_CTRL0_RTMRW_LAT));

	//! MA = 13, 1, 2, 3
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL1),
		P_Fld(13, RTMRW_CTRL1_RTMRW0_MA) |
		P_Fld(1, RTMRW_CTRL1_RTMRW1_MA) |
		P_Fld(2, RTMRW_CTRL1_RTMRW2_MA) |
		P_Fld(3, RTMRW_CTRL1_RTMRW3_MA));

	//! OP13, OP1, OP2, OP3
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL2),
		P_Fld(u1MR13, RTMRW_CTRL2_RTMRW0_OP) |
		P_Fld(u1MR1, RTMRW_CTRL2_RTMRW1_OP) |
		P_Fld(u1MR2, RTMRW_CTRL2_RTMRW2_OP) |
		P_Fld(u1MR3, RTMRW_CTRL2_RTMRW3_OP));

	//! MR11/MR12
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL3),
		P_Fld(11, RTMRW_CTRL3_RTMRW4_MA) |
		P_Fld(12, RTMRW_CTRL3_RTMRW5_MA) |
		P_Fld(u1MR11, RTMRW_CTRL3_RTMRW4_OP) |
		P_Fld(u1MR12, RTMRW_CTRL3_RTMRW5_OP));

	//!runtime MRW trigger
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0x1, SWCMD_EN_RTMRWEN);

	// Busy-wait for the RTMRW response ack (no timeout).
	do {
		rt_ack = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_RTMRW_RESPONSE);
	} while(rt_response_ack != rt_ack);

	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0x0, SWCMD_EN_RTMRWEN);
#endif

#if 1
   //! MR14/22/51
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL0),
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW0_RK) |
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW1_RK) |
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW2_RK) |
#if ENABLE_LP4Y_DFS
		P_Fld(rtmrw_rank_sel, RTMRW_CTRL0_RTMRW3_RK) |
#endif
		P_Fld(u1MRW_2ND_Num, RTMRW_CTRL0_RTMRW_LEN) |
		P_Fld(0x0, RTMRW_CTRL0_RTMRW_AGE) |
		P_Fld(0x3, RTMRW_CTRL0_RTMRW_LAT));

	//! MA = 14, 22, 51 (slot 3 = MR21 when LP4Y)
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL1),
		P_Fld(14, RTMRW_CTRL1_RTMRW0_MA) |
#if ENABLE_LP4Y_DFS
		P_Fld(21, RTMRW_CTRL1_RTMRW3_MA) |
#endif
		P_Fld(22, RTMRW_CTRL1_RTMRW1_MA) |
		P_Fld(51, RTMRW_CTRL1_RTMRW2_MA));

	//! OP14, OP22, OP51 (slot 3 = OP21 when LP4Y)
	vIO32WriteFldMulti(DRAMC_REG_ADDR(DRAMC_REG_RTMRW_CTRL2),
		P_Fld(u1MR14, RTMRW_CTRL2_RTMRW0_OP) |
#if ENABLE_LP4Y_DFS
		P_Fld(u1MR21, RTMRW_CTRL2_RTMRW3_OP) |
#endif
		P_Fld(u1MR22, RTMRW_CTRL2_RTMRW1_OP) |
		P_Fld(u1MR51, RTMRW_CTRL2_RTMRW2_OP));

	//!runtime MRW trigger
	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0x1, SWCMD_EN_RTMRWEN);

	// Busy-wait for the RTMRW response ack (no timeout).
	do {
		rt_ack = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SPCMDRESP), SPCMDRESP_RTMRW_RESPONSE);
	} while(rt_response_ack != rt_ack);

	vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SWCMD_EN), 0x0, SWCMD_EN_RTMRWEN);
#endif
}
426 
/* Workaround for the HW-set MR13/VRCG OPs around a runtime-MRW DFS switch.
 * For every channel, patch MR13 OP[7:6] (FSP-OP/FSP-WR) in the CURRENT
 * shuffle's HWSET_MR13 and the NEXT shuffle's HWSET_VRCG so that the
 * direction of the PLL ping-pong (PHYPLL <-> CLRPLL) and the target FSP
 * agree. Channel mapping is restored on exit.
 */
static void DFSRTMRW_HwsetWA(DRAMC_CTX_T *p, U8 cur_shu_mux_index, U8 pingpong_shu_level, U8 pingpong_fsp)
{
	U8 u1MR13_OP = 0, u1VRCG_OP = 0;
	U8 ch_start = 0, ch_end = 0, u1ChIdx = 0;
	U8 ch_bak = vGetPHY2ChannelMapping(p);

		ch_start = CHANNEL_A;
		ch_end = CHANNEL_B+1; // exclusive bound
#if (CHANNEL_NUM > 2)
		ch_end = CHANNEL_D+1;
#endif

	for (u1ChIdx = ch_start; u1ChIdx < ch_end; u1ChIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChIdx);
		p->ShuRGAccessIdx = cur_shu_mux_index; // Current
		u1MR13_OP = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_MR13), SHU_HWSET_MR13_HWSET_MR13_OP);
		p->ShuRGAccessIdx = pingpong_shu_level; // Next
		u1VRCG_OP = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_VRCG), SHU_HWSET_VRCG_HWSET_VRCG_OP);
		p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;

		if(pingpong_fsp == FSP_1)
		{
			if (cur_shu_mux_index == PHYPLL_MODE)
			{
				u1MR13_OP &= 0x3F; //! MR13 OP7 = 0, OP6 = 0, from PHYPLL to CLRPLL
				u1VRCG_OP &= 0x3F; //! MR13 OP7 = 0, OP6 = 0, from PHYPLL to CLRPLL
			}
			else
			{
				u1MR13_OP |= 0xC0; //! MR13 OP7 = 1, OP6 = 1, from CLRPLL to PHYPLL
				u1VRCG_OP |= 0xC0; //! MR13 OP7 = 1, OP6 = 1, from CLRPLL to PHYPLL
			}
		}
		else
		{
			if (cur_shu_mux_index == PHYPLL_MODE)
			{
				u1MR13_OP |= 0xC0; //! MR13 OP7 = 1, OP6 = 1, from CLRPLL to PHYPLL
				u1VRCG_OP |= 0xC0; //! MR13 OP7 = 1, OP6 = 1, from CLRPLL to PHYPLL
			}
			else
			{
				u1MR13_OP &= 0x3F; //! MR13 OP7 = 0, OP6 = 0, from PHYPLL to CLRPLL
				u1VRCG_OP &= 0x3F; //! MR13 OP7 = 0, OP6 = 0, from PHYPLL to CLRPLL
			}
		}
		p->ShuRGAccessIdx = cur_shu_mux_index; // Current
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_MR13), u1MR13_OP, SHU_HWSET_MR13_HWSET_MR13_OP); // Current
		p->ShuRGAccessIdx = pingpong_shu_level; // Next
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_VRCG), u1VRCG_OP, SHU_HWSET_VRCG_HWSET_VRCG_OP); // Next
		p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
	}

	p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
	vSetPHY2ChannelMapping(p, ch_bak);
}
484 
/* Perform the runtime mode-register writes for a DFS switch to SRAM
 * shuffle level 'nxt_shu_level': read back the MR OPs previously staged
 * by DFSRuntimeMRW_preset() from the MD32 LPIF MR_OP_STORE registers,
 * patch MR13 OP[7:6] for the PLL ping-pong direction and target FSP,
 * then trigger the RTMRW bursts per channel and rank (with RTMRW high
 * priority temporarily forced on). Broadcast and channel mapping are
 * backed up and restored.
 */
static void DFSRuntimeMRWEn(DRAMC_CTX_T *p, U8 cur_shu_mux_index, U8 nxt_shu_level, U8 pingpong_fsp)
{
	U8 rtmr13 = 0;
	U8 rtmr1 = 0, rtmr2 = 0, rtmr3 = 0, rtmr11 = 0;
	U8 rtmr12 = 0, rtmr14 = 0;
	U8 rtmr21 = 0, rtmr22 = 0, rtmr51 = 0;
	U8 md32_rtmrw_hpri_en_bk = 0;
	U32 bc_bak = 0, ch_bak = 0;
	U8 ch_start = 0, ch_end = 0;
	U8 u1ChIdx = 0, u1RankIdx = 0;
	U8 u1FldIdx = 0;

	bc_bak = GetDramcBroadcast();
	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);
	ch_bak = vGetPHY2ChannelMapping(p);

		ch_start = CHANNEL_A;
		ch_end = CHANNEL_B+1; // exclusive bound
#if (CHANNEL_NUM > 2)
		ch_end = CHANNEL_D+1;
#endif

	//! get mr13 (common across shuffles)
	rtmr13 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_15_0, LPIF_MR_OP_STORE_SHU_15_0_MR_OP_SET_SHU_15_0);
	//! get shux mr1/mr2/mr3/mr11 (16-byte stride per shuffle level)
	rtmr1 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_0 + (nxt_shu_level << 4), LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_0);
	rtmr2 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_0 + (nxt_shu_level << 4), LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_1);
	rtmr3 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_0 + (nxt_shu_level << 4), LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_2);
	rtmr11 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_0 + (nxt_shu_level << 4), LPIF_MR_OP_STORE_SHU_0_0_MR_OP_SET_SHU_0_3);
	//! get shux mr21/mr22/mr51
	rtmr21 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_1 + (nxt_shu_level << 4), LPIF_MR_OP_STORE_SHU_0_1_MR_OP_SET_SHU_0_5);
	rtmr22 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_1 + (nxt_shu_level << 4), LPIF_MR_OP_STORE_SHU_0_1_MR_OP_SET_SHU_0_6);
	rtmr51 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_0_1 + (nxt_shu_level << 4), LPIF_MR_OP_STORE_SHU_0_1_MR_OP_SET_SHU_0_7);

	// Patch MR13 OP[7:6] for the ping-pong direction and target FSP.
	rtmr13 &= 0x3F;
	if (pingpong_fsp == FSP_1)
	{
		if(cur_shu_mux_index == PHYPLL_MODE)
			rtmr13 |= (0x1 << 7); //! MR13 OP7 = 1, OP6 = 0, from PHYPLL to CLRPLL
		else
			rtmr13 |= (0x1 << 6); //! MR13 OP7 = 0, OP6 = 1, from CLRPLL to PHYPLL
	}
	else
	{
		if(cur_shu_mux_index == PHYPLL_MODE)
			rtmr13 |= (0x1 << 6); //! MR13 OP7 = 0, OP6 = 1, from CLRPLL to PHYPLL
		else
			rtmr13 |= (0x1 << 7); //! MR13 OP7 = 1, OP6 = 0, from PHYPLL to CLRPLL
	}

#if 0 // @Darren-
	if (p->support_rank_num == RANK_DUAL)
		md32_rtmrw_rank = 0x3; //! dual rank
	else
		md32_rtmrw_rank = 0x1; //! single rank
#endif

	//Darren-msg("[DFSRuntimeMRWEn]\n");
	// u1FldIdx selects the 8-bit lane written by DFSRuntimeMRW_preset:
	// it advances per rank across channels and is NOT reset per channel.
	// NOTE(review): with CHANNEL_NUM > 2 and dual rank this exceeds the
	// four 8-bit lanes of the SHU_7_x registers — confirm intended.
	u1FldIdx = 0; // shift 8-bits field
	for (u1ChIdx = ch_start; u1ChIdx < ch_end; u1ChIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChIdx);
		md32_rtmrw_hpri_en_bk = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MPC_CTRL), MPC_CTRL_RTMRW_HPRI_EN);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MPC_CTRL), 0x1, MPC_CTRL_RTMRW_HPRI_EN);

		for (u1RankIdx = RANK_0; u1RankIdx < p->support_rank_num; u1RankIdx++)
		{
			//Darren-msg("CH%d RK%d\n", u1ChIdx, u1RankIdx);
			//! get shux mr12/mr14 for this channel/rank lane
			rtmr12 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_7_0 + (nxt_shu_level << 4),  Fld(8, u1FldIdx*8));
			rtmr14 = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_MR_OP_STORE_SHU_7_1 + (nxt_shu_level << 4),  Fld(8, u1FldIdx*8));
			//Darren-msg("\tMR1=0x%x, MR2=0x%x, MR3=0x%x, MR11=0x%x\n", rtmr1, rtmr2, rtmr3, rtmr11);
			//Darren-msg("\tMR12=0x%x, MR13=0x%x, MR14=0x%x, MR22=0x%x, MR51=0x%x\n", rtmr12, rtmr13, rtmr14, rtmr22, rtmr51);
			TriggerRTMRW_SingleChannel(p, u1RankIdx, rtmr1, rtmr2, rtmr3, rtmr11, rtmr12, rtmr13, rtmr14, rtmr21, rtmr22, rtmr51);
			u1FldIdx++; // shift 8-bits field
		}

		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MPC_CTRL), md32_rtmrw_hpri_en_bk, MPC_CTRL_RTMRW_HPRI_EN);
	}

	vSetPHY2ChannelMapping(p, ch_bak);
	DramcBroadcastOnOff(bc_bak);
}
568 #endif
569 
/* Workaround for the HW-set MR13 OP around a (non-runtime-MRW) DFS switch:
 * for every channel, patch MR13 OP[7:6] in the CURRENT shuffle's
 * HWSET_MR13 — set both bits when the next level is a terminated shuffle
 * (SRAM_SHU0/1), clear both otherwise. Channel mapping restored on exit. */
static void DFSHwSetWA(DRAMC_CTX_T *p, U8 cur_shu_mux_index, U8 nxt_shu_level)
{
	U8 u1MR13_OP = 0;
	U8 ch_start = 0, ch_end = 0, u1ChIdx = 0;
	U8 ch_bak = vGetPHY2ChannelMapping(p);
	ch_start = CHANNEL_A;
	ch_end = CHANNEL_B+1; // exclusive bound
#if (CHANNEL_NUM > 2)
	ch_end = CHANNEL_D+1;
#endif
	for (u1ChIdx = ch_start; u1ChIdx < ch_end; u1ChIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChIdx);
		p->ShuRGAccessIdx = cur_shu_mux_index; // NOTE: current shuffle
		u1MR13_OP = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_MR13), SHU_HWSET_MR13_HWSET_MR13_OP);
		if ((nxt_shu_level == SRAM_SHU0) || (nxt_shu_level == SRAM_SHU1)) // for term shuffle level
			u1MR13_OP |= 0xC0; //! MR13 OP7 = 1, OP6 = 1, from CLRPLL to PHYPLL
		else
			u1MR13_OP &= 0x3F; //! MR13 OP7 = 0, OP6 = 0, from PHYPLL to CLRPLL
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SHU_HWSET_MR13), u1MR13_OP, SHU_HWSET_MR13_HWSET_MR13_OP); // Current
	}
	p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
	vSetPHY2ChannelMapping(p, ch_bak);
}
594 #if ENABLE_CONFIG_MCK_4TO1_MUX
/* Program the DDRPHY MCK 4-to-1 clock mux to the requested rate.
 * Derives the two stage selects (52M/104M, 104M/208M) from eClkMux,
 * writes them to MISC_CKMUX_SEL, then pulses W_CHG_MEM on MISC_CG_CTRL0
 * (with a 100ns wait) so the new selection latches on all PHYs. */
void ConfigMCK4To1MUX(DRAMC_CTX_T *p, CLK_MUX_T eClkMux)
{
	U8 u1Sel52to104, u1Sel104to208;

	switch (eClkMux)
	{
	case CLK_MUX_208M:
		u1Sel52to104 = 1;
		u1Sel104to208 = 1;
		break;
	case CLK_MUX_104M:
		u1Sel52to104 = 1;
		u1Sel104to208 = 0;
		break;
	default: // lowest rate
		u1Sel52to104 = 0;
		u1Sel104to208 = 0;
		break;
	}

	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CKMUX_SEL, P_Fld(u1Sel52to104, MISC_CKMUX_SEL_RG_52M_104M_SEL)
						| P_Fld(u1Sel104to208, MISC_CKMUX_SEL_RG_104M_208M_SEL));

	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CG_CTRL0, P_Fld(0x3, MISC_CG_CTRL0_CLK_MEM_SEL)
						| P_Fld(0x1, MISC_CG_CTRL0_W_CHG_MEM));

	mcDELAY_XNS(100);//reserve 100ns period for clock mute and latch the rising edge sync condition for BCLK

	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CG_CTRL0, 0x0, MISC_CG_CTRL0_W_CHG_MEM);
}
625 #endif
626 
627 #if ENABLE_DFS_DEBUG_MODE
/* Poll every channel's DVFS state machine until it reaches u1HangStatus.
 * If a channel fails to reach the state within ~100 polls, dump the
 * frequency meter and hang deliberately (debug aid).
 * NOTE: u4While1Cnt is shared across channels, not reset per channel,
 * so the 100-poll budget is for the whole call.
 * Channel mapping is restored on exit. */
void WaitDFSDebugSM(DRAMC_CTX_T *p, U8 u1HangStatus)
{
	U8 u1Status[CHANNEL_NUM] = {0}, u1DvfsState[CHANNEL_NUM] = {0}, u1ChIdx = 0, u1ChStart = 0, u1ChEnd = 0;
	DRAM_CHANNEL_T eOriChannel = vGetPHY2ChannelMapping(p);
	U32 u4While1Cnt = 100;

		u1ChStart = CHANNEL_A;
		u1ChEnd = CHANNEL_B+1; // exclusive bound
#if CHANNEL_NUM > 2
		u1ChEnd = CHANNEL_D+1;
#endif

	for (u1ChIdx = u1ChStart; u1ChIdx < u1ChEnd; u1ChIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChIdx);
		do {
			u1Status[u1ChIdx] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_DVFS_STATUS), DVFS_STATUS_CUT_PHY_ST_SHU);
			u1DvfsState[u1ChIdx] = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_MRR_STATUS2), MRR_STATUS2_DVFS_STATE);
			//msg("[WaitDFSDebugSM] CH%d DFS debug mode state (0x%x, 0x%x), Dvfs State = 0x%x\n", u1ChIdx, u1Status[u1ChIdx], u1HangStatus, u1DvfsState[u1ChIdx]);
			if (u1Status[u1ChIdx] == u1HangStatus)
				break;

			if (u4While1Cnt == 0)
			{
				// Timed out: dump clocks and hang here on purpose for debug.
				DDRPhyFreqMeter();
				while(1);
			}
			u4While1Cnt--;
		} while(1);
	}

	vSetPHY2ChannelMapping(p, eOriChannel);
}
661 
/* Leave the selected DFS debug mode: clear the before/after-shuffle
 * debug-stop enables, or (for CHG_CLK_MODE) clear the clock-change-by-LPC
 * enables and return the mem-clock mux to HW control.
 * Unknown modes log an error (and spin forever on ETT builds). */
void ExitDFSDebugMode(DRAMC_CTX_T *p, DFS_DBG_T eDbgMode)
{
	if ((eDbgMode == BEF_DFS_MODE) || (eDbgMode == AFT_DFS_MODE))
	{
		vIO32WriteFldMulti_All((DDRPHY_REG_MISC_DVFSCTL3), P_Fld(0x0, MISC_DVFSCTL3_RG_DFS_AFT_PHY_SHU_DBG_EN)
							| P_Fld(0x0, MISC_DVFSCTL3_RG_DFS_BEF_PHY_SHU_DBG_EN));
	}
	else if (eDbgMode == CHG_CLK_MODE)
	{
		vIO32WriteFldMulti_All((DDRPHY_REG_MISC_DVFSCTL3), P_Fld(0x0, MISC_DVFSCTL3_RG_PHY_ST_CHG_TO_BCLK_BY_LPC_EN)
							| P_Fld(0x0, MISC_DVFSCTL3_RG_PHY_ST_CHG_TO_MCLK_BY_LPC_EN));
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CLK_CTRL, 0x0, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_SEL_MODE); // HW mode
	}
	else
	{
		err("DFS debug mode err!\n");
		#if __ETT__
		while (1);
		#endif
	}
}
683 
/* Walk the DVFS state machine through the selected debug checkpoint(s):
 *   BEF_DFS_MODE: wait for state 0x1 (before-shuffle stop)
 *   AFT_DFS_MODE: wait for state 0x1d (after-shuffle stop)
 *   CHG_CLK_MODE: wait for 0x1e, manually step the mem-clock mux to
 *                 208MHz via LPIF, wait for 0x1f, then step to MCK.
 * Unknown modes log an error (and spin forever on ETT builds). */
void ChkDFSDebugMode(DRAMC_CTX_T *p, DFS_DBG_T eDbgMode)
{
	if (eDbgMode == BEF_DFS_MODE)
	{
		WaitDFSDebugSM(p, 0x1);
	}
	else if (eDbgMode == AFT_DFS_MODE)
	{
		WaitDFSDebugSM(p, 0x1d);

	}
	else if (eDbgMode == CHG_CLK_MODE)
	{
		WaitDFSDebugSM(p, 0x1e);

		// HW shuffle will switch clock to 208MHz and continue DFS
		vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_3, P_Fld(0xf, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_SEL)
							| P_Fld(0x3, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_UPDATE));
		mcDELAY_US(1); // Wait 1T 26MHz
		vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_3, P_Fld(0xf, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_SEL)
							| P_Fld(0x0, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_UPDATE));

		WaitDFSDebugSM(p, 0x1f);

		// HW shuffle will switch clock to MCK and continue DFS
		vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_3, P_Fld(0x5, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_SEL)
							| P_Fld(0x3, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_UPDATE));
		mcDELAY_US(1); // Wait 1T 26MHz
		vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_3, P_Fld(0x5, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_SEL)
							| P_Fld(0x0, LPIF_LOW_POWER_CFG_3_DVFS_MEM_CK_MUX_UPDATE));

	}
	else
	{
		err("DFS debug mode err!\n");
		#if __ETT__
		while (1);
		#endif
	}
}
724 
/* Arm the selected DFS debug mode: enable the before- or after-shuffle
 * debug stop, or (for CHG_CLK_MODE) enable clock change by LPC and put
 * the mem-clock mux under MD32 RG control.
 * Unknown modes log an error (and spin forever on ETT builds). */
void EntryDFSDebugMode(DRAMC_CTX_T *p, DFS_DBG_T eDbgMode)
{
	if (eDbgMode == BEF_DFS_MODE)
	{
		vIO32WriteFldMulti_All((DDRPHY_REG_MISC_DVFSCTL3), P_Fld(0x0, MISC_DVFSCTL3_RG_DFS_AFT_PHY_SHU_DBG_EN)
							| P_Fld(0x1, MISC_DVFSCTL3_RG_DFS_BEF_PHY_SHU_DBG_EN));
	}
	else if (eDbgMode == AFT_DFS_MODE)
	{
		vIO32WriteFldMulti_All((DDRPHY_REG_MISC_DVFSCTL3), P_Fld(0x1, MISC_DVFSCTL3_RG_DFS_AFT_PHY_SHU_DBG_EN)
							| P_Fld(0x0, MISC_DVFSCTL3_RG_DFS_BEF_PHY_SHU_DBG_EN));
	}
	else if (eDbgMode == CHG_CLK_MODE)
	{
		vIO32WriteFldMulti_All((DDRPHY_REG_MISC_DVFSCTL3), P_Fld(0x1, MISC_DVFSCTL3_RG_PHY_ST_CHG_TO_BCLK_BY_LPC_EN)
							| P_Fld(0x1, MISC_DVFSCTL3_RG_PHY_ST_CHG_TO_MCLK_BY_LPC_EN));
		// for MD32 RG mode
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CLK_CTRL, 0x1, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_SEL_MODE);
		// for PHY RG mode (no support)
		//Darren-vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CLK_CTRL, 0x1, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_SEL_MODE);
	}
	else
	{
		err("DFS debug mode err!\n");
		#if __ETT__
		while (1);
		#endif
	}

}
755 #endif
756 
757 #if DFS_NOQUEUE_FLUSH_WA
758 U32 u4PERFCTL0_backup=0;
759 
/* Enable DFS "no queue flush" mode on all channels: turn on
 * DVFS_NOQUEFLUSH_EN, program the shuffle period counters (with longer
 * tCKFSPE/VRCG periods under the LP4Y workaround), and configure which
 * runtime MRW steps the HW may skip/pause. */
void EnableDFSNoQueueFlush(DRAMC_CTX_T *p)
{
	vIO32WriteFldMulti_All(DRAMC_REG_DVFS_CTRL0, P_Fld(0, DVFS_CTRL0_HWSET_WLRL)
		| P_Fld(0, DVFS_CTRL0_DVFS_RXFIFOST_SKIP) // sync MP settings
		| P_Fld(1, DVFS_CTRL0_DVFS_NOQUEFLUSH_EN)
		| P_Fld(0, DVFS_CTRL0_R_DMDVFSMRW_EN));
	vIO32WriteFldMulti_All(DRAMC_REG_SHUCTRL1, P_Fld(0, SHUCTRL1_FC_PRDCNT)
#if ENABLE_LP4Y_WA
		//@Berson, LP4Y tCKFSPE/X_SE violation at shuffle as DVFS noqueflush enable
		// LP4Y tCKFSPE/X_SE violation at shuffle from 7.5ns to 15ns
		| P_Fld(5, SHUCTRL1_CKFSPE_PRDCNT)
		| P_Fld(5, SHUCTRL1_VRCGEN_PRDCNT)
#else
		| P_Fld(0, SHUCTRL1_CKFSPE_PRDCNT)
		| P_Fld(0, SHUCTRL1_VRCGEN_PRDCNT)
#endif
		| P_Fld(0, SHUCTRL1_CKFSPX_PRDCNT));
	vIO32WriteFldAlign_All(DRAMC_REG_BYPASS_FSPOP, 0, BYPASS_FSPOP_BPFSP_OPT); // sync MP settings

#if ENABLE_DFS_RUNTIME_MRW // for Skip HW MR2
	vIO32WriteFldMulti_All(DRAMC_REG_DVFS_TIMING_CTRL3, P_Fld(0, DVFS_TIMING_CTRL3_RTMRW_MRW1_SKIP) // OP CHG & VRCG High
		| P_Fld(0, DVFS_TIMING_CTRL3_RTMRW_MRW2_SKIP) // VRCG Low
		| P_Fld(1, DVFS_TIMING_CTRL3_RTMRW_MRW3_SKIP)); // MR2 RL/WL (reduce 50ns)
#endif

#if ENABLE_DFS_NOQUEUE_FLUSH_DBG
	// for debug mode only (skip HW MRW)
	vIO32WriteFldMulti_All(DRAMC_REG_DVFS_TIMING_CTRL3, P_Fld(1, DVFS_TIMING_CTRL3_RTMRW_MRW1_PAUSE)
		| P_Fld(1, DVFS_TIMING_CTRL3_RTMRW_MRW2_PAUSE)
		| P_Fld(1, DVFS_TIMING_CTRL3_RTMRW_MRW3_PAUSE));
#endif
}
792 
/*
 * NoQueueFlushWA - workaround companion of the no-queue-flush DFS flow.
 *
 * On ENABLE: saves the current PERFCTL0 RWAGEEN/EMILLATEN bits into the
 * file-scope u4PERFCTL0_backup and clears both bits.
 * On anything else: restores both bits from the backup.
 * When more than one channel is supported, the writes are broadcast.
 */
static void NoQueueFlushWA(DRAMC_CTX_T *p, U8 u1WA_enable)
{
	U32 bc_bak=0;

	if (p->support_channel_num > CHANNEL_SINGLE) //for dual single
	{
		bc_bak = GetDramcBroadcast();
		DramcBroadcastOnOff(DRAMC_BROADCAST_ON);
	}

	if (u1WA_enable == ENABLE)
	{
		// Pack RWAGEEN (bit0 of backup) and the next bit up (bit1) together.
		// NOTE(review): the 0x3 mask assumes EMILLATEN sits directly above
		// RWAGEEN in PERFCTL0 -- confirm against the register definition.
		u4PERFCTL0_backup = (u4IO32Read4B(DRAMC_REG_ADDR(DRAMC_REG_PERFCTL0)) >> Fld_shft(PERFCTL0_RWAGEEN)) & 0x3;
		vIO32WriteFldMulti(DRAMC_REG_PERFCTL0, P_Fld(0, PERFCTL0_RWAGEEN)
				| P_Fld(0, PERFCTL0_EMILLATEN));
		//msg("[NoQueueFlushWA] PERFCTL0[11:10] backup = 0x%x\n", u4PERFCTL0_backup);
	}
	else
	{
		// Restore: bit0 -> RWAGEEN, bit1 -> EMILLATEN.
		vIO32WriteFldMulti(DRAMC_REG_PERFCTL0, P_Fld(u4PERFCTL0_backup & 0x1, PERFCTL0_RWAGEEN)
				| P_Fld((u4PERFCTL0_backup>>1) & 0x1, PERFCTL0_EMILLATEN));
	}

	if (p->support_channel_num > CHANNEL_SINGLE) //for dual single
		DramcBroadcastOnOff(bc_bak);
}
819 #endif
820 
821 #if ENABLE_TIMING_TXSR_DFS_WA
/*
 * TimingTxsrWA - tXSR timing workaround applied on a DFS transition.
 *
 * Disables the slow refresh-per-bank overhead for SRAM shuffle levels 4/5/6
 * and enables it for every other target level. The write is broadcast when
 * more than one channel is supported.
 */
static void TimingTxsrWA(DRAMC_CTX_T *p, U32 next_shu_level)
{
	U32 u4BcBackup = 0;
	U32 u4SlowRefPbEn;
	U8 u1MultiChannel = (p->support_channel_num > CHANNEL_SINGLE); // dual/single

	if (u1MultiChannel)
	{
		u4BcBackup = GetDramcBroadcast();
		DramcBroadcastOnOff(DRAMC_BROADCAST_ON);
	}

	u4SlowRefPbEn = ((next_shu_level == SRAM_SHU4) ||
			 (next_shu_level == SRAM_SHU5) ||
			 (next_shu_level == SRAM_SHU6)) ? DISABLE : ENABLE;

	vIO32WriteFldAlign(DRAMC_REG_REFCTRL1, u4SlowRefPbEn, REFCTRL1_REF_OVERHEAD_SLOW_REFPB_ENA);

	if (u1MultiChannel)
		DramcBroadcastOnOff(u4BcBackup);
}
842 #endif
843 
844 #if ENABLE_TX_REBASE_ODT_WA
/*
 * TxReadBaseODTWA - TX ODT extension workaround per target shuffle level.
 *
 * For SRAM_SHU0/SHU1 (the terminated high-speed points, DDR4266/DDR3200)
 * the ODTEN extension stays enabled (termen_dis = DISABLE); every other
 * level runs un-terminated, so the extension is disabled. Writes go to
 * DQ byte0, byte1 and the CA lane; broadcast when multi-channel.
 */
void TxReadBaseODTWA(DRAMC_CTX_T *p, U8 next_shu_level)
{
	U32 u4BcBackup = 0;
	U32 u4TermEnDis;
	U8 u1MultiChannel = (p->support_channel_num > CHANNEL_SINGLE); // dual/single

	if (u1MultiChannel)
	{
		u4BcBackup = GetDramcBroadcast();
		DramcBroadcastOnOff(DRAMC_BROADCAST_ON);
	}

	// term for DDR4266/DDR3200, un-term otherwise
	u4TermEnDis = ((next_shu_level == SRAM_SHU0) || (next_shu_level == SRAM_SHU1)) ? DISABLE : ENABLE;

	//msg("[TxReadBaseODTWA] SRAM SHU%d, termen_dis = %d\n", next_shu_level, u4TermEnDis);
	vIO32WriteFldAlign(DDRPHY_REG_B0_DQ6, u4TermEnDis, B0_DQ6_RG_TX_ARDQ_ODTEN_EXT_DIS_B0);
	vIO32WriteFldAlign(DDRPHY_REG_B1_DQ6, u4TermEnDis, B1_DQ6_RG_TX_ARDQ_ODTEN_EXT_DIS_B1);
	vIO32WriteFldAlign(DDRPHY_REG_CA_CMD6, u4TermEnDis, CA_CMD6_RG_TX_ARCMD_ODTEN_EXT_DIS);

	if (u1MultiChannel)
		DramcBroadcastOnOff(u4BcBackup);
}
868 #endif
869 
870 #if ENABLE_TX_REBASE_WDQS_DQS_PI_WA
/*
 * TxReBaseWDQSDqsPiWA - re-latch the WDQS/DQS PI by pulsing the TX DQS
 * read-base enables on the ping-pong shuffle register set.
 *
 * Sequence: clear both B0/B1 DQS(+DQSB) read-base enables, wait 1us,
 * then set them again. ShuRGAccessIdx is pointed at the ping-pong set for
 * the duration and restored to SHU0 afterwards. Broadcast when multi-channel.
 */
static void TxReBaseWDQSDqsPiWA(DRAMC_CTX_T *p, U8 pingpong_shu_level)
{
	U32 u4BcBackup = 0;
	U8 u1Val;

	if (p->support_channel_num > CHANNEL_SINGLE) // dual/single
	{
		u4BcBackup = GetDramcBroadcast();
		DramcBroadcastOnOff(DRAMC_BROADCAST_ON);
	}

	p->ShuRGAccessIdx = pingpong_shu_level;
	for (u1Val = 0; u1Val < 2; u1Val++) // 0 = release, 1 = re-assert
	{
		vIO32WriteFldMulti(DDRPHY_REG_SHU_B0_DQ13, P_Fld(u1Val, SHU_B0_DQ13_RG_TX_ARDQS_READ_BASE_EN_B0)
			| P_Fld(u1Val, SHU_B0_DQ13_RG_TX_ARDQSB_READ_BASE_EN_B0));
		vIO32WriteFldMulti(DDRPHY_REG_SHU_B1_DQ13, P_Fld(u1Val, SHU_B1_DQ13_RG_TX_ARDQS_READ_BASE_EN_B1)
			| P_Fld(u1Val, SHU_B1_DQ13_RG_TX_ARDQSB_READ_BASE_EN_B1));
		if (u1Val == 0)
			mcDELAY_US(1); // settle time between the two phases
	}
	p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;

	if (p->support_channel_num > CHANNEL_SINGLE) // dual/single
		DramcBroadcastOnOff(u4BcBackup);
}
896 #endif
897 
898 #if ENABLE_SRAM_DMA_WA
/*#define DDRPHY_REG_SHU_B0_PHY_VREF_SEL						 (DDRPHY_AO_BASE_ADDRESS + 0x07B4)
	#define SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_LB_B0	   Fld(7, 0) //[6:0]
	#define SHU_B0_PHY_VREF_SEL_RG_RX_ARDQ_VREF_SEL_UB_B0	   Fld(7, 8) //[14:8]*/
// Per-shuffle scratch registers used to stash the packed B1|B0 PHY_VREF_SEL
// value (see DPHYSaveToSRAMShuWA / SRAMShuRestoreToDPHYWA). Second index is
// the rank.
// NOTE(review): only columns [0] and [1] are initialized although the array
// is dimensioned [..][4]; callers index it by rank, so ranks >= 2 would hit a
// zero address -- confirm support_rank_num <= 2 on this platform.
U32 gSRAMBackupIdx[DRAM_DFS_SHUFFLE_MAX][4] = { // @Darren, LP5 don't use DDRPHY_REG_SHU_R0_CA_RXDLY6 !!
/*0*/	 {DDRPHY_REG_SHU_B0_DQ9, DDRPHY_REG_SHU_B1_DQ9},
/*1*/	 {DDRPHY_REG_SHU_B0_DQ9, DDRPHY_REG_SHU_B1_DQ9},
/*2*/	 {DDRPHY_REG_SHU_B0_DQ9, DDRPHY_REG_SHU_B1_DQ9},
/*3*/	 {DDRPHY_REG_SHU_B0_DQ9, DDRPHY_REG_SHU_B1_DQ9},
/*4*/	 {DDRPHY_REG_SHU_B0_DQ9, DDRPHY_REG_SHU_B1_DQ9},
/*5*/	 {DDRPHY_REG_SHU_B0_DQ9, DDRPHY_REG_SHU_B1_DQ9},
/*6*/	 {DDRPHY_REG_SHU_B0_DQ9, DDRPHY_REG_SHU_B1_DQ9},
};
/*
 * DPHYSaveToSRAMShuWA - back up the per-channel/per-rank RX Vref selection.
 *
 * Reads SHU_B0/B1 PHY_VREF_SEL for every channel and rank, packs them as
 * (B1 << 16) | B0 and writes the result into the per-shuffle scratch
 * register from gSRAMBackupIdx[sram_shu_level][rank] (SHU0 register set).
 * Channel/rank mapping and ShuRGAccessIdx are restored before returning.
 */
void DPHYSaveToSRAMShuWA(DRAMC_CTX_T *p, U8 sram_shu_level)
{
	U8 u1ChannelIdx=0, u1RankIdx=0;
	U32 u4Offset=0; // unused u4B0_DQ1 removed
	U8 u1Ch_backup = p->channel, u1Rk_backup = p->rank;
	U32 u4B0_PHY_VREF_SEL=0, u4B1_PHY_VREF_SEL=0, u4PHY_VREF_SEL=0;
	DRAM_DFS_REG_SHU_T ShuRGAccessIdxBackup = p->ShuRGAccessIdx;

	for (u1ChannelIdx=CHANNEL_A; u1ChannelIdx < (p->support_channel_num); u1ChannelIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChannelIdx);
		for (u1RankIdx = RANK_0; u1RankIdx < (p->support_rank_num); u1RankIdx++)
		{
			vSetRank(p, u1RankIdx);
			u4Offset = 0; // B0
			u4B0_PHY_VREF_SEL = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL+u4Offset));
			u4Offset = DDRPHY_AO_B0_B1_OFFSET; // B1
			u4B1_PHY_VREF_SEL = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL+u4Offset));

			// Pack both bytes' Vref into one word: [31:16]=B1, [15:0]=B0.
			u4PHY_VREF_SEL = (u4B1_PHY_VREF_SEL<<16) | u4B0_PHY_VREF_SEL;

			msg("[DPHYSaveToSRAMShuWA] CH%d RK%d, B1B0_PHY_VREF_SEL=0x%x\n", u1ChannelIdx, u1RankIdx, u4PHY_VREF_SEL);
			p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
			vIO32Write4B(DRAMC_REG_ADDR(gSRAMBackupIdx[sram_shu_level][u1RankIdx]), u4PHY_VREF_SEL);
		}
	}

	// Restore caller's channel/rank mapping and shuffle RG access index.
	p->ShuRGAccessIdx = ShuRGAccessIdxBackup;
	vSetPHY2ChannelMapping(p, u1Ch_backup);
	vSetRank(p, u1Rk_backup);
}
942 
/*
 * DPHYSRAMShuWAToSHU1 - copy the current (SHU0) RX Vref selection into the
 * SHU1 register set for every channel and rank.
 *
 * Reads SHU_B0/B1 PHY_VREF_SEL from the active shuffle set, then writes the
 * same values back with ShuRGAccessIdx pointed at DRAM_DFS_REG_SHU1.
 * Channel/rank mapping and ShuRGAccessIdx are restored before returning.
 */
void DPHYSRAMShuWAToSHU1(DRAMC_CTX_T *p)
{
	U8 u1ChannelIdx=0, u1RankIdx=0;
	U32 u4Offset=0; // unused u4B0_DQ1 removed
	U8 u1Ch_backup = p->channel, u1Rk_backup = p->rank;
	U32 u4B0_PHY_VREF_SEL=0, u4B1_PHY_VREF_SEL=0, u4PHY_VREF_SEL=0;
	DRAM_DFS_REG_SHU_T ShuRGAccessIdxBackup = p->ShuRGAccessIdx;

	for (u1ChannelIdx=CHANNEL_A; u1ChannelIdx < (p->support_channel_num); u1ChannelIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChannelIdx);
		for (u1RankIdx = RANK_0; u1RankIdx < (p->support_rank_num); u1RankIdx++)
		{
			vSetRank(p, u1RankIdx);
			u4Offset = 0; // B0
			u4B0_PHY_VREF_SEL = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL+u4Offset));
			u4Offset = DDRPHY_AO_B0_B1_OFFSET; // B1
			u4B1_PHY_VREF_SEL = u4IO32Read4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL+u4Offset));

			u4PHY_VREF_SEL = (u4B1_PHY_VREF_SEL<<16) | u4B0_PHY_VREF_SEL;
			// Log tag fixed to match the function name.
			msg("[DPHYSRAMShuWAToSHU1] CH%d RK%d, B1B0_PHY_VREF_SEL=0x%x\n", u1ChannelIdx, u1RankIdx, u4PHY_VREF_SEL);

			p->ShuRGAccessIdx = DRAM_DFS_REG_SHU1;
			u4Offset = 0; // B0
			vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL+u4Offset), u4B0_PHY_VREF_SEL);
			u4Offset = DDRPHY_AO_B0_B1_OFFSET; // B1
			vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL+u4Offset), u4B1_PHY_VREF_SEL);
			p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
		}
	}

	// Restore caller's channel/rank mapping and shuffle RG access index.
	p->ShuRGAccessIdx = ShuRGAccessIdxBackup;
	vSetPHY2ChannelMapping(p, u1Ch_backup);
	vSetRank(p, u1Rk_backup);
}
978 
/*
 * SRAMShuRestoreToDPHYWA - inverse of DPHYSaveToSRAMShuWA.
 *
 * Reads the packed (B1<<16)|B0 Vref word previously stashed in
 * gSRAMBackupIdx[sram_shu_level][rank] (read via the ping-pong shuffle set),
 * unpacks it per byte lane and writes each half back into
 * SHU_B0/B1 PHY_VREF_SEL of the ping-pong shuffle register set.
 * Channel/rank mapping and ShuRGAccessIdx are restored before returning.
 */
void SRAMShuRestoreToDPHYWA(DRAMC_CTX_T *p, U8 sram_shu_level, U8 pingpong_shu_level)
{
	U8 u1ChannelIdx=0, u1RankIdx=0, u1ByteIdx=0;
	U32 u4Offset=0;
	U8 u1Ch_backup = p->channel, u1Rk_backup = p->rank;
	U32 u4Byte_PHY_VREF_SEL=0, u4PHY_VREF_SEL=0;
	DRAM_DFS_REG_SHU_T ShuRGAccessIdxBackup = p->ShuRGAccessIdx;

	for (u1ChannelIdx=CHANNEL_A; u1ChannelIdx < (p->support_channel_num); u1ChannelIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChannelIdx);
		for (u1RankIdx = RANK_0; u1RankIdx < (p->support_rank_num); u1RankIdx++)
		{
			vSetRank(p, u1RankIdx);
			// Fetch the packed backup word via the ping-pong shuffle set.
			p->ShuRGAccessIdx = pingpong_shu_level;
			u4PHY_VREF_SEL = u4IO32Read4B(DRAMC_REG_ADDR(gSRAMBackupIdx[sram_shu_level][u1RankIdx]));
			p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;

			// Unpack: halfword 0 -> byte lane B0, halfword 1 -> byte lane B1.
			for(u1ByteIdx=0; u1ByteIdx<DQS_NUMBER_LP4; u1ByteIdx++)
			{
				u4Offset = u1ByteIdx*DDRPHY_AO_B0_B1_OFFSET;
				u4Byte_PHY_VREF_SEL = (u4PHY_VREF_SEL >> (16*u1ByteIdx)) & 0xffff;

				//msg("[SRAMShuRestoreToDPHYWA] CH%d RK%d B%d, u4Byte_PHY_VREF_SEL=0x%x\n", u1ChannelIdx, u1RankIdx, u1ByteIdx, u4Byte_PHY_VREF_SEL);

				p->ShuRGAccessIdx = pingpong_shu_level;
				vIO32Write4B(DRAMC_REG_ADDR(DDRPHY_REG_SHU_B0_PHY_VREF_SEL+u4Offset), u4Byte_PHY_VREF_SEL);
				p->ShuRGAccessIdx = DRAM_DFS_REG_SHU0;
			}
		}
	}

	// Restore caller's channel/rank mapping and shuffle RG access index.
	p->ShuRGAccessIdx = ShuRGAccessIdxBackup;
	vSetPHY2ChannelMapping(p, u1Ch_backup);
	vSetRank(p, u1Rk_backup);
}
1015 #endif
1016 
/*
 * EnableDFSHwModeClk - select shuffle HW mode for the MCK/208M clock switch.
 * (Comments on the DESTI/SOURCE fields below were swapped in the original;
 * corrected to match the field names.)
 */
void EnableDFSHwModeClk(DRAMC_CTX_T *p)
{
	//Shuffle HW mode for MCK/208M switch
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL3,
						P_Fld(0x3, MISC_DVFSCTL3_RG_DVFS_MEM_CK_SEL_DESTI) | // dvfs destination clock selection when ddrphy shuffle
						P_Fld(0x1, MISC_DVFSCTL3_RG_DVFS_MEM_CK_SEL_SOURCE)); // dvfs source clock selection when ddrphy shuffle
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_CLK_CTRL,
						P_Fld(0x1, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_UPDATE_EN) | //M_CK clock mux selection update enable by shuffle
						P_Fld(0x1, MISC_CLK_CTRL_DVFS_CLK_MEM_SEL) | // by shuffle
						P_Fld(0x0, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_SEL_MODE) | // HW mode by shuffle
						P_Fld(0x1, MISC_CLK_CTRL_DVFS_MEM_CK_MUX_SEL)); // 4-to-1 mux for PLLCK
}
1029 
/*
 * DVFSSettings - one-time DVFS state-machine / shuffle configuration.
 *
 * Programs the DVFS_SM clock source, DLL idle counts (div-mode dependent),
 * the current SRAM shuffle level for SPM latch/restore, RG-mode DFS trigger
 * selection, lpif legacy control muxes and assorted per-chip shuffle options.
 * Broadcast is turned off for the duration and restored at the end, because
 * several writes below are chip/channel specific.
 */
void DVFSSettings(DRAMC_CTX_T *p)
{
	U8 u1DVFS_52M_104M_SEL = 1; // DVFS_SM freq: 0: 52Mhz 1:104Mhz
	U8 u1Master_DLL_Idle = 0x2b; // Master from MCK
	U8 u1Slave_DLL_Idle = 0x43; // Slave from MCK
#if (fcFOR_CHIP_ID == fcA60868) // @Darren, for A60868 only
	U8 u1ChClkIgnore[2] = {ENABLE, ENABLE}, u1Channel = 0; // 1=ignore
#endif
	U32 backup_broadcast = GetDramcBroadcast();
	DramcBroadcastOnOff(DRAMC_BROADCAST_OFF);

	// Div-16 mode needs longer DLL idle counts than the div-8 defaults above.
	if (vGet_Div_Mode(p) == DIV16_MODE)
	{
		u1Master_DLL_Idle = 0x37; // Master from MCK
		u1Slave_DLL_Idle = 0x4D; // Slave from MCK
	}

	//DVFS debug enable - MRR_STATUS2_DVFS_STATE
	//@Lynx, A60868 HW always enable shuffle debug. remove RG: DVFSDLL_R_DDRPHY_SHUFFLE_DEBUG_ENABLE
	//vIO32WriteFldAlign_All(DRAMC_REG_DVFSDLL, 1, DVFSDLL_R_DDRPHY_SHUFFLE_DEBUG_ENABLE);

	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CKMUX_SEL, u1DVFS_52M_104M_SEL, MISC_CKMUX_SEL_RG_52M_104M_SEL); //Set DVFS_SM's clk
#if ENABLE_DFS_208M_CLOCK
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CKMUX_SEL, 0x1, MISC_CKMUX_SEL_RG_104M_208M_SEL); //Set DVFS_SM's clk to 208M
#endif
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_SHU_DVFSDLL, P_Fld(u1Master_DLL_Idle, MISC_SHU_DVFSDLL_R_DLL_IDLE)
		| P_Fld(u1Slave_DLL_Idle, MISC_SHU_DVFSDLL_R_2ND_DLL_IDLE));

	// @Darren, set current SRAM SHU index for SPM mode DFS latch/restore
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, vGet_Current_ShuLevel(p), MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL_SRAM);
	//msg("[DVFSSettings] SHU_LEVEL_SRAM = %d\n", vGet_Current_ShuLevel(p));

#if (fcFOR_CHIP_ID == fcA60868) // @Darren, for A60868 only
	// Stop ignoring the clk-ready handshake on every supported channel.
	for (u1Channel = CHANNEL_A; u1Channel < p->support_channel_num; u1Channel++)
		u1ChClkIgnore[u1Channel] = DISABLE;
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL2, P_Fld(u1ChClkIgnore[1], MISC_DVFSCTL2_RG_IGNORE_PHY_SH_CHG_CLK_RDY_CHB)
		| P_Fld(u1ChClkIgnore[0], MISC_DVFSCTL2_RG_IGNORE_PHY_SH_CHG_CLK_RDY_CHA));
#endif
	// DFS trigger by DDRPHY RG
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_SPM_DVFS_CONTROL_SEL); // DFS RG mode for calibration
	//vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_TX_TRACKING_DIS); // DFS RG mode for disable tx tracking
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFSCTL2, 1, MISC_DVFSCTL2_RG_MRW_AFTER_DFS);

	// lpif legacy control muxes: 1 = SPM control (flipped back to MD32 in DPMInit).
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_FSM_CFG_1, P_Fld(1, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL)
		| P_Fld(1, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL_2ND)
		| P_Fld(1, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL_FOR_PWR)
		| P_Fld(1, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL_FOR_PWR_2ND));
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_SHU_OPT, P_Fld(1, MISC_SHU_OPT_R_DQB0_SHU_PHY_GATING_RESETB_SPM_EN)
		| P_Fld(1, MISC_SHU_OPT_R_DQB1_SHU_PHY_GATING_RESETB_SPM_EN));
#if ENABLE_DFS_HW_SAVE_MASK
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFSCTL2, 1, MISC_DVFSCTL2_DVFS_SYNC_MASK_FOR_PHY); // 0x1 = disable dfs hw save
#endif

#if ENABLE_DVFS_CDC_SYNCHRONIZER_OPTION
	//CDC option
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL2, P_Fld(1, MISC_DVFSCTL2_R_DVFS_CDC_OPTION) //Lewis@20170331: Not set SHUCTRL2_R_DVFS_CDC_OPTION to 1 since it will lead DDR reserve mode fail in DDR2400 and DDR1600
		| P_Fld(1, MISC_DVFSCTL2_R_CDC_MUX_SEL_OPTION));
#endif

#if 0 // @Darren, reserved from Mengru Dsim
	U8 u1MarginNew = (u1DVFS_52M_104M_SEL == 1) ? 0x3 : 0x1;
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SHU_OPT, 0x2, MISC_SHU_OPT_R_CA_SHU_PHDET_SPM_EN);
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL, P_Fld(u1MarginNew, MISC_DVFSCTL_R_DVFS_PICG_MARGIN_NEW)
			 | P_Fld(u1MarginNew, MISC_DVFSCTL_R_DVFS_PICG_MARGIN2_NEW)
		| P_Fld(u1MarginNew, MISC_DVFSCTL_R_DVFS_PICG_MARGIN3_NEW));
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CKMUX_SEL, 0x1, MISC_CKMUX_SEL_FMEM_CK_MUX);
	vIO32WriteFldMulti_All(DRAMC_REG_DVFS_CTRL0, P_Fld(0x1, DVFS_CTRL0_R_DRAMC_CHA)
			 | P_Fld(0x0, DVFS_CTRL0_DVFS_CKE_OPT)
			 | P_Fld(0x1, DVFS_CTRL0_SCARB_PRI_OPT)
		| P_Fld(0x0, DVFS_CTRL0_SHU_PHYRST_SEL));
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL2, P_Fld(0x1, MISC_DVFSCTL2_R_DVFS_PARK_N)
		| P_Fld(0x1, MISC_DVFSCTL2_R_DVFS_OPTION));
#endif

#if ENABLE_BLOCK_APHY_CLOCK_DFS_OPTION
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CG_CTRL7, 1, MISC_CG_CTRL7_ARMCTL_CK_OUT_CG_SEL);
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL, P_Fld(1, MISC_DVFSCTL_R_DVFS_PICG_POSTPONE)
		| P_Fld(1, MISC_DVFSCTL_R_DMSHUFFLE_CHANGE_FREQ_OPT));
#endif

#if ENABLE_REMOVE_MCK8X_UNCERT_DFS_OPTION // @Mazar
	//vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFS_EMI_CLK, 1, MISC_DVFS_EMI_CLK_RG_DLL_SHUFFLE_DDRPHY);
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL, P_Fld(1, MISC_DVFSCTL_R_SHUFFLE_PI_RESET_ENABLE)
		| P_Fld(3, MISC_DVFSCTL_R_DVFS_MCK8X_MARGIN)
		| P_Fld(3, MISC_DVFSCTL_R_DVFS_PICG_MARGIN4_NEW));
#endif

#if RDSEL_TRACKING_EN
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA1, 0x1ffff, MISC_SRAM_DMA1_SPM_RESTORE_STEP_EN);
#endif

#if (fcFOR_CHIP_ID == fcMargaux) // @Darren, for Mar_gaux New setting for ddrphy shuffle (sync mode)
	// Note: per-channel writes (not broadcast) -- CHA and CHB get opposite values.
	vIO32WriteFldAlign(DDRPHY_REG_MISC_DVFSCTL2, 0, MISC_DVFSCTL2_R_DVFS_CLK_CHG_OK_SEL);
	vIO32WriteFldAlign(DDRPHY_REG_MISC_DVFSCTL2 + SHIFT_TO_CHB_ADDR, 1, MISC_DVFSCTL2_R_DVFS_CLK_CHG_OK_SEL);
#endif

	//Cann_on CDC options
	//DLL_SHUFFLE should be set enable before switch frequency
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFS_EMI_CLK, 0, MISC_DVFS_EMI_CLK_RG_DLL_SHUFFLE_DDRPHY);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_DVFSCTL2, 0, MISC_DVFSCTL2_RG_DLL_SHUFFLE);
	vIO32WriteFldMulti_All(DDRPHY_REG_MISC_DVFSCTL2, P_Fld(0, MISC_DVFSCTL2_R_DVFS_OPTION)
		| P_Fld(0, MISC_DVFSCTL2_R_DVFS_PARK_N));

#if ENABLE_DFS_TIMING_ENLARGE
	DFSEnlargeTimingSettings(p);
#endif

	//EnableDFSHwModeClk(p); // @Darren, for DFS shuffle change

	DramcBroadcastOnOff(backup_broadcast);
}
1141 
1142 #if ENABLE_DFS_SSC_WA
/*
 * DDRSSCSetting - program spread-spectrum clocking (SSC) parameters for the
 * PHYPLL/CLRPLL SDM at the SSC-capable data rates.
 *
 * Only 1866/1600/1200 get a modulation depth (DELTA1); any other frequency
 * returns without touching the registers. SSC_EN itself is asserted later
 * (see DramcSSCHoppingOnOff), not here.
 */
void DDRSSCSetting(DRAMC_CTX_T * p)
{
	U32 u4DELTA1 = 0;

	// Per-frequency SSC modulation depth.
	switch (p->frequency)
	{
	case 1866:
		u4DELTA1 = 0xE14;
		break;
	case 1600:
		u4DELTA1 = 0xC1C;
		break;
	case 1200:
		u4DELTA1 = 0x90F;
		break;
	default:
		return; // SSC not supported/needed at this frequency
	}

	// Fractional SDM mode is required for SSC.
	// NOTE(review): the original issued this exact PHYPLL1 write twice;
	// every other setting below is applied to both PHYPLL and CLRPLL, so the
	// duplicate was probably meant to be SHU_CLRPLL1 RG_RCLRPLL_SDM_FRA_EN --
	// confirm against the register manual. The redundant duplicate write is
	// removed here; behavior is unchanged.
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_PHYPLL1, 0x1, SHU_PHYPLL1_RG_RPHYPLL_SDM_FRA_EN);

	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_PHYPLL6, 0x1, SHU_PHYPLL6_RG_RPHYPLL_SDM_SSC_PH_INIT);
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CLRPLL6, 0x1, SHU_CLRPLL6_RG_RCLRPLL_SDM_SSC_PH_INIT);

	// Modulation period.
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_PHYPLL6, 0x0208, SHU_PHYPLL6_RG_RPHYPLL_SDM_SSC_PRD);
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CLRPLL6, 0x0208, SHU_CLRPLL6_RG_RCLRPLL_SDM_SSC_PRD);

	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_PHYPLL7, 0x0, SHU_PHYPLL7_RG_RPHYPLL_SDM_SSC_DELTA);
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CLRPLL7, 0x0, SHU_CLRPLL7_RG_RCLRPLL_SDM_SSC_DELTA);

	// Modulation depth per the frequency table above.
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_PHYPLL7, u4DELTA1, SHU_PHYPLL7_RG_RPHYPLL_SDM_SSC_DELTA1);
	vIO32WriteFldAlign_All(DDRPHY_REG_SHU_CLRPLL7, u4DELTA1, SHU_CLRPLL7_RG_RCLRPLL_SDM_SSC_DELTA1);

	//vIO32WriteFldAlign_All(DDRPHY_PLL1, 0x1, PLL1_RG_RPHYPLL_SDM_SSC_EN);
	//vIO32WriteFldAlign_All(DDRPHY_PLL2, 0x1, PLL2_RG_RCLRPLL_SDM_SSC_EN);
}
1182 
/*
 * DramcSSCHoppingOnOff - toggle SDM SSC on whichever PLL currently owns the
 * clock, but only for selected shuffle levels.
 *
 * Levels outside the set {0x0, 0x5, 0x6, 0x8, 0x9} are left untouched
 * (presumably the non-SSC frequencies -- confirm against the DFS table).
 */
void DramcSSCHoppingOnOff(DRAMC_CTX_T *p, U8 cur_shu_level, U8 u1OnOff)
{
	switch (cur_shu_level)
	{
	case 0x0:
	case 0x5:
	case 0x6:
	case 0x8:
	case 0x9:
		if (p->u1PLLMode == PHYPLL_MODE)
			vIO32WriteFldAlign(DDRPHY_REG_PHYPLL0, u1OnOff, PHYPLL0_RG_RPHYPLL_SDM_SSC_EN); // PHYPLL SSC
		else
			vIO32WriteFldAlign(DDRPHY_REG_CLRPLL0, u1OnOff, CLRPLL0_RG_RCLRPLL_SDM_SSC_EN); // CLRPLL SSC
		break;
	default:
		break; // other shuffle levels: no SSC change
	}
}
1193 #endif
1194 
1195 
1196 #if DVT_TEST_DUMMY_RD_SIDEBAND_FROM_SPM || ENABLE_DFS_SSC_WA
/*
 * DVS_DMY_RD_ENTR - dummy-read entry sequence for SPM DVS.
 *
 * Ordered, timed sequence: enable the DDRPHY feedback clock first, then the
 * dummy-read mode select, interval select and finally the dummy-read enable,
 * with a 1us settle after each step. Mirrored by DVS_DMY_RD_EXIT, which
 * clears the same bits in reverse order.
 */
void DVS_DMY_RD_ENTR(DRAMC_CTX_T *p)
{
	/*TINFO="DRAM : SPM DVS DMY RD ENTR"*/

	/*TINFO="DRAM : set sc_ddrphy_fb_ck_en = 1"*/
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 1, LPIF_LOW_POWER_CFG_0_DDRPHY_FB_CK_EN);


	mcDELAY_US(1);

	/*TINFO="DRAM : set sc_dmyrd_en_mod_sel = 1"*/
	//! diff with WE
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 1, LPIF_LOW_POWER_CFG_1_DMY_EN_MOD_SEL);

	mcDELAY_US(1);

	/*TINFO="DRAM : set sc_dmyrd_intv_sel = 1"*/
	//! diff with WE
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 1, LPIF_LOW_POWER_CFG_1_DMYRD_INTV_SEL);

	mcDELAY_US(1);

	/*TINFO="DRAM : set sc_dmyrd_en = 1"*/
	//! diff with WE
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 1, LPIF_LOW_POWER_CFG_1_DMYRD_EN);

	mcDELAY_US(1);
}
1225 
/*
 * DVS_DMY_RD_EXIT - dummy-read exit sequence for SPM DVS.
 *
 * Exact reverse of DVS_DMY_RD_ENTR: clear the dummy-read enable first, then
 * interval select, mode select, and finally the DDRPHY feedback clock enable,
 * with a 1us settle after each step.
 */
void DVS_DMY_RD_EXIT(DRAMC_CTX_T *p)
{
	/*TINFO="DRAM : SPM DVS DMY RD EXIT"*/

	/*TINFO="DRAM : set sc_dmyrd_en = 0"*/
	//! diff with WE
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 0, LPIF_LOW_POWER_CFG_1_DMYRD_EN);

	mcDELAY_US(1);

	/*TINFO="DRAM : set sc_dmyrd_intv_sel = 0"*/
	//! diff with WE
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 0, LPIF_LOW_POWER_CFG_1_DMYRD_INTV_SEL);

	mcDELAY_US(1);

	/*TINFO="DRAM : set sc_dmyrd_en_mod_sel = 0"*/
	//! diff with WE
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 0, LPIF_LOW_POWER_CFG_1_DMY_EN_MOD_SEL);

	mcDELAY_US(1);

	/*TINFO="DRAM : set sc_ddrphy_fb_ck_en = 0"*/
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_DDRPHY_FB_CK_EN);

	mcDELAY_US(1);


	/*TINFO="DRAM : SPM DVS DMY RD EXIT end "*/
}
1256 #endif
1257 
1258 
1259 #if 1//(FOR_DV_SIMULATION_USED==0 && SW_CHANGE_FOR_SIMULATION==0)
1260 
/*
 * DPMEnableTracking - set or clear one per-shuffle tracking enable bit.
 *
 * The per-shuffle enable bits are laid out contiguously starting at the bit
 * position of u4Field; the bit for shuffle u1ShuIdx is addressed by shifting
 * the base field position up by the index. Broadcast to all channels.
 */
void DPMEnableTracking(DRAMC_CTX_T *p, U32 u4Reg, U32 u4Field, U8 u1ShuIdx, U8 u1Enable)
{
	vIO32WriteFldAlign_All(u4Reg, u1Enable, Fld(1, (Fld_shft(u4Field) + u1ShuIdx)));
}
1269 
/*
 * DPMInit - initialize the DRAMC MD32 (DPM) low-power interface.
 *
 * Pre-sets the lpif PLL/DLL/Vref enables, selects which PLL (PHYPLL or
 * CLRPLL, per p->u1PLLMode) is the shuffle/mode-switch target, routes all
 * lpif control to MD32 CFG ("fw mode"), programs the current SRAM shuffle
 * level for PST-mode DFS, hands DFS control back from RG mode, and enables
 * DFD debug isolation. All writes are broadcast (_All variants).
 */
void DPMInit(DRAMC_CTX_T *p)
{
	U8 u1SetVal;
	U8 u1Pll1Val, u1Pll2Val;
	U8 u1ShuSramVal;
	DRAM_DFS_SRAM_SHU_T u1CurrShuLevel = vGet_Current_ShuLevel(p);

	// One enable bit per channel: 0x3 for dual channel, 0x1 for single.
	u1SetVal = (p->support_channel_num > 1) ? 0x3 : 0x1;

	// pre-setting DPM to dramc low power interface setting
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0,
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_0_PHYPLL_EN) |	// both channel phy pll en
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_0_DPY_DLL_EN) |	// both channel dpy pll en
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_0_DPY_2ND_DLL_EN) |	// both channel dpy 2nd pll en
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_0_DPY_DLL_CK_EN) |	// both channel dpy dll ck en
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_0_DPY_VREF_EN));	// both channel dpy vref en

	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_3,
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_3_DPY_MCK8X_EN) |	// both channel mck8x en
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_3_DPY_MIDPI_EN) |	// both channel midpi en
			P_Fld(u1SetVal, LPIF_LOW_POWER_CFG_3_DPY_PI_RESETB_EN));	// both channel dpy pi resetb en

	// Route the shuffle/mode-switch enables to the active PLL only.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		msg("PHYPLL\n");
		u1Pll1Val = u1SetVal;
		u1Pll2Val = 0;
	}
	else
	{
		msg("CLRPLL\n");
		u1Pll1Val = 0;
		u1Pll2Val = u1SetVal;
	}

	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0,
			P_Fld(u1Pll1Val, LPIF_LOW_POWER_CFG_0_PHYPLL_SHU_EN) |
			P_Fld(u1Pll1Val, LPIF_LOW_POWER_CFG_0_PHYPLL_MODE_SW) |
			P_Fld(u1Pll2Val, LPIF_LOW_POWER_CFG_0_PHYPLL2_SHU_EN) |
			P_Fld(u1Pll2Val, LPIF_LOW_POWER_CFG_0_PHYPLL2_MODE_SW));

	// all by lpif fw mode
	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_FSM_CFG_1,
			/* TBA set control mux in DV initial */
			P_Fld(0x0, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL) |				// 0: MD32, 1: SPM
			P_Fld(0x0, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL_2ND) |			// 0: MD32, 1: SPM
			P_Fld(0x0, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL_FOR_PWR) |		// 0: MD32, 1: SPM
			P_Fld(0x0, LPIF_FSM_CFG_1_LPIF_LEGACY_CONTROL_FOR_PWR_2ND) |	// 0: MD32, 1: SPM
			P_Fld(0x1, LPIF_FSM_CFG_1_LPIF_OUTPUT_PATH_FROM_SW) |			// 0: MD32 SCU, 1: MD32 CFG
			P_Fld(0x1, LPIF_FSM_CFG_1_LPIF_OUTPUT_PATH_FROM_SW_2ND) |		// 0: MD32 SCU, 1: MD32 CFG
			P_Fld(0x1, LPIF_FSM_CFG_1_LPIF_POWER_CONTROL_SEL) | 			// 0: MD32 SCU, 1: MD32 CFG
			P_Fld(0x1, LPIF_FSM_CFG_1_LPIF_POWER_CONTROL_SEL_2ND)); 		// 0: MD32 SCU, 1: MD32 CFG

	vIO32WriteFldMulti_All(DDRPHY_MD32_REG_LPIF_FSM_OUT_CTRL_0,
			P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL_EN) |
			P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_DLL_EN) |
			P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_2ND_DLL_EN) |
			P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_DLL_CK_EN) |
			P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_VREF_EN) |
			P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL_SHU_EN) | // @Darren, fix dfs phypll init
			P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL_MODE_SW));

	// Nibble 0 = CHA shuffle level; nibble 1 duplicates it for CHB.
	u1ShuSramVal = u1CurrShuLevel;

	if (p->support_channel_num > 1)
		u1ShuSramVal |= u1CurrShuLevel << 4;

	// NOTE: MD32 PST mode shuffle level = (LPIF_CTRL_CTRL1_LPIF_DRAMC_DR_SHU_LEVEL_SRAM | LPIF_LOW_POWER_CFG_1_DR_SHU_SRAM_LEVEL)
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, u1ShuSramVal, LPIF_LOW_POWER_CFG_1_DR_SHU_SRAM_LEVEL);

#if __ETT__
	/* internal test mode */
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_SSPM_CFGREG_GPR0, 0xE7700E77, SSPM_CFGREG_GPR0_GPR0);
#endif

	// for DFS: leave RG mode and release the manual PLL enables.
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0x0, MISC_RG_DFS_CTRL_SPM_DVFS_CONTROL_SEL);
	vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 0x0, PHYPLL0_RG_RPHYPLL_EN);
	vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 0x0, CLRPLL0_RG_RCLRPLL_EN);

	// enable DFD
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_DFD_DBUG_0 , 0x1, LPIF_DFD_DBUG_0_LPIF_DFD_DEBUG_ISO_EN);
}
1353 
1354 //-------------------------------------------------------------------------
1355 /** TransferPLLToSPMControl
1356  *	1. Enable DVFS to SPM control
1357  *	2. Configs SPM pinmux
1358  *	3. To control PLL between PHYPLL and CLRPLL via SPM
1359  *	4. set current SRAM SHU index for SPM mode DFS latch/restore
1360  */
1361 //-------------------------------------------------------------------------
/*
 * TransferPLLToSPMControl - finalize handover of DFS/PLL control after
 * calibration.
 *
 * The PLL/DLL power enables and the lpif control muxes are pre-configured in
 * DPMInit(); here only the FSM_OUT_CTRL logging options are programmed for
 * whichever PLL (PHYPLL or CLRPLL, per p->u1PLLMode) currently owns the
 * clock, followed by optional shu_en->shu_ack latency-counter enable and the
 * MD32 clock divider setup. MD32Offset selects the DPM instance.
 *
 * (Large blocks of commented-out legacy RG-mode programming were removed;
 * see DPMInit() for the live equivalents.)
 */
void TransferPLLToSPMControl(DRAMC_CTX_T *p, U32 MD32Offset)
{
	msg("TransferPLLToSPMControl - MODE SW ");

	if (p->u1PLLMode == PHYPLL_MODE)
	{
		// PHYPLL owns the clock: log-option bits for PHYPLL shuffle/mode-sw.
		vIO32WriteFldMulti(DDRPHY_MD32_REG_LPIF_FSM_OUT_CTRL_0+MD32Offset,
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_DLL_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_2ND_DLL_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_DLL_CK_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_VREF_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL_SHU_EN) | // @Darren, fix dfs phypll init
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL_MODE_SW));
	}
	else
	{
		// CLRPLL owns the clock: log-option bits for PHYPLL2 shuffle/mode-sw.
		vIO32WriteFldMulti(DDRPHY_MD32_REG_LPIF_FSM_OUT_CTRL_0+MD32Offset,
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_DLL_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_2ND_DLL_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_DLL_CK_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_DPY_VREF_EN) |
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL2_SHU_EN) | // @Darren, fix dfs clrpll init
				P_Fld(0x1, LPIF_FSM_OUT_CTRL_0_LOG_OPT_PHYPLL2_MODE_SW));
	}
	mcDELAY_US(1);

#if DFS_NOQUEUE_FLUSH_WA
	// Enable Max cnt for latency measure from shu_en to shu_ack
	vIO32WriteFldAlign(DDRPHY_MD32_REG_LPIF_FSM_CFG+MD32Offset, 1, LPIF_FSM_CFG_DBG_LATENCY_CNT_EN);
#endif

	// MD32 clock is 208M
	vIO32WriteFldMulti(DDRPHY_MD32_REG_SSPM_MCLK_DIV+MD32Offset,
			P_Fld(0, SSPM_MCLK_DIV_MCLK_SRC) |
			P_Fld(0, SSPM_MCLK_DIV_MCLK_DIV));
}
1450 #endif
1451 
1452 
1453 #if ENABLE_DVFS_BYPASS_MR13_FSP
/*
 * DFSBypassMR13HwSet - mark which shuffle levels may bypass the MR13 FSP
 * switch during DFS.
 *
 * For every entry of gFreqTbl, sets the per-SHU bypass bit when the target
 * data rate is <= LP4_DDR2667 (no FSP change needed), then enables the
 * 10-level shuffle mode and the bypass option.
 * NOTE: the whole body is gated by __A60868_TO_BE_PORTING__ and is currently
 * compiled out on this platform.
 */
void DFSBypassMR13HwSet(DRAMC_CTX_T *p)
{
#if __A60868_TO_BE_PORTING__
	U8 u1ShuffleIdx, BFSP = 0, u1SramShuIdx = 0;
	REG_TRANSFER_T TransferReg;

	TransferReg.u4Addr = DRAMC_REG_BYPASS_FSPOP;
	TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU3; // default; overwritten below
	for (u1ShuffleIdx = 0; u1ShuffleIdx < DRAM_DFS_SRAM_MAX; u1ShuffleIdx++)
	{
		// Map the table entry's SRAM shuffle index to its BPFSP_SET field.
		u1SramShuIdx = gFreqTbl[u1ShuffleIdx].shuffleIdx;
		switch (u1SramShuIdx)
		{
			case 0:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU0;
				break;
			case 1:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU1;
				break;
			case 2:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU2;
				break;
			case 3:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU3;
				break;
			case 4:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU4;
				break;
			case 5:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU5;
				break;
			case 6:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU6;
				break;
			case 7:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU7;
				break;
			case 8:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU8;
				break;
			case 9:
				TransferReg.u4Fld = BYPASS_FSPOP_BPFSP_SET_SHU9;
				break;
			default:
				err("[DFSBypassMR13HwSet] fail at BPFSP_SHU%d incorrect !!!\n", u1SramShuIdx);
				break;
		}
		BFSP = (gFreqTbl[u1ShuffleIdx].freq_sel <= LP4_DDR2667)? 0x1: 0x0; //0x1 (Bypass), 0x0 (Not bypass)
		//msg("[DFSBypassMR13HwSet] BPFSP_SHU%d = 0x%x\n", u1SramShuIdx, BFSP);
		vIO32WriteFldAlign_All(TransferReg.u4Addr, BFSP, TransferReg.u4Fld);
	}
	vIO32WriteFldAlign_All(DRAMC_REG_TX_FREQ_RATIO_OLD_MODE0, 0x1, TX_FREQ_RATIO_OLD_MODE0_SHUFFLE_LEVEL_MODE_SELECT); // 1: shuffle level = 10, 0: shuffle level =4
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_CDC_CTRL, 0x0, MISC_CDC_CTRL_REG_CDC_BYPASS_DBG);
	vIO32WriteFldAlign_All(DRAMC_REG_BYPASS_FSPOP, 0x1, BYPASS_FSPOP_BPFSP_OPT);
#endif
}
1511 
#if FOR_DV_SIMULATION_USED
/*
 * DFSSwitchtoRGMode - hand DVFS control to the RG (register) path by setting
 * SPM_DVFS_CONTROL_SEL in MISC_RG_DFS_CTRL. DV-simulation builds only.
 */
void DFSSwitchtoRGMode(DRAMC_CTX_T *p)
{
	vIO32WriteFldAlign(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_SPM_DVFS_CONTROL_SEL);
}
#endif
1518 
/*
 * DramcSaveToShuffleSRAM - copy the live shuffle register set (APB side,
 * bank srcRG) into shuffle-SRAM bank dstRG via the SW-driven DMA engine,
 * once per supported channel.
 *
 * The DMA is configured in SW mode (SW_MODE / SW_STEP_EN_MODE), direction
 * SRAM-write, then kicked with SW_DMA_FIRE. Completion is busy-polled on
 * MISC_DMA_DEBUG0 SRAM_DONE (bit0) and APB_DONE (bit1).
 *
 * The caller's channel mapping is restored before returning.
 */
void DramcSaveToShuffleSRAM(DRAMC_CTX_T *p, DRAM_DFS_SHUFFLE_TYPE_T srcRG, DRAM_DFS_SHUFFLE_TYPE_T dstRG)
{
	U8 u1ChIdx;
	U8 u1value;
	DRAM_CHANNEL_T eOriChannel = vGetPHY2ChannelMapping(p);	// backup; restored at the end

	#if ENABLE_SRAM_DMA_WA
	DPHYSaveToSRAMShuWA(p, p->pDFSTable->shuffleIdx);
	#endif

	for (u1ChIdx = 0; u1ChIdx < p->support_channel_num; u1ChIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChIdx);

		// Configure the DMA: clear any pending fire, select APB slave,
		// enter SW mode, and pick SRAM-write direction (APB -> SRAM).
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_DMA_FIRE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_APB_SLV_SEL);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_SW_MODE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_SW_STEP_EN_MODE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_SRAM_WR_MODE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_APB_WR_MODE);

		// Source = APB shuffle level, destination = SRAM shuffle level; fire.
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), srcRG, MISC_SRAM_DMA0_SW_SHU_LEVEL_APB);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), dstRG, MISC_SRAM_DMA0_SW_SHU_LEVEL_SRAM);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_SW_DMA_FIRE);
		// Poll until both done flags are set (u1value == 0x3).
		// NOTE(review): no timeout -- a stuck DMA hangs here forever.
		do {
			u1value = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DMA_DEBUG0), MISC_DMA_DEBUG0_SRAM_DONE);
			u1value |= (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DMA_DEBUG0), MISC_DMA_DEBUG0_APB_DONE) << 1);
			msg3("\twait dramc to shuffle sram done.\n");
		} while (u1value != 0x3);
		// De-assert fire and leave SW mode again.
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_DMA_FIRE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_STEP_EN_MODE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_MODE);
	}

	vSetPHY2ChannelMapping(p, eOriChannel);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_SRAM_DMA0, 0, MISC_SRAM_DMA0_SRAM_WR_MODE); //MP setting:should disable WR MDOE
}
1556 
/*
 * LoadShuffleSRAMtoDramc - inverse of DramcSaveToShuffleSRAM: copy shuffle
 * bank srcRG out of the shuffle SRAM into the live APB register bank dstRG,
 * once per supported channel, using the same SW-driven DMA engine.
 *
 * Differs from the save path only in the transfer direction bits:
 * SRAM_WR_MODE=0 / APB_WR_MODE=1 (SRAM -> APB). Completion is busy-polled
 * on the same SRAM_DONE/APB_DONE pair; the caller's channel mapping is
 * restored before returning.
 */
void LoadShuffleSRAMtoDramc(DRAMC_CTX_T *p, DRAM_DFS_SHUFFLE_TYPE_T srcRG, DRAM_DFS_SHUFFLE_TYPE_T dstRG)
{
	U8 u1ChIdx;
	U8 u1value;
	DRAM_CHANNEL_T eOriChannel = vGetPHY2ChannelMapping(p);	// backup; restored at the end

	for (u1ChIdx = 0; u1ChIdx < p->support_channel_num; u1ChIdx++)
	{
		vSetPHY2ChannelMapping(p, u1ChIdx);

		// Configure the DMA in SW mode, direction SRAM -> APB.
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_DMA_FIRE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_APB_SLV_SEL);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_SW_MODE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_SW_STEP_EN_MODE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SRAM_WR_MODE); //diff with DramcSaveToShuffleSRAM
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_APB_WR_MODE); // diff with DramcSaveToShuffleSRAM

		// Destination = APB level, source = SRAM level; fire the transfer.
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), dstRG, MISC_SRAM_DMA0_SW_SHU_LEVEL_APB);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), srcRG, MISC_SRAM_DMA0_SW_SHU_LEVEL_SRAM);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 1, MISC_SRAM_DMA0_SW_DMA_FIRE);
		// Poll until both done flags are set (u1value == 0x3).
		// NOTE(review): no timeout -- a stuck DMA hangs here forever.
		do {
			u1value = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DMA_DEBUG0), MISC_DMA_DEBUG0_SRAM_DONE);
			u1value |= (u4IO32ReadFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_DMA_DEBUG0), MISC_DMA_DEBUG0_APB_DONE) << 1);
			msg3("\twait shuffle sram to dramc done.\n");
		} while (u1value != 0x3);
		// De-assert fire and leave SW mode again.
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_DMA_FIRE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_STEP_EN_MODE);
		vIO32WriteFldAlign(DRAMC_REG_ADDR(DDRPHY_REG_MISC_SRAM_DMA0), 0, MISC_SRAM_DMA0_SW_MODE);
	}

	vSetPHY2ChannelMapping(p, eOriChannel);
}
1589 
/*
 * WaitChShuEnAck - per-channel blocking wait until the register field
 * (u4Addr/u4Fld) reads back u1Status, then report which channels were
 * polled as a bitmask (bit N set = channel N reached the status).
 *
 * The caller's channel mapping is preserved. There is no timeout; the
 * poll spins until the hardware responds.
 */
static U8 WaitChShuEnAck(DRAMC_CTX_T *p, U32 u4Addr, U32 u4Fld, U8 u1Status)
{
	DRAM_CHANNEL_T eBackupChannel = vGetPHY2ChannelMapping(p);
	U8 u1DoneMask = 0;
	U8 u1Ch;

	for (u1Ch = CHANNEL_A; u1Ch < p->support_channel_num; u1Ch++)
	{
		vSetPHY2ChannelMapping(p, u1Ch);

		// Busy-poll the field until it matches the requested status.
		//msg("[WaitChShuEnAck] Wait Shu Ack State = 0x%x\n", u1WaitShuAckState);
		while (u4IO32ReadFldAlign(DRAMC_REG_ADDR(u4Addr), u4Fld) != u1Status)
			;

		u1DoneMask |= (0x1 << u1Ch);
	}

	vSetPHY2ChannelMapping(p, eBackupChannel);

	return u1DoneMask; // shu end
}
1612 
/*
 * DramcDFSDirectJump_SRAMShuRGMode - perform a DFS frequency jump to SRAM
 * shuffle level `shu_level` using direct RG (register) control of the
 * shuffle handshake, ping-ponging between PHYPLL and CLRPLL.
 *
 * Sequence: disable the inactive PLL -> latch the SRAM shuffle level ->
 * select the target PLL and shuffle level -> load the shuffle SRAM bank ->
 * enable the target PLL -> assert DR_SHU_EN and wait for per-channel acks
 * -> disable the previously-active PLL -> restore the old settings into
 * SRAM -> release the feedback clock. Finally p->u1PLLMode is toggled to
 * record which PLL is now active.
 *
 * All waits are unbounded busy-polls on hardware ack bits.
 */
void DramcDFSDirectJump_SRAMShuRGMode(DRAMC_CTX_T *p, U8 shu_level)
{
	U8 u1ShuAck = 0;
	U8 i = 0;
	U8 u1ChkComplete = 1;

	// Turn off the PLL we are about to jump onto before reprogramming it.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		msg3("Disable CLRPLL\n");
		vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 0, CLRPLL0_RG_RCLRPLL_EN);
	}
	else
	{
		msg3("Disable PHYPLL\n");
		vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 0, PHYPLL0_RG_RPHYPLL_EN);
	}

	// Build the expected ack mask: one bit per supported channel.
	for (i = 0; i < p->support_channel_num; i++)
	{
		u1ShuAck |= (0x1 << i);
	}

	if (p->u1PLLMode == PHYPLL_MODE)
	{
		msg3("DFSDirectJump to CLRPLL, SHU_LEVEL=%d, ACK=%x\n", shu_level, u1ShuAck);
	}
	else
	{
		msg3("DFSDirectJump to PHYPLL, SHU_LEVEL=%d, ACK=%x\n", shu_level, u1ShuAck);
	}

	/*TINFO="DRAM : set ddrphy_fb_ck_en=1"*/
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_DDRPHY_FB_CK_EN);

	// sram latch: pulse the SRAM shuffle-level latch for 1us
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL_SRAM_LATCH);
	mcDELAY_US(1);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL_SRAM_LATCH);

	// Point the shuffle machinery at the other PLL; DR_SHU_LEVEL gets
	// !u1PLLMode (0 or 1) since RG mode ping-pongs two conf banks.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_PHYPLL_SHU_EN);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, !p->u1PLLMode, MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_PHYPLL2_SHU_EN);
		msg3("Enable CLRPLL\n");
	}
	else
	{
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_PHYPLL2_SHU_EN);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, !p->u1PLLMode, MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_PHYPLL_SHU_EN);
		msg3("Enable PHYPLL\n");
	}
	mcDELAY_US(1);

#if 1 //Darren-
	//vIO32WriteFldMulti((DDRPHY_MISC_SPM_CTRL3), P_Fld(0, MISC_SPM_CTRL3_RG_DR_SHU_LEVEL_SRAM_CH1)
	//					  | P_Fld(0, MISC_SPM_CTRL3_RG_DR_SHU_LEVEL_SRAM_CH0));
	// Select the target SRAM shuffle bank and trigger the SRAM load.
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, shu_level, MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL_SRAM);

	//wait sram load ack.
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_DR_SRAM_LOAD);
	//while (!u4IO32ReadFldAlign(DDRPHY_MISC_DMA_DEBUG0, MISC_DMA_DEBUG0_SC_DR_SRAM_PLL_LOAD_ACK)); // wait SRAM PLL load ack
	while (WaitChShuEnAck(p, DDRPHY_REG_MISC_DMA_DEBUG0, MISC_DMA_DEBUG0_SC_DR_SRAM_LOAD_ACK, u1ChkComplete) != u1ShuAck)
	//while (!u4IO32ReadFldAlign(DDRPHY_REG_MISC_DMA_DEBUG0, MISC_DMA_DEBUG0_SC_DR_SRAM_LOAD_ACK))
	{
		msg3("\twait sram load ack.\n");
	}
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_DR_SRAM_LOAD);
#endif

	// Enable the target PLL via its own enable bit (not PHYPLLx_MODE_SW).
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 1, MISC_SPM_CTRL1_RG_PHYPLL2_MODE_SW);
		vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 1, CLRPLL0_RG_RCLRPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}
	else
	{
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 1, MISC_SPM_CTRL1_RG_PHYPLL_MODE_SW);
		vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 1, PHYPLL0_RG_RPHYPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}

#if DFS_NOQUEUE_FLUSH_WA
	NoQueueFlushWA(p, ENABLE);
#endif

#if ENABLE_SRAM_DMA_WA
	SRAMShuRestoreToDPHYWA(p, shu_level, !p->u1PLLMode);
#endif

	#if 0//ENABLE_DFS_DEBUG_MODE
	EntryDFSDebugMode(p, CHG_CLK_MODE);
	#endif

	// Let the newly-enabled PLL settle (DV spec says > 20us).
#if (FOR_DV_SIMULATION_USED == 0 && SW_CHANGE_FOR_SIMULATION == 0)
	mcDELAY_US(20); // for SRAM shuffle DV sim spec > 20us
#else
	mcDELAY_XUS(20); // for SRAM shuffle DV sim spec > 20us
#endif

#if 0
	msg3("Enable SHORT-QUEUE\n");
	vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 1, MISC_SPM_CTRL1_RG_DR_SHORT_QUEUE);

	msg3("\twait 5us for short queue ack.\n");
	mcDELAY_US(5);
#endif

	//msg("Disable RX-Tracking\n");
	//vIO32WriteFldAlign(SPM_SW_RSV_8, 0, SW_RSV_8_RX_TRACKING_EN);

	// Kick off the actual shuffle and wait for every channel's ack.
	msg3("SHUFFLE Start\n");
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_DR_SHU_EN); // NOTE: from SHU_EN=1 to ACK, DV spec < 5.1us

#if DFS_NOQUEUE_FLUSH_WA && ENABLE_DFS_NOQUEUE_FLUSH_DBG
	WaitNoQueueFlushComplete(p); // for debug mode MRW skip
#endif

	// Fixed DV sim spec for DFS shu_en=1 < 5.1us and shu_en=0 < 120ns
#if 1//Darren-for test chip(FOR_DV_SIMULATION_USED == 0 && SW_CHANGE_FOR_SIMULATION == 0)
	//msg3("\twait 5us for shu_en ack.\n");
	//mcDELAY_US(5);
	#if 0//ENABLE_DFS_DEBUG_MODE
	ChkDFSDebugMode(p, CHG_CLK_MODE);
	#endif

	//while (WaitChShuEnAck(p, DRAMC_REG_MRR_STATUS2, MRR_STATUS2_DVFS_STATE, u1ShuAckState) != u1ShuAck) // SHUFFLE_END
	//@tg Fix RG mode can not recevie shuffle end ack.
	while ((u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4, LPIF_STATUS_4_DR_SHU_EN_ACK) & u1ShuAck) != u1ShuAck
#if CHANNEL_NUM > 2
	|| (u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4+SHIFT_TO_CHB_ADDR, LPIF_STATUS_4_DR_SHU_EN_ACK) & u1ShuAck) != u1ShuAck
#endif
	)
	{
		msg3("\twait shu_en ack.\n");
	}
#else
	while (u4IO32ReadFldAlign(DRAMC_REG_MRR_STATUS2, MRR_STATUS2_DVFS_STATE) != u1ShuAckState); // SHUFFLE_END
#endif

	#if 0//ENABLE_DFS_DEBUG_MODE
	ExitDFSDebugMode(p, CHG_CLK_MODE);
	#endif

#if ENABLE_TX_REBASE_WDQS_DQS_PI_WA
	TxReBaseWDQSDqsPiWA(p, !p->u1PLLMode);
#endif

#if ENABLE_TX_REBASE_ODT_WA
	TxReadBaseODTWA(p, shu_level);
#endif

	//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 0, MISC_SPM_CTRL1_RG_DR_SHORT_QUEUE);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_DR_SHU_EN); // NOTE: from ACK to SHU_EN=0, DV spec < 120ns
	msg3("SHUFFLE End\n");

	//if(shu_level == 0)//LP4-2CH
	//{
		//msg("Enable RX-Tracking for shuffle-0\n");
		//vIO32WriteFldAlign(SPM_SW_RSV_8, 3, SW_RSV_8_RX_TRACKING_EN);
	//}

	// Power down the PLL we just left.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		/*TINFO="DRAM : set sc_phypll_mode_sw=0"*/
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 0, MISC_SPM_CTRL1_RG_PHYPLL_MODE_SW); // Disable PHYPLL
		vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 0, PHYPLL0_RG_RPHYPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}
	else
	{
		/*TINFO="DRAM : set sc_phypll2_mode_sw=0"*/
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 0, MISC_SPM_CTRL1_RG_PHYPLL2_MODE_SW); // Disable CLRPLL
		vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 0, CLRPLL0_RG_RCLRPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}

#if 1 //Darren-
	//wait sram restore ack.
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_DR_SRAM_RESTORE);
	while (WaitChShuEnAck(p, DDRPHY_REG_MISC_DMA_DEBUG0, MISC_DMA_DEBUG0_SC_DR_SRAM_RESTORE_ACK, u1ChkComplete) != u1ShuAck)
	//while (!u4IO32ReadFldAlign(DDRPHY_REG_MISC_DMA_DEBUG0, MISC_DMA_DEBUG0_SC_DR_SRAM_RESTORE_ACK))
	{
		msg3("\twait sram restore ack.\n");
	}
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_DR_SRAM_RESTORE);

#if DFS_NOQUEUE_FLUSH_WA
	NoQueueFlushWA(p, DISABLE);
#endif

	/*TINFO="DRAM : set ddrphy_fb_ck_en=0"*/
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_DDRPHY_FB_CK_EN);
#endif

	#if ENABLE_TIMING_TXSR_DFS_WA
	TimingTxsrWA(p, shu_level);
	#endif

	msg3("Shuffle flow complete\n");

	// Record that the other PLL is now the active one.
	p->u1PLLMode = !p->u1PLLMode;
	return;
}
1815 
1816 
/*
 * DramcDFSDirectJump_RGMode - perform a DFS frequency jump via legacy RG
 * (register) control, ping-ponging between PHYPLL and CLRPLL using only
 * the two conf banks (no SRAM shuffle levels).
 *
 * shu_level selects the target shuffle: DRAM_DFS_SHUFFLE_1 maps to conf
 * level 0 (original calibration flow), any other value maps to conf
 * level 1 (backup/restore bank). Sequence mirrors the SRAM-RG variant
 * minus the SRAM load/restore handshakes: disable inactive PLL ->
 * select level/PLL -> enable PLL -> settle -> DR_SHU_EN handshake ->
 * disable old PLL -> release feedback clock -> toggle p->u1PLLMode.
 *
 * All waits are unbounded busy-polls on hardware ack bits.
 */
void DramcDFSDirectJump_RGMode(DRAMC_CTX_T *p, U8 shu_level)
{
	U8 u1ShuAck = 0;
	U8 i = 0;
	U8 u1shu_level = 0;

	// Turn off the PLL we are about to jump onto before reprogramming it.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		msg3("Disable CLRPLL\n");
		vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 0, CLRPLL0_RG_RCLRPLL_EN);
	}
	else
	{
		msg3("Disable PHYPLL\n");
		vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 0, PHYPLL0_RG_RPHYPLL_EN);
	}

	// Build the expected ack mask: one bit per supported channel.
	for (i = 0; i < p->support_channel_num; i++)
	{
		u1ShuAck |= (0x1 << i);
	}

	if (p->u1PLLMode == PHYPLL_MODE)
	{
		msg3("DFSDirectJump_RGMode to CLRPLL, SHU_LEVEL=%d, ACK=%x\n", shu_level, u1ShuAck);
	}
	else
	{
		msg3("DFSDirectJump_RGMode to PHYPLL, SHU_LEVEL=%d, ACK=%x\n", shu_level, u1ShuAck);
	}

	/*TINFO="DRAM : set ddrphy_fb_ck_en=1"*/
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_DDRPHY_FB_CK_EN);

	// Legacy RG mode only has two conf banks: shuffle-1 keeps its own
	// level, every other target uses conf bank 1.
	if (shu_level == DRAM_DFS_SHUFFLE_1)
		u1shu_level = shu_level; // Darren: shuffle to shu0 status (original calib flow.)
	else
		u1shu_level = 1; // Darren: Using shu1 for backup/restore, it diff with SPM mode

	// Point the shuffle machinery at the other PLL.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_PHYPLL_SHU_EN);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, u1shu_level, MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_PHYPLL2_SHU_EN);
		msg3("Enable CLRPLL\n");
	}
	else
	{
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_PHYPLL2_SHU_EN);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, u1shu_level, MISC_RG_DFS_CTRL_RG_DR_SHU_LEVEL);
		vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_PHYPLL_SHU_EN);
		msg3("Enable PHYPLL\n");
	}
	mcDELAY_US(1);

	// Enable the target PLL via its own enable bit (not PHYPLLx_MODE_SW).
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 1, MISC_SPM_CTRL1_RG_PHYPLL2_MODE_SW);
		vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 1, CLRPLL0_RG_RCLRPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}
	else
	{
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 1, MISC_SPM_CTRL1_RG_PHYPLL_MODE_SW);
		vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 1, PHYPLL0_RG_RPHYPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}

	// Let the newly-enabled PLL settle (DV spec says > 20us).
#if (FOR_DV_SIMULATION_USED == 0 && SW_CHANGE_FOR_SIMULATION == 0)
	mcDELAY_US(20); // for SRAM shuffle DV sim spec > 20us
#else
	mcDELAY_XUS(20); // for SRAM shuffle DV sim spec > 20us
#endif

#if 0
	msg3("Enable SHORT-QUEUE\n");
	vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 1, MISC_SPM_CTRL1_RG_DR_SHORT_QUEUE);

	msg3("\twait 5us for short queue ack.\n");
	mcDELAY_US(5);
#endif

	//msg("Disable RX-Tracking\n");
	//vIO32WriteFldAlign(SPM_SW_RSV_8, 0, SW_RSV_8_RX_TRACKING_EN);


	// Kick off the actual shuffle and wait for every channel's ack.
	msg3("SHUFFLE Start\n");
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 1, MISC_RG_DFS_CTRL_RG_DR_SHU_EN);

	//msg3("\twait 5us for shu_en ack.\n");
	//mcDELAY_US(5);
	//while (WaitChShuEnAck(p, DRAMC_REG_MRR_STATUS2, MRR_STATUS2_DVFS_STATE, u1ShuAckState) != u1ShuAck) // SHUFFLE_END
	//@tg Fix RG mode can not recevie shuffle end ack.
	while ((u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4, LPIF_STATUS_4_DR_SHU_EN_ACK) & u1ShuAck) != u1ShuAck
#if CHANNEL_NUM > 2
	|| (u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4+SHIFT_TO_CHB_ADDR, LPIF_STATUS_4_DR_SHU_EN_ACK) & u1ShuAck) != u1ShuAck
#endif
	)
	{
		msg3("\twait shu_en ack.\n");
	}

	//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 0, MISC_SPM_CTRL1_RG_DR_SHORT_QUEUE);
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_DR_SHU_EN);
	msg3("SHUFFLE End\n");

	//if(shu_level == 0)//LP4-2CH
	//{
		//msg("Enable RX-Tracking for shuffle-0\n");
		//vIO32WriteFldAlign(SPM_SW_RSV_8, 3, SW_RSV_8_RX_TRACKING_EN);
	//}

	// Power down the PLL we just left.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		/*TINFO="DRAM : set sc_phypll_mode_sw=0"*/
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 0, MISC_SPM_CTRL1_RG_PHYPLL_MODE_SW); // Disable PHYPLL
		vIO32WriteFldAlign_All(DDRPHY_REG_PHYPLL0, 0, PHYPLL0_RG_RPHYPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}
	else
	{
		/*TINFO="DRAM : set sc_phypll2_mode_sw=0"*/
		//vIO32WriteFldAlign(DDRPHY_MISC_SPM_CTRL1, 0, MISC_SPM_CTRL1_RG_PHYPLL2_MODE_SW); // Disable CLRPLL
		vIO32WriteFldAlign_All(DDRPHY_REG_CLRPLL0, 0, CLRPLL0_RG_RCLRPLL_EN); // Darren NOTE: Don't use PHYPLLx_MODE_SW and it will lock RCLRPLL_EN and RPHYPLL_EN control
	}

	/*TINFO="DRAM : set ddrphy_fb_ck_en=0"*/
	vIO32WriteFldAlign_All(DDRPHY_REG_MISC_RG_DFS_CTRL, 0, MISC_RG_DFS_CTRL_RG_DDRPHY_FB_CK_EN);

	msg3("Shuffle flow complete\n");

	// Record that the other PLL is now the active one.
	p->u1PLLMode = !p->u1PLLMode;
	return;
}
1948 
/*
 * DramcDFSDirectJump_SPMMode - perform a DFS frequency jump driven through
 * the SPM/MD32 LPIF interface (LPIF_LOW_POWER_CFG_x handshake registers)
 * instead of direct RG writes.
 *
 * shu_level is the target SRAM shuffle level. The current conf-bank
 * ping-pong level is read back from LPIF_STATUS_10 and inverted per
 * channel; the SRAM level is replicated into a per-channel nibble field
 * (u2SramLevel). The flow then mirrors the RG variants: select PLL and
 * levels -> SRAM load handshake -> PLL mode switch -> workarounds ->
 * DR_SHU_EN handshake -> old PLL off -> SRAM restore handshake -> release
 * feedback clock -> toggle p->u1PLLMode.
 *
 * All waits are unbounded busy-polls on LPIF ack bits.
 */
static void DramcDFSDirectJump_SPMMode(DRAMC_CTX_T *p, U8 shu_level)
{
	U8 u1ShuAck = 0, u1EnMd32Ch = 0;
	U8 i = 0;
	U8 pingpong_shu_level = 0; // for shu0/1
	U8 u1PingPong = 0;
	U16 u2SramLevel = 0;
#if ENABLE_DFS_RUNTIME_MRW
	U8 cur_fsp = u4IO32ReadFldAlign(DRAMC_REG_ADDR(DRAMC_REG_SA_RESERVE), SA_RESERVE_DFS_FSP_RTMRW);
#endif

	// Build per-MD32-channel ack and enable masks (one bit per channel).
	for (i = 0; i < DPM_CH_NUM; i++)
	{
		u1ShuAck |= (0x1 << i);
		u1EnMd32Ch |= (0x1 << i);
	}

	if (p->u1PLLMode == PHYPLL_MODE)
	{
		msg3("DramcDFSDirectJump_SPMMode to CLRPLL, SHU_LEVEL=%d, ACK=%x\n", shu_level, u1ShuAck);
	}
	else
	{
		msg3("DramcDFSDirectJump_SPMMode to PHYPLL, SHU_LEVEL=%d, ACK=%x\n", shu_level, u1ShuAck);
	}

	//vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 0x1, MISC_STBCAL2_STB_DBG_STATUS); // HJ Huang
	/*TINFO="DRAM : set ddrphy_fb_ck_en=1"*/
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, u1EnMd32Ch, LPIF_LOW_POWER_CFG_0_DDRPHY_FB_CK_EN);
	// Pulse the SRAM shuffle-level latch for 1us.
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_2, u1EnMd32Ch, LPIF_LOW_POWER_CFG_2_DR_SHU_LEVEL_SRAM_LATCH);
	mcDELAY_US(1);
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_2, 0, LPIF_LOW_POWER_CFG_2_DR_SHU_LEVEL_SRAM_LATCH);

	//LPIF_STATUS_10_DRAMC_DR_SHU_LEVEL[1:0] for CHA
	//LPIF_STATUS_10_DRAMC_DR_SHU_LEVEL[3:2] for CHB
	pingpong_shu_level = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_10, LPIF_STATUS_10_DRAMC_DR_SHU_LEVEL); // read shuffle level for dramc conf0/1
	msg3("Ping-pong CONF%d\n", (pingpong_shu_level & 0x1));
	// Per channel: replicate shu_level into a 4-bit lane of u2SramLevel and
	// flip the conf bank bit (2-bit lane) to jump to the other bank.
	for (i = 0; i < DPM_CH_NUM; i++)
	{
		u2SramLevel |= (shu_level << (i*4));
		u1PingPong |= (!((pingpong_shu_level >> (i*2)) & 0x1)) << (i*2);
	}
	pingpong_shu_level = u1PingPong;

	// Point the shuffle machinery at the other PLL.
	if (p->u1PLLMode == PHYPLL_MODE)
	{
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_PHYPLL_SHU_EN);
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, pingpong_shu_level, LPIF_LOW_POWER_CFG_1_DR_SHU_LEVEL);
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, u1EnMd32Ch, LPIF_LOW_POWER_CFG_0_PHYPLL2_SHU_EN);
		msg3("Enable CLRPLL (0x%x 0x%x)\n", pingpong_shu_level, u2SramLevel);
	}
	else
	{
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_PHYPLL2_SHU_EN);
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, pingpong_shu_level, LPIF_LOW_POWER_CFG_1_DR_SHU_LEVEL);
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, u1EnMd32Ch, LPIF_LOW_POWER_CFG_0_PHYPLL_SHU_EN);
		msg3("Enable PHYPLL (0x%x 0x%x)\n", pingpong_shu_level, u2SramLevel);
	}
	mcDELAY_US(1);

#if ENABLE_DFS_RUNTIME_MRW
	DFSRuntimeMRWEn(p, p->u1PLLMode, shu_level, cur_fsp);
#endif

#if 0 //Darren test+
	vIO32WriteFldAlign(SPM_SPM_POWER_ON_VAL0, 0, SPM_POWER_ON_VAL0_SC_DR_SHU_LEVEL);
	vIO32WriteFldAlign(SPM_SPM_POWER_ON_VAL0, shu_level, SPM_POWER_ON_VAL0_SC_DR_SHU_LEVEL);
#else
	// SRAM load handshake: set level, assert LOAD, wait acks, de-assert.
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, u2SramLevel, LPIF_LOW_POWER_CFG_1_DR_SHU_SRAM_LEVEL);
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, u1EnMd32Ch, LPIF_LOW_POWER_CFG_1_DR_SRAM_LOAD);
	while ((u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4, LPIF_STATUS_4_DR_SRAM_LOAD_ACK) & u1ShuAck) != u1ShuAck
#if CHANNEL_NUM > 2
	|| (u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4+SHIFT_TO_CHB_ADDR, LPIF_STATUS_4_DR_SRAM_LOAD_ACK) & u1ShuAck) != u1ShuAck
#endif
	)
	{
		msg3("\twait sram load ack.\n");
	}
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_1, 0, LPIF_LOW_POWER_CFG_1_DR_SRAM_LOAD);
#endif

	//vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 0x2, MISC_STBCAL2_STB_DBG_STATUS);

#if ENABLE_DFS_SSC_WA
	DVS_DMY_RD_ENTR(p);
#endif

	// Switch the target PLL on via its MODE_SW (SPM path owns the PLL here).
	if (p->u1PLLMode == PHYPLL_MODE)
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, u1EnMd32Ch, LPIF_LOW_POWER_CFG_0_PHYPLL2_MODE_SW);
	else
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, u1EnMd32Ch, LPIF_LOW_POWER_CFG_0_PHYPLL_MODE_SW);

#if ENABLE_DFS_SSC_WA
	// NOTE(review): cur_shu_level has no visible declaration in this
	// function -- if ENABLE_DFS_SSC_WA is ever enabled this will not
	// compile as-is; confirm where cur_shu_level is meant to come from.
	DramcSSCHoppingOnOff(p, cur_shu_level, ENABLE);
#endif

#if DFS_NOQUEUE_FLUSH_WA
	NoQueueFlushWA(p, ENABLE);
#endif

#if ENABLE_SRAM_DMA_WA
	SRAMShuRestoreToDPHYWA(p, shu_level, !p->u1PLLMode);
#endif

#if ENABLE_DFS_RUNTIME_MRW
	DFSRTMRW_HwsetWA(p, p->u1PLLMode, !p->u1PLLMode, cur_fsp);
#endif

	DFSHwSetWA(p, p->u1PLLMode, shu_level);
	#if ENABLE_DFS_DEBUG_MODE
	EntryDFSDebugMode(p, CHG_CLK_MODE);
	#endif

	mcDELAY_US(20);

	/*TINFO="DRAM : set ddrphy_fb_ck_en=0"*/
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_DDRPHY_FB_CK_EN);

	//EnableDramcTrackingByShuffle(p, shu_level, DISABLE);

	/*TINFO="DRAM : set ddrphy_fb_ck_en=1"*/
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, u1EnMd32Ch, LPIF_LOW_POWER_CFG_0_DDRPHY_FB_CK_EN);

	//func_imp_tracking_value_backup();
	//func_imp_tracking_off();
	//func_force_mm_ultra();

#if ENABLE_DFS_SSC_WA
	DVS_DMY_RD_EXIT(p);
	//DramcSSCHoppingOnOff(p, cur_shu_level, ENABLE); // for waveform measure
	//mcDELAY_US(10); // for waveform measure
#endif

#if ENABLE_DDR800_OPEN_LOOP_MODE_OPTION
	DDR800semiPowerSavingOn(p, shu_level, DISABLE);
#endif

#if (ENABLE_TX_TRACKING && TX_RETRY_ENABLE)
	SPMTx_Track_Retry_OnOff(p, shu_level, ENABLE);
#endif

	// Kick off the actual shuffle and wait for every channel's ack.
	msg3("SHUFFLE Start\n");
	//vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 0x3, MISC_STBCAL2_STB_DBG_STATUS);
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, u1EnMd32Ch, LPIF_LOW_POWER_CFG_0_DR_SHU_EN);

	#if ENABLE_DFS_DEBUG_MODE
	ChkDFSDebugMode(p, CHG_CLK_MODE);
	// Add WA at here
	ExitDFSDebugMode(p, CHG_CLK_MODE);
	#endif

	while ((u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4, LPIF_STATUS_4_DR_SHU_EN_ACK) & u1ShuAck) != u1ShuAck
#if CHANNEL_NUM > 2
	|| (u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4+SHIFT_TO_CHB_ADDR, LPIF_STATUS_4_DR_SHU_EN_ACK) & u1ShuAck) != u1ShuAck
#endif
	)
	{
		msg3("\twait shu_en ack.\n");
	}

#if DFS_NOQUEUE_FLUSH_LATENCY_CNT
	U8 MaxCnt = u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_RESERVED_6, LPIF_RESERVED_6_MAX_CNT_SHU_EN_HIGH_TO_ACK); // show chx max cnt
	// cnt * 8 * 4.8ns (208M)
	msg("\tMAX CNT = %d\n", MaxCnt);
#endif

#if ENABLE_TX_REBASE_WDQS_DQS_PI_WA
	TxReBaseWDQSDqsPiWA(p, !p->u1PLLMode);
#endif

#if ENABLE_TX_REBASE_ODT_WA
	TxReadBaseODTWA(p, shu_level);
#endif
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_DR_SHU_EN);
	//vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 0x4, MISC_STBCAL2_STB_DBG_STATUS);
	msg3("SHUFFLE End\n");

	// Power down the PLL we just left.
	if (p->u1PLLMode == PHYPLL_MODE)
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_PHYPLL_MODE_SW); // PHYPLL off
	else
		vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_PHYPLL2_MODE_SW); // CLRPLL off

#if ENABLE_DDR800_OPEN_LOOP_MODE_OPTION
	DDR800semiPowerSavingOn(p, shu_level, ENABLE);
#endif

#if (ENABLE_TX_TRACKING && TX_RETRY_ENABLE)
	SPMTx_Track_Retry_OnOff(p, shu_level, DISABLE);
#endif

#if ENABLE_DFS_SSC_WA
	DramcSSCHoppingOnOff(p, cur_shu_level, DISABLE);
#endif

	//func_imp_tracking_on();
#if 1 //Darren test+
	// SRAM restore handshake: assert RESTORE, wait acks, de-assert.
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_2, u1EnMd32Ch, LPIF_LOW_POWER_CFG_2_DR_SRAM_RESTORE);
	while ((u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4, LPIF_STATUS_4_DR_SRAM_RESTORE_ACK) & u1ShuAck) != u1ShuAck
#if CHANNEL_NUM > 2
	|| (u4IO32ReadFldAlign(DDRPHY_MD32_REG_LPIF_STATUS_4+SHIFT_TO_CHB_ADDR, LPIF_STATUS_4_DR_SRAM_RESTORE_ACK) & u1ShuAck) != u1ShuAck
#endif
	)
	{
		msg3("\twait sram restore ack.\n");
	}
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_2, 0, LPIF_LOW_POWER_CFG_2_DR_SRAM_RESTORE);
#endif

#if DFS_NOQUEUE_FLUSH_WA
	NoQueueFlushWA(p, DISABLE);
#endif

	/*TINFO="DRAM : set ddrphy_fb_ck_en=0"*/
	vIO32WriteFldAlign_All(DDRPHY_MD32_REG_LPIF_LOW_POWER_CFG_0, 0, LPIF_LOW_POWER_CFG_0_DDRPHY_FB_CK_EN);
	//Darren-EnableDramcTrackingByShuffle(p, shu_level, ENABLE);

	//-----------------------------------
	//	   TRIGGER DRAM GATING ERROR
	//-----------------------------------
	//func_dram_dummy_read_on();
	//mcDELAY_US(2);
	//func_dram_dummy_read_off();

	// Record that the other PLL is now the active one.
	p->u1PLLMode = !p->u1PLLMode;

	//vIO32WriteFldAlign(DDRPHY_REG_MISC_STBCAL2, 0x5, MISC_STBCAL2_STB_DBG_STATUS);
	msg3("Shuffle flow complete\n");

	return;
}
2179 
/*
 * DramcDFSDirectJump - dispatch a DFS jump to the control path selected at
 * compile time by DRAMC_DFS_MODE (0 = legacy RG, 1 = SPM/MD32,
 * 2 = SRAM-shuffle RG). The chosen mode is recorded in gDVFSCtrlSel.
 */
void DramcDFSDirectJump(DRAMC_CTX_T *p, U8 shu_level)
{
#if (DRAMC_DFS_MODE == 2)
	gDVFSCtrlSel = 2; // SRAM RG mode
#elif (DRAMC_DFS_MODE == 1)
	gDVFSCtrlSel = 1; // MD32
#elif (DRAMC_DFS_MODE == 0)
	gDVFSCtrlSel = 0; // Legacy mode
#endif

	switch (gDVFSCtrlSel)
	{
		case 0:
			// Legacy mode only ping-pongs CONF0/CONF1: SRAM_SHU0 (DDR4266)
			// maps to CONF0, everything else to CONF1.
			DramcDFSDirectJump_RGMode(p, (shu_level == SRAM_SHU0) ? 0 : 1);
			break;
		case 1:
			DramcDFSDirectJump_SPMMode(p, shu_level);
			break;
		default:
			DramcDFSDirectJump_SRAMShuRGMode(p, shu_level);
			break;
	}
}
2206 
/*
 * ShuffleDfsToFSP1 - when the DRAM is currently operating at FSP_1,
 * reprogram MR13 for the high-frequency set and switch frequency via CBT.
 * Supports single and dual ranks; used to double-confirm the jump from
 * CLRPLL back to PHYPLL. No-op when the operating FSP is not FSP_1.
 */
void ShuffleDfsToFSP1(DRAMC_CTX_T *p)
{
	// Guard: nothing to do unless the device is running on FSP_1.
	if (p->dram_fsp != FSP_1)
		return;

	cbt_dfs_mr13_global(p, CBT_HIGH_FREQ);
	cbt_switch_freq(p, CBT_HIGH_FREQ);
}
2219