/*
 * Copyright (c) 2020 HiSilicon (Shanghai) Technologies CO., LIMITED.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 * Description: DDR training implementation
 */

#include "ddr_training_impl.h"
#include "ddr_interface.h"
#include <compiler.h>

#define __common__

unsigned int ddr_read(unsigned addr)
{
	return (*(volatile unsigned int *)((uintptr_t)(addr)));
}

void ddr_write(unsigned val, unsigned addr)
{
	(*(volatile unsigned int *)((uintptr_t)(addr))) = (val);
}

void *ddrtr_memcpy(void *dst, const void *src, unsigned int len)
{
	const char *s = src;
	char *d = dst;

	while (len--)
		*d++ = *s++;
	return dst;
}

void *ddrtr_memset(void *b, int c, unsigned int len)
{
	char *bp = b;

	while (len--)
		*bp++ = (unsigned char)c;
	return b;
}

int ddr_training_by_dmc(struct ddr_cfg_st *cfg)
{
	if (cfg->cmd_st) {
#ifdef DDR_TRAINING_CMD
		return ddr_training_cmd_func(cfg);
#endif
	} else {
		return ddr_training_boot_func(cfg);
	}
	return 0;
}

int ddr_training_by_rank(struct ddr_cfg_st *cfg)
{
	int result = 0;
	int i;

	DDR_PHY_SWITCH_RANK(cfg->cur_phy, cfg->rank_idx);

	for (i = 0; i < cfg->phy[cfg->phy_idx].dmc_num; i++) {
		cfg->dmc_idx = i;
		cfg->cur_dmc = cfg->phy[cfg->phy_idx].dmc[i].addr;
		cfg->cur_pattern = cfg->phy[cfg->phy_idx].dmc[i].ddrt_pattern;
		result += ddr_training_by_dmc(cfg);
	}
	return result;
}

int ddr_training_by_phy(struct ddr_cfg_st *cfg)
{
	int result = 0;
	int i;
	unsigned int phy_mask = 1 << (cfg->phy_idx);
	unsigned int rank_num = cfg->phy[cfg->phy_idx].rank_num;

	for (i = 0; i < rank_num; i++) {
		cfg->rank_idx = i;
		cfg->cur_item = cfg->phy[cfg->phy_idx].rank[i].item;
		if (ddr_training_check_bypass(cfg, phy_mask))
			continue;
		result += ddr_training_by_rank(cfg);
	}
	return result;
}

int ddr_training_all(struct ddr_cfg_st *cfg)
{
	int result = 0;
	int i;

	for (i = 0; i < cfg->phy_num; i++) {
		cfg->phy_idx = i;
		cfg->cur_phy = cfg->phy[i].addr;
		result += ddr_training_by_phy(cfg);
	}
	return result;
}
111
/* DDR training phy/dmc/dram_type config init */
void ddr_training_cfg_set_dmc(struct ddr_cfg_st *cfg)
114 {
115 unsigned int ddrt_pattern;
116
117 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[0].dram_type) {
118 cfg->phy[0].dmc_num = 2;
119 ddrt_pattern = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDRT_PATTERN);
120 cfg->phy[0].dmc[0].addr = DDR_REG_BASE_DMC0;
121 cfg->phy[0].dmc[0].ddrt_pattern = ddrt_pattern & 0xffff;
122 cfg->phy[0].dmc[0].byte_num = ddr_phy_get_byte_num(DDR_REG_BASE_DMC0);
123 cfg->phy[0].dmc[1].addr = DDR_REG_BASE_DMC1;
124 cfg->phy[0].dmc[1].ddrt_pattern = ddrt_pattern >> 16;
125 cfg->phy[0].dmc[1].byte_num = ddr_phy_get_byte_num(DDR_REG_BASE_DMC1);
126 cfg->phy[0].total_byte_num = cfg->phy[0].dmc[0].byte_num + cfg->phy[0].dmc[1].byte_num;
127 } else {
128 cfg->phy[0].dmc_num = 1;
129 cfg->phy[0].dmc[0].addr = DDR_REG_BASE_DMC0;
130 cfg->phy[0].dmc[0].ddrt_pattern = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDRT_PATTERN);
131 cfg->phy[0].dmc[0].byte_num = ddr_phy_get_byte_num(DDR_REG_BASE_DMC0);
132 cfg->phy[0].total_byte_num = cfg->phy[0].dmc[0].byte_num;
133 }
134 DDR_INFO("phy[0] total_byte_num[%x] dram_type[%x]", cfg->phy[0].total_byte_num, cfg->phy[0].dram_type);
135
136 #ifdef DDR_REG_BASE_PHY1
137 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[1].dram_type) {
138 cfg->phy[1].dmc_num = 2;
139 ddrt_pattern = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDRT_PATTERN_SEC);
140 cfg->phy[1].dmc[0].addr = DDR_REG_BASE_DMC2;
141 cfg->phy[1].dmc[0].ddrt_pattern = ddrt_pattern & 0xffff;
142 cfg->phy[1].dmc[0].byte_num = ddr_phy_get_byte_num(DDR_REG_BASE_DMC2);
143 cfg->phy[1].dmc[1].addr = DDR_REG_BASE_DMC3;
144 cfg->phy[1].dmc[1].ddrt_pattern = ddrt_pattern >> 16;
145 cfg->phy[1].dmc[1].byte_num = ddr_phy_get_byte_num(DDR_REG_BASE_DMC3);
146 cfg->phy[1].total_byte_num = cfg->phy[1].dmc[0].byte_num + cfg->phy[1].dmc[1].byte_num;
147 } else {
148 cfg->phy[1].dmc_num = 1;
149 cfg->phy[1].dmc[0].addr = DDR_REG_BASE_DMC1;
150 cfg->phy[1].dmc[0].ddrt_pattern = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDRT_PATTERN_SEC);
151 cfg->phy[1].dmc[0].byte_num = ddr_phy_get_byte_num(DDR_REG_BASE_DMC1);
152 cfg->phy[1].total_byte_num = cfg->phy[1].dmc[0].byte_num;
153 }
154 DDR_INFO("phy[1] total_byte_num[%x] dram_type[%x]", cfg->phy[1].total_byte_num, cfg->phy[1].dram_type);
155 #endif
156 }

void ddr_training_cfg_set_rank(struct ddr_cfg_st *cfg)
158 {
159 cfg->phy[0].rank_num = 1;
160 cfg->phy[0].rank[0].item = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_CFG);
161 cfg->phy[0].rank[0].item_hw = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY0_RANK0);
162
163 cfg->phy[0].rank[1].item = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_CFG_SEC);
164 cfg->phy[0].rank[1].item_hw = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY0_RANK1);
165
166 if (ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY0_RANK1)) {
167 cfg->phy[0].rank_num = 2;
168 }
169
170 DDR_INFO("Rank number PHY0 [%x]", cfg->phy[0].rank_num);
171 DDR_INFO("HW training item PHY0[%x = %x][%x = %x]",
172 (DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY0_RANK0), cfg->phy[0].rank[0].item_hw,
173 (DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY0_RANK1), cfg->phy[0].rank[1].item_hw);
174
175 #ifdef DDR_REG_BASE_PHY1
176 cfg->phy[1].rank_num = 1;
177 cfg->phy[1].rank[0].item = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_CFG);
178 cfg->phy[1].rank[0].item_hw = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY1_RANK0);
179
180 cfg->phy[1].rank[1].item = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_CFG_SEC);
181 cfg->phy[1].rank[1].item_hw = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY1_RANK1);
182
183
184 if (ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY1_RANK1)) {
185 cfg->phy[1].rank_num = 2;
186 }
187
188 DDR_INFO("Rank number PHY1[%x]", cfg->phy[1].rank_num);
189 DDR_INFO("HW training item PHY1[%x = %x][%x = %x]",
190 (DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY1_RANK0), cfg->phy[1].rank[0].item_hw,
191 (DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_HW_PHY1_RANK1), cfg->phy[1].rank[1].item_hw);
192 #endif
193
194 DDR_INFO("SW training item Rank0[%x = %x] Rank1[%x = %x]",
195 (DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_CFG), cfg->phy[0].rank[0].item,
196 (DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_CFG_SEC), cfg->phy[0].rank[1].item);
197 }
198
void ddr_training_cfg_set_phy(struct ddr_cfg_st *cfg)
200 {
201 cfg->phy_num = DDR_PHY_NUM;
202 cfg->phy[0].addr = DDR_REG_BASE_PHY0;
203 cfg->phy[0].dram_type = ddr_read(DDR_REG_BASE_PHY0 + DDR_PHY_DRAMCFG)
204 & PHY_DRAMCFG_TYPE_MASK;
205 #ifdef DDR_REG_BASE_PHY1
206 cfg->phy[1].addr = DDR_REG_BASE_PHY1;
207 cfg->phy[1].dram_type = ddr_read(DDR_REG_BASE_PHY1 + DDR_PHY_DRAMCFG)
208 & PHY_DRAMCFG_TYPE_MASK;
209 #endif
210 }
211
void ddr_training_cfg_init(struct ddr_cfg_st *cfg)
213 {
214 ddrtr_memset(cfg, 0, sizeof(struct ddr_cfg_st));
215 ddr_training_cfg_set_phy(cfg);
216 ddr_training_cfg_set_dmc(cfg);
217 ddr_training_cfg_set_rank(cfg);
218 }
219
/* A 2 GHz CPU executes roughly 2000 "nop" instructions in 1 us */
static inline void ddr_training_delay(unsigned int cnt)
{
	while (cnt--)
		asm("nop");
}
226
/* set auto refresh */
void ddr_training_set_timing(unsigned int base_dmc, unsigned int timing)
{
	ddr_training_delay(DDR_AUTO_TIMING_DELAY);
	ddr_write(timing, base_dmc + DDR_DMC_TIMING2);
	/* a delay is needed after updating the timing register */
	ddr_training_delay(DDR_AUTO_TIMING_DELAY);
}
235
236 #ifdef DDR_TRAINING_STAT_CONFIG
/* Save training result in stat register */
static void ddr_training_save(unsigned int mask, unsigned int phy,
	int byte, int dq)
240 {
241 unsigned int stat;
242 unsigned int phy_index;
243
244 stat = ddr_read(DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_STAT);
245 /* only record the first error */
246 if (stat)
247 return;
248
249 stat = mask;
250
251 if (0 != phy) {
252 phy_index = (DDR_REG_BASE_PHY0 == phy ?
253 DDR_ERR_PHY0 : DDR_ERR_PHY1);
254 stat |= phy_index;
255 }
256
257 if (-1 != byte)
258 stat |= ((unsigned int)byte << DDR_ERR_BYTE_BIT);
259
260 if (-1 != dq)
261 stat |= ((unsigned int)dq << DDR_ERR_DQ_BIT);
262
263 ddr_write(stat, DDR_REG_BASE_SYSCTRL + SYSCTRL_DDR_TRAINING_STAT);
264 }
265 #endif
266
/* Record error code in register */
void ddr_training_stat(unsigned int mask, unsigned int phy, int byte, int dq)
269 {
270 ddr_training_error(mask, phy, byte, dq);
271 #ifdef DDR_TRAINING_STAT_CONFIG
272 ddr_training_save(mask, phy, byte, dq);
273 #endif
274 }
275
/* Check whether the DDR training item is bypassed */
int ddr_training_check_bypass(struct ddr_cfg_st *cfg, unsigned int mask)
{
	/* training item disabled */
	if ((cfg->cur_item) & mask) {
		DDR_DEBUG("DDR training [%x] is disabled, rank[%x] cfg[%x]",
			mask, cfg->rank_idx, cfg->cur_item);
		return DDR_TRUE;
	} else {
		return DDR_FALSE;
	}
}
288
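/*
 * Illustrative usage sketch (not the exact boot flow): each training step in
 * this file is gated on the per-rank item mask, e.g.
 *
 *     if (ddr_training_check_bypass(cfg, DDR_BYPASS_DATAEYE_MASK))
 *         return 0;
 *
 * Returning early means the item is bypassed for the current rank. The
 * DDR_BYPASS_* bits are the same masks already used elsewhere in this file.
 */
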
#if !defined(DDR_TRAINING_CUT_CODE_CONFIG) || defined(DDR_TRAINING_CMD)
/**
 * Check whether the PHY is disabled.
 * DDR_TRUE: PHY is disabled.
 * DDR_FALSE: PHY is not disabled.
 */
int ddr_training_phy_disable(int index)
{
	return 0;
}
299
/* Save register value before training */
void ddr_training_save_reg(struct ddr_cfg_st *cfg,
	struct tr_relate_reg *relate_reg, unsigned int mask)
303 {
304 unsigned int base_dmc = cfg->cur_dmc;
305 unsigned int base_phy = cfg->cur_phy;
306
307 /* save reg value */
308 relate_reg->auto_ref_timing =
309 ddr_read(base_dmc + DDR_DMC_TIMING2);
310 relate_reg->power_down =
311 ddr_read(base_dmc + DDR_DMC_CFG_PD);
312 relate_reg->misc_scramb = ddr_read(base_phy + DDR_PHY_MISC);
	/* Static registers have to be read twice to get the right value. */
314 relate_reg->ac_phy_ctl =
315 ddr_read(base_phy + DDR_PHY_ACPHYCTL4);
316 relate_reg->ac_phy_ctl =
317 ddr_read(base_phy + DDR_PHY_ACPHYCTL4);
318
319 /* set new value */
320 switch (mask) {
321 case DDR_BYPASS_WL_MASK:
322 case DDR_BYPASS_LPCA_MASK:
323 /* disable auto refresh */
324 ddr_training_set_timing(base_dmc,
325 relate_reg->auto_ref_timing & DMC_AUTO_TIMING_DIS);
326 break;
327 case DDR_BYPASS_GATE_MASK:
328 /* disable auto refresh */
329 ddr_training_set_timing(base_dmc,
330 relate_reg->auto_ref_timing & DMC_AUTO_TIMING_DIS);
331
332 if (!(ddr_read(base_phy + DDR_PHY_DRAMCFG) & PHY_DRAMCFG_MA2T)) /* set 1T */
333 ddr_write(0x0, base_phy + DDR_PHY_ACPHYCTL4);
334 break;
335 case DDR_BYPASS_HW_MASK:
336 if (!(ddr_read(base_phy + DDR_PHY_DRAMCFG) & PHY_DRAMCFG_MA2T)) /* set 1T */
337 ddr_write(0x0, base_phy + DDR_PHY_ACPHYCTL4);
338 break;
339 default:
340 break;
341 }
342
343 ddr_write(relate_reg->power_down & DMC_POWER_DOWN_DIS,
344 base_dmc + DDR_DMC_CFG_PD);
345 ddr_write(relate_reg->misc_scramb & PHY_MISC_SCRAMB_DIS,
346 base_phy + DDR_PHY_MISC);
347
348 DDR_DQSSWAP_SAVE_FUNC(relate_reg->swapdfibyte_en, base_phy);
349
350 DDR_AXI_SAVE_FUNC(relate_reg);
351
352 DDR_RNKVOL_SAVE_FUNC(relate_reg, base_dmc);
353
354 /* save customer reg */
355 DDR_TRAINING_SAVE_REG_FUNC((void *)relate_reg, mask);
356
357 ddr_phy_cfg_update(base_phy);
358
359 DDR_ASM_DSB();
360 }
361
/* Restore register value after training */
void ddr_training_restore_reg(struct ddr_cfg_st *cfg,
	struct tr_relate_reg *relate_reg)
365 {
366 unsigned int base_dmc = cfg->cur_dmc;
367 unsigned int base_phy = cfg->cur_phy;
368
369 /* enable auto refresh */
370 ddr_training_set_timing(base_dmc, relate_reg->auto_ref_timing);
371 ddr_write(relate_reg->power_down, base_dmc + DDR_DMC_CFG_PD);
372 ddr_write(relate_reg->misc_scramb, base_phy + DDR_PHY_MISC);
373 if (!(ddr_read(base_phy + DDR_PHY_DRAMCFG) & PHY_DRAMCFG_MA2T))
374 ddr_write(relate_reg->ac_phy_ctl, base_phy + DDR_PHY_ACPHYCTL4);
375
376 DDR_DQSSWAP_RESTORE_FUNC(relate_reg->swapdfibyte_en, base_phy);
377
378 DDR_AXI_RESTORE_FUNC(relate_reg);
379
380 DDR_RNKVOL_RESTORE_FUNC(relate_reg, base_dmc);
381
382 /* restore customer reg */
383 DDR_TRAINING_RESTORE_REG_FUNC((void *)relate_reg);
384
385 ddr_phy_cfg_update(base_phy);
386
387 DDR_ASM_DSB();
388 }
389
/* Switch AXI to DMC0/DMC1/DMC2/DMC3 for DDRT test */
void ddr_training_switch_axi(struct ddr_cfg_st *cfg)
392 {
393 DDR_AXI_SWITCH_FUNC(cfg);
394
395 DDR_RNKVOL_SET_FUNC(cfg);
396 }
397 #endif
398
399 #if defined(DDR_WL_TRAINING_CONFIG) || defined(DDR_MPR_TRAINING_CONFIG)
400
/* Execute DMC SFC command */
static void ddr_dmc_sfc_cmd(unsigned int base_dmc, unsigned int sfc_cmd,
	unsigned int sfc_addr, unsigned int sfc_bank)
404 {
405 unsigned int count = 0;
406
407 /* set sfc cmd */
408 DMC_SFC_CMD_WRITE(sfc_cmd, base_dmc + DDR_DMC_SFCCMD);
409 /* set col and row */
410 ddr_write(sfc_addr, base_dmc + DDR_DMC_SFCADDR);
411 /* set bank */
412 DMC_SFC_BANK_WRITE(sfc_bank, base_dmc + DDR_DMC_SFCBANK);
	/* execute cmd */
414 ddr_write(0x1, base_dmc + DDR_DMC_SFCREQ);
415
416 DDR_ASM_DSB();
417
418 while (count < DDR_SFC_WAIT_TIMEOUT) { /* wait command finished */
419 if (!(ddr_read(base_dmc + DDR_DMC_SFCREQ) & 0x1))
420 break;
421
422 count++;
423 }
424
	if (count >= DDR_SFC_WAIT_TIMEOUT)
		DDR_ERROR("SFC cmd wait timeout.");
427 }
428 #endif
429
430 #if defined(DDR_HW_TRAINING_CONFIG) || defined(DDR_DCC_TRAINING_CONFIG)
431
/* Exit or enter auto self-refresh */
static int ddr_training_easr(unsigned int base_dmc, unsigned int sref_req)
434 {
435 unsigned int count = DDR_HWR_WAIT_TIMEOUT;
436 if (DDR_EXIT_SREF == sref_req) {
437 /* Exit Auto-self refresh */
438 ddr_write(DMC_CTRL_SREF_EXIT, base_dmc + DDR_DMC_CTRL_SREF);
439
440 while (count--) {
441 if (!(ddr_read(base_dmc + DDR_DMC_CURR_FUNC)
442 & DMC_CURR_FUNC_IN_SREF_MASK))
443 break;
444 }
445 } else if (DDR_ENTER_SREF == sref_req) {
446 /* Enter Auto-self refresh */
447 ddr_write(DMC_CTRL_SREF_ENTER, base_dmc + DDR_DMC_CTRL_SREF);
448
449 while (count--) {
450 if (ddr_read(base_dmc + DDR_DMC_CURR_FUNC)
451 & DMC_CURR_FUNC_IN_SREF_MASK)
452 break;
453 }
454 }
455
456 if (count == 0xffffffff) {
457 DDR_FATAL("SREF wait timeout.");
458 ddr_training_stat(DDR_ERR_HW_RD_DATAEYE, -1, -1, -1);
459 return -1;
460 }
461 return 0;
462 }
463
/* DDR hw/dcc training exit or enter auto self-refresh */
static int ddr_training_ctrl_easr(struct ddr_cfg_st *cfg, unsigned int sref_req)
466 {
467 int result = 0;
468 int i;
469 struct ddr_phy_st *phy_st = &cfg->phy[cfg->phy_idx];
470
471 for (i = 0; i < phy_st->dmc_num; i++) {
472 result += ddr_training_easr(phy_st->dmc[i].addr, sref_req);
473 }
474
475 return result;
476 }
477
static void ddr_training_save_timing(struct ddr_cfg_st *cfg, struct ddr_timing_st *timing_st)
479 {
480 int i;
481 struct ddr_phy_st *phy_st = &cfg->phy[cfg->phy_idx];
482
	for (i = 0; i < phy_st->dmc_num; i++) {
		timing_st->val[i] = ddr_read(phy_st->dmc[i].addr + DDR_DMC_TIMING2);
486 /* disable auto refresh */
487 ddr_training_set_timing(phy_st->dmc[i].addr, timing_st->val[i] & DMC_AUTO_TIMING_DIS);
488 }
489 }
490
static void ddr_training_restore_timing(struct ddr_cfg_st *cfg, struct ddr_timing_st *timing_st)
492 {
493 int i;
494 struct ddr_phy_st *phy_st = &cfg->phy[cfg->phy_idx];
495 for (i = 0; i < phy_st->dmc_num; i++) {
496 ddr_training_set_timing(phy_st->dmc[i].addr, timing_st->val[i]);
497 }
498 }
#endif /* DDR_HW_TRAINING_CONFIG || DDR_DCC_TRAINING_CONFIG */
500
/**
 * Update the delay settings in the registers to the PHY immediately,
 * making the new delay values take effect.
 */
void ddr_phy_cfg_update(unsigned int base_phy)
506 {
507 unsigned int tmp;
508
509 tmp = ddr_read(base_phy + DDR_PHY_MISC);
510 tmp |= (1 << PHY_MISC_UPDATE_BIT);
511 /* update new config to PHY */
512 ddr_write(tmp, base_phy + DDR_PHY_MISC);
513 tmp &= ~(1 << PHY_MISC_UPDATE_BIT);
514 ddr_write(tmp, base_phy + DDR_PHY_MISC);
515 tmp = ddr_read(base_phy + DDR_PHY_PHYINITCTRL);
516 /* set 1 to issue PHY counter reset signal */
517 tmp |= (1 << PHY_PHYCONN_RST_BIT);
518 ddr_write(tmp, base_phy + DDR_PHY_PHYINITCTRL);
519 /* set 0 to end the reset signal */
520 tmp &= ~(1 << PHY_PHYCONN_RST_BIT);
521 ddr_write(tmp, base_phy + DDR_PHY_PHYINITCTRL);
522
523 DDR_ASM_DSB();
524 }
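
/*
 * Illustrative usage sketch: delay-line changes in this file follow the same
 * read-modify-write pattern and then call ddr_phy_cfg_update() so the new
 * value is latched into the PHY:
 *
 *     val = ddr_read(base_phy + reg);
 *     val = (val & ~field_mask) | new_field;
 *     ddr_write(val, base_phy + reg);
 *     ddr_phy_cfg_update(base_phy);
 *
 * Here "reg", "field_mask" and "new_field" are placeholders, not register
 * names from this driver.
 */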
525
/* Set delay value of the bit delay line of the DATA block */
void ddr_phy_set_dq_bdl(struct ddr_cfg_st *cfg, unsigned int value)
528 {
529 unsigned int val;
530 unsigned int offset;
531 unsigned int dq;
532 unsigned int base_phy = cfg->cur_phy;
533 unsigned int byte_index = cfg->cur_byte;
534 unsigned int rank = cfg->rank_idx;
535
536 dq = cfg->cur_dq & 0x7;
537 if (DDR_MODE_WRITE == cfg->cur_mode) {
538 if (dq < 4)
539 offset = DDR_PHY_DXNWDQNBDL0(rank, byte_index);
540 else
541 offset = DDR_PHY_DXNWDQNBDL1(rank, byte_index);
542 } else {
543 if (dq < 4)
544 offset = DDR_PHY_DXNRDQNBDL0(rank, byte_index);
545 else
546 offset = DDR_PHY_DXNRDQNBDL1(rank, byte_index);
547 }
548
549 dq &= 0x3;
550 val = ddr_read(base_phy + offset);
551 val &= ~(0xFF << (dq << 3));
552 val |= ((PHY_BDL_MASK & value) << ((dq << 3) + PHY_BDL_DQ_BIT));
553 ddr_write(val, base_phy + offset);
554
555 ddr_phy_cfg_update(base_phy);
556 }
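
/*
 * Worked example of the lane selection above (values follow directly from the
 * code): cur_dq = 5 in read mode selects DDR_PHY_DXNRDQNBDL1 (dq >= 4), then
 * dq &= 0x3 leaves lane 1, so the 8-bit field starting at bit
 * (1 << 3) + PHY_BDL_DQ_BIT is cleared and rewritten with the new BDL value.
 */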
557
/* Get PHY DQ value */
unsigned int ddr_phy_get_dq_bdl(struct ddr_cfg_st *cfg)
560 {
561 unsigned int val;
562 unsigned int offset;
563 unsigned int dq;
564 unsigned int byte_index = cfg->cur_byte;
565 unsigned int rank = cfg->rank_idx;
566
567 dq = cfg->cur_dq & 0x7;
568 if (DDR_MODE_WRITE == cfg->cur_mode) {
569 if (dq < 4)
570 offset = DDR_PHY_DXNWDQNBDL0(rank, byte_index);
571 else
572 offset = DDR_PHY_DXNWDQNBDL1(rank, byte_index);
573 } else {
574 if (dq < 4)
575 offset = DDR_PHY_DXNRDQNBDL0(rank, byte_index);
576 else
577 offset = DDR_PHY_DXNRDQNBDL1(rank, byte_index);
578 }
579
580 dq &= 0x3;
581 val = (ddr_read(cfg->cur_phy + offset) >> ((dq << 3) + PHY_BDL_DQ_BIT)) & PHY_BDL_MASK;
582
583 return val;
584 }
585
/* Get byte number */
unsigned int ddr_phy_get_byte_num(unsigned int base_dmc)
{
	unsigned int byte_num;

	/* memory width -> byte number */
592 byte_num = ((ddr_read(base_dmc + DDR_DMC_CFG_DDRMODE)
593 >> DMC_MEM_WIDTH_BIT) & DMC_MEM_WIDTH_MASK) << 1;
594
595 /* for codedex */
596 if (byte_num > DDR_PHY_BYTE_MAX) {
597 byte_num = DDR_PHY_BYTE_MAX;
598 DDR_ERROR("get byte num fail");
599 }
600
601 return byte_num;
602 }
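
/*
 * Worked example (arithmetic follows the code, the field encoding itself is
 * chip-specific): a DMC_MEM_WIDTH field value of 1 gives 1 << 1 = 2 bytes,
 * a value of 2 gives 4 bytes; any result above DDR_PHY_BYTE_MAX is clamped
 * and reported as an error.
 */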
603
604
static void ddr_rdqs_sync_rdm(struct ddr_cfg_st *cfg, int offset)
606 {
607 unsigned int rdqnbdl;
608 int rdm;
609
610 rdqnbdl = ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQNBDL2(cfg->rank_idx, cfg->cur_byte));
611 rdm = (rdqnbdl >> PHY_RDM_BDL_BIT) & PHY_RDM_BDL_MASK;
612 rdm += offset;
613 rdm = (rdm < 0 ? 0 : rdm);
614 rdm = (rdm > PHY_RDM_BDL_MASK ? PHY_RDM_BDL_MASK : rdm);
615 rdqnbdl = rdqnbdl & (~(PHY_RDM_BDL_MASK << PHY_RDM_BDL_BIT));
616 ddr_write(rdqnbdl | ((unsigned int)rdm << PHY_RDM_BDL_BIT), cfg->cur_phy + DDR_PHY_DXNRDQNBDL2(cfg->rank_idx, cfg->cur_byte));
617 }
618
static void ddr_rdqs_sync_rank_rdq(struct ddr_cfg_st *cfg, int offset)
620 {
621 int dq_val;
622 int i;
623
624 /* sync other rank rdm */
625 ddr_rdqs_sync_rdm(cfg, offset);
626
627 /* sync other rank rdq */
628 DDR_DEBUG("Before sync rank[%x] byte[%x] dq[%x = %x][%x = %x] offset[%x]",
629 cfg->rank_idx, cfg->cur_byte,
630 cfg->cur_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, cfg->cur_byte),
631 ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, cfg->cur_byte)),
632 cfg->cur_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, cfg->cur_byte),
633 ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, cfg->cur_byte)), offset);
634
635 for (i = 0; i < DDR_PHY_BIT_NUM; i++) {
636 cfg->cur_dq = i;
637 dq_val = (int)ddr_phy_get_dq_bdl(cfg);
638 dq_val += offset;
639 dq_val = (dq_val < 0 ? 0 : dq_val);
640 dq_val = (dq_val > PHY_BDL_MASK ? PHY_BDL_MASK : dq_val);
641 ddr_phy_set_dq_bdl(cfg, dq_val);
642 }
643
644 DDR_DEBUG("After sync rank[%x] byte[%x] dq[%x = %x][%x = %x]",
645 cfg->rank_idx, cfg->cur_byte,
646 cfg->cur_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, cfg->cur_byte),
647 ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, cfg->cur_byte)),
648 cfg->cur_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, cfg->cur_byte),
649 ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, cfg->cur_byte)));
650 }
651
static void ddr_bdl_adj(struct ddr_cfg_st *cfg)
653 {
654 int i;
655 int value_num = 10;
656 unsigned int rank = cfg->rank_idx;
657 unsigned int base_phy = cfg->cur_phy;
658 unsigned int byte_idx = cfg->cur_byte;
659 unsigned int bdl[value_num];
660 unsigned int min = 0xffffffff;
661 unsigned int dq03, dq47, rdm, rdqs;
662
663 dq03 = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL0(rank, byte_idx));
664 dq47 = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL1(rank, byte_idx));
665 rdm = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL2(rank, byte_idx));
666 rdqs = ddr_read(base_phy + DDR_PHY_DXNRDQSDLY(byte_idx));
667
668 bdl[0] = (dq03 >> PHY_BDL_DQ0_BIT) & PHY_BDL_MASK;
669 bdl[1] = (dq03 >> PHY_BDL_DQ1_BIT) & PHY_BDL_MASK;
670 bdl[2] = (dq03 >> PHY_BDL_DQ2_BIT) & PHY_BDL_MASK;
671 bdl[3] = (dq03 >> PHY_BDL_DQ3_BIT) & PHY_BDL_MASK;
672 bdl[4] = (dq47 >> PHY_BDL_DQ0_BIT) & PHY_BDL_MASK;
673 bdl[5] = (dq47 >> PHY_BDL_DQ1_BIT) & PHY_BDL_MASK;
674 bdl[6] = (dq47 >> PHY_BDL_DQ2_BIT) & PHY_BDL_MASK;
675 bdl[7] = (dq47 >> PHY_BDL_DQ3_BIT) & PHY_BDL_MASK;
676 bdl[8] = (rdm >> PHY_RDM_BDL_BIT) & PHY_RDM_BDL_MASK;
677 bdl[9] = (rdqs >> PHY_RDQS_BDL_BIT) & PHY_RDQS_BDL_MASK;
678
679 for (i = 0; i < value_num; i++) {
680 if (bdl[i] < min)
681 min = bdl[i];
682 }
683
684 dq03 = ((bdl[0] - min) << PHY_BDL_DQ0_BIT) | ((bdl[1] - min) << PHY_BDL_DQ1_BIT) |
685 ((bdl[2] - min) << PHY_BDL_DQ2_BIT) | ((bdl[3] - min) << PHY_BDL_DQ3_BIT);
686 dq47 = ((bdl[4] - min) << PHY_BDL_DQ0_BIT) | ((bdl[5] - min) << PHY_BDL_DQ1_BIT) |
687 ((bdl[6] - min) << PHY_BDL_DQ2_BIT) | ((bdl[7] - min) << PHY_BDL_DQ3_BIT);
688 rdm = (rdm & (~(PHY_RDM_BDL_MASK << PHY_RDM_BDL_BIT))) | ((bdl[8] - min) << PHY_RDM_BDL_BIT);
689 rdqs = (rdqs & (~(PHY_RDQS_BDL_MASK << PHY_RDQS_BDL_BIT))) | ((bdl[9] - min) << PHY_RDQS_BDL_BIT);
690
691 ddr_write(dq03, base_phy + DDR_PHY_DXNRDQNBDL0(rank, byte_idx));
692 ddr_write(dq47, base_phy + DDR_PHY_DXNRDQNBDL1(rank, byte_idx));
693 ddr_write(rdm, base_phy + DDR_PHY_DXNRDQNBDL2(rank, byte_idx));
694 ddr_write(rdqs, base_phy + DDR_PHY_DXNRDQSDLY(byte_idx));
695 }
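
/*
 * Worked example of the normalization above (illustrative values): if the
 * eight DQ BDLs, RDM and RDQS read back as {6, 7, 5, 6, 8, 7, 6, 5, 9, 5},
 * the minimum is 5 and every field is rewritten minus 5, giving
 * {1, 2, 0, 1, 3, 2, 1, 0, 4, 0}. The relative skew between the lines is kept
 * while the common offset is removed from the delay chains.
 */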
696
697 #define __ddrt__
698 #ifdef DDR_DDRT_SPECIAL_CONFIG
/* Some special DDRT versions need the register to be read repeatedly */
static unsigned int ddr_ddrt_read(unsigned int addr)
701 {
702 int times = 0;
703 unsigned int data0, data1, data2;
704 do {
705 data0 = ddr_read(addr);
706 data1 = ddr_read(addr);
707 data2 = ddr_read(addr);
708 times++;
709 } while (((data0 != data1) || (data1 != data2))
710 && (times < DDRT_READ_TIMEOUT));
711
712 if (times >= DDRT_READ_TIMEOUT) {
713 DDR_FATAL("DDRT wait timeout.");
714 ddr_training_stat(DDR_ERR_DDRT_TIME_OUT, 0, -1, -1);
715 }
716
717 return data0;
718 }
719
/* Some special DDRT versions need the register to be written twice */
static void ddr_ddrt_write(unsigned int data, unsigned int addr)
{
	unsigned int tmp;

	tmp = ddr_read(addr); /* dummy read before the write */
	(void)tmp;
	ddr_write(data, addr);
	ddr_write(data, addr);
}
728 #endif /* DDR_DDRT_SPECIAL_CONFIG */
729
static unsigned int ddr_get_rank_size(struct ddr_cfg_st *cfg)
731 {
732 unsigned int base_dmc = cfg->cur_dmc;
733 unsigned int rnkvol;
734 unsigned int mem_bank, mem_row, mem_col, mem_width;
735 unsigned int size;
736
737 mem_width = (ddr_read(base_dmc + DDR_DMC_CFG_DDRMODE) >> DMC_MEM_WIDTH_BIT) & DMC_MEM_WIDTH_MASK;
738 rnkvol = ddr_read(base_dmc + DDR_DMC_CFG_RNKVOL(0));
739 mem_bank = (rnkvol >> DMC_RNKVOL_MEM_BANK_BIT) & DMC_RNKVOL_MEM_BANK_MASK;
740 mem_row = (rnkvol >> DMC_RNKVOL_MEM_ROW_BIT) & DMC_RNKVOL_MEM_ROW_MASK;
741 mem_col = rnkvol & DMC_RNKVOL_MEM_COL_MASK;
742
743 size = 1UL << ((mem_bank + 2) + (mem_row + 11) + (mem_col + 8) + mem_width);
744 DDR_DEBUG("rank size[%x]", size);
745
746 return size;
747 }
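
/*
 * Worked example of the size formula (field values are illustrative, the
 * arithmetic follows the code): with mem_bank = 1, mem_row = 4, mem_col = 2
 * and mem_width = 1 the rank size is
 * 1UL << ((1 + 2) + (4 + 11) + (2 + 8) + 1) = 1UL << 29 = 512 MB.
 */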
748
/* Init DDRT register before DDRT test */
void ddr_ddrt_init(struct ddr_cfg_st *cfg, unsigned int mode)
751 {
752 unsigned int mem_width;
753 unsigned int mem_config;
754 unsigned int offset = 0;
755
756 if (1 == cfg->rank_idx)
757 offset = ddr_get_rank_size(cfg);
758
759 DDR_TRAINING_DDRT_PREPARE_FUNC();
760
761 mem_width = ((ddr_read(cfg->cur_dmc + DDR_DMC_CFG_DDRMODE)
762 >> DMC_MEM_WIDTH_BIT) & DMC_MEM_WIDTH_MASK);
763 mem_config = ((mem_width - 1) << DDRT_DDR_MEM_WIDTH)
764 | DDRT_DDR_COL_WIDTH | DDRT_DDR_ROW_WIDTH
765 | DDRT_DDR_BANK_WIDTH;
766 /* DDRT SDRAM config */
767 DDRT_REG_WRITE(mem_config, DDR_REG_BASE_DDRT + DDRT_MEM_CONFIG);
768 /* DDR Address Base */
769 DDRT_REG_WRITE(DDRT_GET_TEST_ADDR(DDRT_CFG_BASE_ADDR),
770 DDR_REG_BASE_DDRT + DDRT_DDR_BASE_ADDR);
771 /* DDRT test DDR using space */
772 DDRT_REG_WRITE(DDRT_GET_TEST_ADDR(ddr_ddrt_get_test_addr() + offset),
773 DDR_REG_BASE_DDRT + DDRT_ADDR);
774 DDRT_REG_WRITE(DDRT_CFG_SEED, DDR_REG_BASE_DDRT + DDRT_SEED);
775
776 if (DDR_DDRT_MODE_GATE == mode) {
777 /* Read or Write Once */
778 DDRT_REG_WRITE(DDRT_CFG_BURST_CFG_GATE,
779 DDR_REG_BASE_DDRT + DDRT_BURST_CONFIG);
780 DDRT_REG_WRITE(0x0, DDR_REG_BASE_DDRT + DDRT_BURST_NUM);
781 DDRT_REG_WRITE(0x0, DDR_REG_BASE_DDRT + DDRT_ADDR_NUM);
782 DDRT_REG_WRITE(0x0, DDR_REG_BASE_DDRT + DDRT_LOOP_NUM);
783 DDRT_REG_WRITE(DDRT_CFG_REVERSED,
784 DDR_REG_BASE_DDRT + DDRT_REVERSED_DQ);
785 } else {
		/* reversed data from the register init table */
787 /* 128bit BURST4 */
788 DDRT_REG_WRITE(DDRT_CFG_BURST_CFG_DATAEYE,
789 DDR_REG_BASE_DDRT + DDRT_BURST_CONFIG);
790 DDRT_REG_WRITE(cfg->phy[cfg->phy_idx].dmc[cfg->dmc_idx].ddrt_pattern,
791 DDR_REG_BASE_DDRT + DDRT_REVERSED_DQ);
792 DDRT_REG_WRITE(DDRT_CFG_BURST_NUM,
793 DDR_REG_BASE_DDRT + DDRT_BURST_NUM);
794 DDRT_REG_WRITE(DDRT_CFG_ADDR_NUM,
795 DDR_REG_BASE_DDRT + DDRT_ADDR_NUM);
796 DDRT_REG_WRITE(DDRT_CFG_LOOP_NUM,
797 DDR_REG_BASE_DDRT + DDRT_LOOP_NUM);
798 }
799
800 DDR_DEBUG("DDRT ADDR[%x = %x]", (DDR_REG_BASE_DDRT + DDRT_ADDR),
801 ddr_read(DDR_REG_BASE_DDRT + DDRT_ADDR));
802 }
803
804 /**
805 * ddr_ddrt_test
806 * @mask : DDRT option mask.
807 * @byte : DDR byte index.
808 * @dq : DDR dq index.
809 *
810 * DDRT test. Support read_only mode and write_read_compare mode.
811 * Success return 0, fail return -1.
812 */
int ddr_ddrt_test(unsigned int mask, int byte, int dq)
814 {
815 unsigned int regval;
816 unsigned int err_ovfl;
817 unsigned int err_cnt;
818 unsigned int dq_num;
819 unsigned int dq_tmp;
820 unsigned int times = 0;
821
822 DDRT_REG_WRITE(mask | DDRT_CFG_START, DDR_REG_BASE_DDRT + DDRT_OP);
823 DDRT_REG_WRITE(0, DDR_REG_BASE_DDRT + DDRT_STATUS);
824
825 DDR_ASM_DSB();
826
827 do {
828 regval = DDRT_REG_READ(DDR_REG_BASE_DDRT + DDRT_STATUS);
829 times++;
830 } while ((!(regval & DDRT_TEST_DONE_MASK))
831 && (times < DDRT_WAIT_TIMEOUT));
832
833 if (times >= DDRT_WAIT_TIMEOUT) {
834 DDR_FATAL("DDRT wait timeout.");
835 ddr_training_stat(DDR_ERR_DDRT_TIME_OUT, 0, -1, -1);
836 return -1;
837 }
838
839 /* DDRT_READ_ONLY_MODE */
840 if (DDRT_READ_ONLY_MODE == (mask & DDRT_TEST_MODE_MASK))
841 return 0; /* return when DDRT finish */
842
	/* DDRT_WR_COMPRARE_MODE: no error occurred, the test passed. */
844 if (regval & DDRT_TEST_PASS_MASK)
845 return 0;
846
847 if (-1 != dq) { /* check for dq */
848 dq_num = ((unsigned int)byte << 3) + dq;
849 err_ovfl = DDRT_REG_READ(DDR_REG_BASE_DDRT
850 + DDRT_DQ_ERR_OVFL) & (1 << dq_num);
851 if (err_ovfl)
852 return -1;
853
854 if (dq > 3)
855 dq_tmp = (unsigned int)(dq - 4) << 3;
856 else
857 dq_tmp = (unsigned int)dq << 3;
858 err_cnt = DDRT_REG_READ(DDR_REG_BASE_DDRT
859 + DDRT_DQ_ERR_CNT(((unsigned int)byte << 1) + ((unsigned int)dq >> 2)));
860 err_cnt = err_cnt & (0xff << dq_tmp);
861 if (err_cnt)
862 return -1;
863 } else if (-1 != byte) { /* check for byte */
864 err_ovfl = DDRT_REG_READ(DDR_REG_BASE_DDRT
865 + DDRT_DQ_ERR_OVFL) & (0xff << ((unsigned int)byte << 3));
866 if (err_ovfl)
867 return -1;
868
869 err_cnt = DDRT_REG_READ(DDR_REG_BASE_DDRT
870 + DDRT_DQ_ERR_CNT((unsigned int)byte << 1));
871 err_cnt += DDRT_REG_READ(DDR_REG_BASE_DDRT
872 + DDRT_DQ_ERR_CNT(((unsigned int)byte << 1) + 1));
873 if (err_cnt)
874 return -1;
875 }
876
877 return 0;
878 }
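
/*
 * Illustrative usage sketch: ddr_ddrt_check() below drives this routine in
 * write-read-compare mode, e.g.
 *
 *     DDRT_REG_WRITE(pattern, DDR_REG_BASE_DDRT + DDRT_REVERSED_DQ);
 *     if (ddr_ddrt_test(DDRT_WR_COMPRARE_MODE | DDRT_PATTERM_PRBS9, byte, dq))
 *         result = -1;
 *
 * Passing -1 for byte or dq skips the per-byte and per-dq error-counter
 * checks and only the overall pass flag is evaluated.
 */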
879
/* Check ddrt test result. Success return 0, fail return -1 */
static int ddr_ddrt_check(struct ddr_cfg_st *cfg)
882 {
883 unsigned int byte_index_to_dmc = cfg->cur_byte;
884
	/* DDRT tests the byte relative to its DMC; make sure the index does not overflow */
886 if (cfg->cur_byte >= (cfg->dmc_idx << 1))
887 byte_index_to_dmc = cfg->cur_byte - (cfg->dmc_idx << 1);
888
889 DDRT_REG_WRITE(0, DDR_REG_BASE_DDRT + DDRT_REVERSED_DQ);
890 if (ddr_ddrt_test(DDRT_WR_COMPRARE_MODE | DDRT_PATTERM_PRBS9,
891 byte_index_to_dmc, cfg->cur_dq))
892 return -1;
893
894 DDRT_REG_WRITE(cfg->cur_pattern, DDR_REG_BASE_DDRT + DDRT_REVERSED_DQ);
895 if (ddr_ddrt_test(DDRT_WR_COMPRARE_MODE | DDRT_PATTERM_PRBS11,
896 byte_index_to_dmc, cfg->cur_dq))
897 return -1;
898
899 return 0;
900 }
901
902 #define __dataeye_adjust__
903 #ifdef DDR_TRAINING_ADJUST_CONFIG
static unsigned int ddr_adjust_get_average(struct ddr_cfg_st *cfg)
905 {
906 unsigned int dq0_3, dq4_7, val;
907 unsigned int base_phy = cfg->cur_phy;
908 unsigned int byte_index = cfg->cur_byte;
909 unsigned int rank = cfg->rank_idx;
910
911 if (DDR_MODE_WRITE == cfg->cur_mode)
912 return (ddr_read(base_phy + DDR_PHY_DXNWDQNBDL2(rank, byte_index))
913 >> PHY_WDM_BDL_BIT) & PHY_BDL_MASK;
914
915 /* read */
916 dq0_3 = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL0(rank, byte_index));
917 dq4_7 = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL1(rank, byte_index));
918
919 val = ((dq0_3 >> PHY_BDL_DQ0_BIT) & PHY_BDL_MASK)
920 + ((dq0_3 >> PHY_BDL_DQ1_BIT) & PHY_BDL_MASK)
921 + ((dq0_3 >> PHY_BDL_DQ2_BIT) & PHY_BDL_MASK)
922 + ((dq0_3 >> PHY_BDL_DQ3_BIT) & PHY_BDL_MASK)
923 + ((dq4_7 >> PHY_BDL_DQ0_BIT) & PHY_BDL_MASK)
924 + ((dq4_7 >> PHY_BDL_DQ1_BIT) & PHY_BDL_MASK)
925 + ((dq4_7 >> PHY_BDL_DQ2_BIT) & PHY_BDL_MASK)
926 + ((dq4_7 >> PHY_BDL_DQ3_BIT) & PHY_BDL_MASK);
927
928 val = val >> 3;
929 return val;
930 }
931
/**
 * ddr_adjust_trend_check
 * @accel : Returns a step value used to adjust more quickly.
 *
 * Check whether the dataeye DQ window sits on the left, right or middle.
 */
static unsigned int ddr_adjust_trend_check(struct ddr_cfg_st *cfg, int *accel)
939 {
940 unsigned int dq_bdl = 0;
941 unsigned int size;
942
943 /* 32 BDL middle[13, 17]. 128 BDL middle[40, 56] */
944 /* 1 Phase = (DDR_BDL_PHASE_TRANSFORM) BDL */
945 size = DDR_BDL_PHASE_TRANSFORM >> 1;
946
947 dq_bdl = ddr_adjust_get_average(cfg);
948
949 /* increase adjust step to accelerate */
950 if (accel) {
951 if (dq_bdl > PHY_DQ_BDL_MIDDLE)
952 *accel = dq_bdl - PHY_DQ_BDL_MIDDLE;
953 else if (dq_bdl < PHY_DQ_BDL_MIDDLE)
954 *accel = PHY_DQ_BDL_MIDDLE - dq_bdl;
955
956 DDR_INFO("byte[%x] bdl[%x] middle[%x] accel[%x] rdqs[%x]",
957 cfg->cur_byte, dq_bdl, PHY_DQ_BDL_MIDDLE, *accel,
958 (ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQSDLY(cfg->cur_byte))
959 >> PHY_RDQS_BDL_BIT) & PHY_RDQS_BDL_MASK);
960 }
961
962 /* window on left */
963 if (dq_bdl < (PHY_DQ_BDL_MIDDLE - size))
964 return DDR_WIN_LEFT;
965 /* on right */
966 else if (dq_bdl > (PHY_DQ_BDL_MIDDLE + size))
967 return DDR_WIN_RIGHT;
968 else
969 return DDR_WIN_MIDDLE;
970 }
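
/*
 * Illustrative example (the DDR_BDL_PHASE_TRANSFORM value is assumed): if one
 * phase equals 8 BDL steps, size is 4, so an average DQ BDL within
 * [PHY_DQ_BDL_MIDDLE - 4, PHY_DQ_BDL_MIDDLE + 4] is reported as
 * DDR_WIN_MIDDLE and no window move is attempted.
 */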
971
/* Check whether the adjust value is valid */
static int ddr_adjust_check_val(int val, unsigned int mode)
974 {
975 if (DDR_MODE_READ == mode) {
976 if (val < 0 || val > PHY_RDQS_BDL_MASK)
977 return DDR_FALSE;
978 } else {
979 if (val < 0 || val > PHY_WDQ_PHASE_MASK)
980 return DDR_FALSE;
981 }
982
983 return DDR_TRUE;
984 }
985
/* Get the value which needs to be adjusted */
static int ddr_adjust_get_val(struct ddr_cfg_st *cfg)
988 {
989 if (DDR_MODE_READ == cfg->cur_mode)
990 return (ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQSDLY(cfg->cur_byte))
991 >> PHY_RDQS_BDL_BIT) & PHY_RDQS_BDL_MASK;
992 else
993 return (ddr_read(cfg->cur_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, cfg->cur_byte))
994 >> PHY_WDQ_PHASE_BIT) & PHY_WDQ_PHASE_MASK;
995 }
996
static void ddr_rdqs_sync(struct ddr_cfg_st *cfg, int val)
998 {
999 unsigned int rdqsdly;
1000 unsigned int cur_rank = cfg->rank_idx;
1001 int old, offset;
1002
1003 rdqsdly = ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQSDLY(cfg->cur_byte));
1004 old = (rdqsdly >> PHY_RDQS_BDL_BIT) & PHY_RDQS_BDL_MASK;
1005 offset = val - old;
1006
1007 /* sync rdm */
1008 ddr_rdqs_sync_rank_rdq(cfg, offset);
1009
1010 if (1 == cfg->phy[cfg->phy_idx].rank_num) {
		DDR_DEBUG("Rank number[%x] no need to sync another rank", cfg->phy[cfg->phy_idx].rank_num);
1012 return;
1013 }
1014
1015 /* sync other rank rdm and rdq */
1016 cfg->rank_idx = DDR_SUPPORT_RANK_MAX - 1 - cur_rank; /* switch to another rank */
1017 ddr_rdqs_sync_rank_rdq(cfg, offset);
	cfg->rank_idx = cur_rank; /* restore to the current rank */
1019 }
1020
static void ddr_set_rdqs(struct ddr_cfg_st *cfg, int val)
1022 {
1023 unsigned int delay;
1024 delay = ddr_read(cfg->cur_phy + DDR_PHY_DXNRDQSDLY(cfg->cur_byte));
1025
1026 DDR_PHY_RDQS_SYNC_RDM(cfg, val);
1027
1028 /* clear rdqs bdl */
1029 delay = delay & (~(PHY_RDQS_BDL_MASK << PHY_RDQS_BDL_BIT));
1030
1031 ddr_write(delay | ((unsigned int)val << PHY_RDQS_BDL_BIT),
1032 cfg->cur_phy + DDR_PHY_DXNRDQSDLY(cfg->cur_byte));
1033 }
1034
/* Set the value which needs to be adjusted */
static void ddr_adjust_set_val(struct ddr_cfg_st *cfg, int val)
1037 {
1038 unsigned int delay;
1039 if (DDR_MODE_READ == cfg->cur_mode) {
1040 ddr_set_rdqs(cfg, val);
1041 } else {
1042 delay = ddr_read(cfg->cur_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, cfg->cur_byte));
1043 /* clear wdq phase */
1044 delay = delay & (~(PHY_WDQ_PHASE_MASK << PHY_WDQ_PHASE_BIT));
1045
1046 ddr_write(delay | ((unsigned int)val << PHY_WDQ_PHASE_BIT),
1047 cfg->cur_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, cfg->cur_byte));
1048 }
1049
1050 ddr_phy_cfg_update(cfg->cur_phy);
1051 }
1052
/* Add or subtract the step from the value to adjust */
static void ddr_adjust_change_val(unsigned int dir, int *val,
	int step, unsigned int mode)
1056 {
1057 if (DDR_MODE_READ == mode) {
1058 if (DDR_WIN_RIGHT == dir)
1059 (*val) = (*val) + step;
1060 else
1061 (*val) = (*val) - step;
1062 } else {
1063 /* decrease wdq phase, window move to right */
1064 if (DDR_WIN_RIGHT == dir)
1065 (*val) = (*val) - step;
1066 else
1067 (*val) = (*val) + step;
1068 }
1069 }
1070
/**
 * ddr_adjust_move_win
 * @dir : move direction, DDR_WIN_RIGHT or DDR_WIN_LEFT.
 *
 * Move the window in the specified direction until the best DQ BDL is beyond
 * the midline.
 */
static void ddr_adjust_move_win(struct ddr_cfg_st *cfg,
	struct training_data *training,
	int step, unsigned int dir)
1080 {
1081 int cur_val, def_val;
1082 int i;
1083 int accel;
1084 int trend;
1085 unsigned int max_value;
1086
1087 max_value = (DDR_MODE_WRITE == cfg->cur_mode ?
1088 PHY_WDQ_PHASE_MASK : PHY_RDQS_BDL_MASK);
1089
1090 def_val = ddr_adjust_get_val(cfg);
1091 cur_val = def_val;
1092 for (i = 0; i <= max_value; i++) {
1093 accel = step;
1094 /* write mode no need to accelerate */
1095 if (DDR_MODE_WRITE == cfg->cur_mode)
1096 trend = ddr_adjust_trend_check(cfg, 0);
1097 else
1098 trend = ddr_adjust_trend_check(cfg, &accel);
1099
1100 if (DDR_WIN_MIDDLE == trend || dir == trend) {
1101 DDR_DEBUG("Move byte[%x] window to middle suc", cfg->cur_byte);
1102 break;
1103 }
1104
1105 ddr_adjust_change_val(dir, &cur_val, accel, cfg->cur_mode);
1106 if (DDR_FALSE == ddr_adjust_check_val(cur_val, cfg->cur_mode)) {
1107 DDR_WARNING("Move byte[%x] to middle fail. value[%x]",
1108 cfg->cur_byte, cur_val);
1109 break;
1110 }
1111
1112 DDR_DEBUG("Byte[%x] mode[%x] set value[%x]",
1113 cfg->cur_byte, cfg->cur_mode, cur_val);
1114 ddr_adjust_set_val(cfg, cur_val);
1115 if (ddr_dataeye_deskew(cfg, training)) {
1116 ddr_adjust_set_val(cfg, def_val);
1117 /* MUST deskew dataeye after restore rdqs */
1118 ddr_dataeye_deskew(cfg, training);
1119 DDR_ERROR("Byte[%x] deskew fail, restore[%x]",
1120 cfg->cur_byte, def_val);
1121 break;
1122 }
1123 }
1124 }
1125
/* Adjust the specified byte window to the middle */
static void ddr_adjust_byte(struct ddr_cfg_st *cfg, struct training_data *training)
1128 {
1129 unsigned int trend = ddr_adjust_trend_check(cfg, 0);
1130
1131 /* window on left, move to right */
1132 if (DDR_WIN_LEFT == trend)
1133 ddr_adjust_move_win(cfg, training, DDR_DQS_ADJ_STEP, DDR_WIN_RIGHT);
1134 /* window on right, move to left */
1135 else if (DDR_WIN_RIGHT == trend)
1136 ddr_adjust_move_win(cfg, training, DDR_DQS_ADJ_STEP, DDR_WIN_LEFT);
1137 /* window on middle, no need to move */
1138 else
1139 DDR_DEBUG("Byte[%x] mode[%x] win on middle.",
1140 cfg->cur_byte, cfg->cur_mode);
1141 }
1142
/**
 * Adjust PHY dataeye. In the normal case,
 * the read dataeye window is on the left after read dataeye hardware training,
 * and the write dataeye window is on the left after write leveling training.
 */
void ddr_adjust_dataeye(struct ddr_cfg_st *cfg, struct training_data *training)
1150 {
1151 int i;
1152
1153 /* dataeye adjust disable */
1154 if (ddr_training_check_bypass(cfg, DDR_BYPASS_DATAEYE_ADJ_MASK))
1155 return;
1156
1157 DDR_DEBUG("DDR dataeye adjust PHY[%x][%x] DMC[%x][%x] Rank[%x]",
1158 cfg->phy_idx, cfg->cur_phy, cfg->dmc_idx, cfg->cur_dmc, cfg->rank_idx);
1159
1160 if (DDR_FALSE == cfg->adjust)
1161 return;
1162
1163 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
1164 cfg->cur_byte = i + (cfg->dmc_idx << 1); /* byte index accord to phy */
1165 ddr_adjust_byte(cfg, training);
1166 }
1167 }
1168 #else
1169 #define ddr_adjust_dataeye(cfg, training)
1170 #endif /* DDR_TRAINING_ADJUST_CONFIG */
1171
1172 #define __dataeye_training__
1173 #ifdef DDR_DATAEYE_TRAINING_CONFIG
/* Check dataeye dq */
int ddr_dataeye_check_dq(struct ddr_cfg_st *cfg)
1176 {
1177 if (DDR_CHECK_TYPE_DDRT == cfg->dq_check_type)
1178 return ddr_ddrt_check(cfg);
1179 else if (DDR_CHECK_TYPE_MPR == cfg->dq_check_type)
1180 return ddr_mpr_check(cfg);
1181 else
1182 DDR_ERROR("DDR dataeye dq check type not set.");
1183
1184 return 0;
1185 }
1186
/* Check whether the dq is valid and set a mask to reduce search time */
static int ddr_dataeye_check_dir(unsigned int direction, unsigned int left,
	unsigned int right, unsigned int *mask,
	struct ddr_cfg_st *cfg)
1191 {
1192 int result = 0;
1193
1194 result = ddr_dataeye_check_dq(cfg);
1195 switch (direction) {
1196 case DDR_FIND_DQ_BOTH:
1197 *mask = DDR_FIND_DQ_LEFT | DDR_FIND_DQ_RIGHT;
1198 break;
1199 case DDR_FIND_DQ_LEFT:
1200 if (result) {
1201 /* ddr test error, search opposite side */
1202 *mask = DDR_FIND_DQ_RIGHT;
1203 } else { /* ddr test ok */
1204 ddr_phy_set_dq_bdl(cfg, left);
1205 if (!ddr_dataeye_check_dq(cfg))
1206 /* test ok, go on search this side */
1207 *mask = DDR_FIND_DQ_LEFT;
1208 }
1209 break;
1210 case DDR_FIND_DQ_RIGHT:
1211 if (result) { /* ddr test error, search opposite side */
1212 *mask = DDR_FIND_DQ_LEFT;
1213 } else { /* ddr test ok */
1214 ddr_phy_set_dq_bdl(cfg, right);
1215 if (!ddr_dataeye_check_dq(cfg))
1216 /* test OK, go on search this side */
1217 *mask = DDR_FIND_DQ_RIGHT;
1218 }
1219 break;
1220 default:
1221 break;
1222 }
1223
1224 return result;
1225 }
1226
/* Binary search the valid dq bdl */
static void ddr_dataeye_search_dq(unsigned int left, unsigned int right,
	int *target, unsigned int direction,
	struct ddr_cfg_st *cfg)
1232 {
1233 unsigned int middle;
1234 unsigned int mask = 0;
1235
1236 middle = left + ((right - left) >> 1);
1237
1238 ddr_phy_set_dq_bdl(cfg, middle);
1239 if (!ddr_dataeye_check_dir(direction, left, right, &mask, cfg)) { /* test ok */
1240 *target = (int)middle;
1241 return;
1242 }
1243
1244 if (left == middle || middle == right) /* not found */
1245 return;
1246
1247 /* find left side */
1248 if (DDR_FIND_DQ_LEFT & mask)
1249 ddr_dataeye_search_dq(left, middle, target, direction, cfg);
1250
1251 /* find right side */
1252 if (DDR_FIND_DQ_RIGHT & mask)
1253 ddr_dataeye_search_dq(middle, right, target, direction, cfg);
1254
1255 return;
1256 }
1257
/* Find DQ valid range */
static void ddr_dataeye_find_dq(struct ddr_cfg_st *cfg,
	struct training_data *training)
1261 {
1262 int cur_dq, left_dq, right_dq, def_dq;
1263 unsigned int dq_num;
1264 unsigned int win_num;
1265
1266 dq_num = (cfg->cur_byte << 3) + cfg->cur_dq;
1267 def_dq = (int)ddr_phy_get_dq_bdl(cfg);
1268 cur_dq = def_dq;
1269
1270 /* check default dq */
1271 if (ddr_dataeye_check_dq(cfg)) {
1272 /* test error */
1273 cur_dq = -1;
1274 ddr_dataeye_search_dq(0, PHY_BDL_MASK, &cur_dq,
1275 DDR_FIND_DQ_BOTH, cfg);
1276 DDR_DEBUG("DQ[%x] def[%x] nok, find new value[%x]",
1277 dq_num, def_dq, cur_dq);
1278 if (-1 == cur_dq) { /* no valid dq */
1279 training->ddr_bit_result[dq_num] = 0;
1280 training->ddr_bit_best[dq_num] = 0;
1281 /* restore default value */
1282 ddr_phy_set_dq_bdl(cfg, def_dq);
1283 DDR_WARNING("DQ[%x] not found dq. restore[%x]", dq_num, def_dq);
1284 return;
1285 }
1286 }
1287
1288 /* find the left boundary */
1289 left_dq = cur_dq;
1290 ddr_dataeye_search_dq(0, cur_dq, &left_dq,
1291 DDR_FIND_DQ_LEFT, cfg);
1292 while (left_dq > 0) {
1293 left_dq--;
1294 ddr_phy_set_dq_bdl(cfg, left_dq);
1295 if (ddr_dataeye_check_dq(cfg)) {
1296 /* test error */
1297 left_dq++;
1298 break;
1299 }
1300 }
1301
1302 /* find the right boundary */
1303 right_dq = cur_dq;
1304 ddr_dataeye_search_dq(cur_dq, PHY_BDL_MASK, &right_dq,
1305 DDR_FIND_DQ_RIGHT, cfg);
1306 while (right_dq < PHY_BDL_MASK) {
1307 right_dq++;
1308 ddr_phy_set_dq_bdl(cfg, right_dq);
1309 if (ddr_dataeye_check_dq(cfg)) {
1310 /* test error */
1311 right_dq--;
1312 break;
1313 }
1314 }
1315
1316 /* reset dq */
1317 ddr_phy_set_dq_bdl(cfg, def_dq);
1318
1319 /**
1320 * 0 1 2 3 4 5 6 7 8 9
1321 * x x - - - - - x x x
1322 * | |
1323 * left_dq right_dq
1324 *
1325 * so left_dq = 2, right_dq = 6
1326 */
1327 /* set result */
1328 win_num = right_dq - left_dq + 1;
1329 training->ddr_bit_result[dq_num] = ((unsigned int)left_dq << DDR_DATAEYE_RESULT_BIT
1330 | (unsigned int)right_dq);
1331 training->ddr_bit_best[dq_num] = (win_num << DDR_DATAEYE_RESULT_BIT)
1332 | ((win_num >> 1) + (unsigned int)left_dq);
1333
1334 DDR_INFO("DQ[%x] range: left[%x] right[%x] best[%x] mode[%x] rank[%x]", dq_num,
1335 left_dq, right_dq, training->ddr_bit_best[dq_num], cfg->cur_mode, cfg->rank_idx);
1336 }
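
/*
 * Worked example of the result encoding, using the values from the diagram
 * above (left_dq = 2, right_dq = 6): win_num = 6 - 2 + 1 = 5, so
 * ddr_bit_result = (2 << DDR_DATAEYE_RESULT_BIT) | 6 and
 * ddr_bit_best = (5 << DDR_DATAEYE_RESULT_BIT) | ((5 >> 1) + 2), i.e. the
 * window width in the high half and the window centre (BDL 4) in the low half.
 */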
1337
/* DDR dataeye training one byte */
int ddr_dataeye_deskew(struct ddr_cfg_st *cfg, struct training_data *training)
1340 {
1341 unsigned int dq_num;
1342 unsigned int loop_times = 0;
1343 unsigned int win_num, dq_sum;
1344 unsigned int def_dq, best_dq;
1345 int i;
1346 unsigned int byte_index = cfg->cur_byte;
1347
1348 dq_sum = 0;
1349 training->ddr_win_sum = 0;
1350 for (i = 0; i < DDR_PHY_BIT_NUM; i++) {
1351 cfg->cur_dq = i;
1352 dq_num = (byte_index << 3) + i;
1353 def_dq = ddr_phy_get_dq_bdl(cfg);
1354 ddr_dataeye_find_dq(cfg, training);
1355 win_num = training->ddr_bit_best[dq_num] >> DDR_DATAEYE_RESULT_BIT;
1356 best_dq = training->ddr_bit_best[dq_num] & DDR_DATAEYE_RESULT_MASK;
1357 /* check window number */
1358 if (win_num < DDR_DATAEYE_WIN_NUM) {
1359 if (loop_times < DDR_LOOP_TIMES_LMT) {
1360 loop_times++;
1361 i--;
1362 continue;
1363 } else {
1364 if (win_num == 0) {
1365 DDR_WARNING("Byte[%x] DQ[%x] no win.", byte_index, dq_num);
1366 /* restore default value */
1367 ddr_phy_set_dq_bdl(cfg, def_dq);
1368 ddr_training_stat(DDR_ERR_DATAEYE, cfg->cur_phy, byte_index, i);
1369 continue;
1370 }
1371 }
1372 }
1373
1374 loop_times = 0;
1375 ddr_phy_set_dq_bdl(cfg, best_dq);
1376 dq_sum = dq_sum + best_dq;
1377 training->ddr_win_sum = training->ddr_win_sum + win_num;
1378 }
1379
1380 dq_sum = dq_sum >> 3;
1381
1382 /* only DDR_MODE_WRITE need to set */
1383 if (DDR_MODE_WRITE == cfg->cur_mode)
1384 ddr_write((dq_sum & PHY_BDL_MASK) << PHY_WDM_BDL_BIT, cfg->cur_phy
1385 + DDR_PHY_DXNWDQNBDL2(cfg->rank_idx, byte_index));
1386
1387 ddr_phy_cfg_update(cfg->cur_phy);
1388 return 0;
1389 }
1390
/* DDR write or read dataeye training */
static int ddr_dataeye_process(struct ddr_cfg_st *cfg,
	struct training_data *training)
1394 {
1395 int result = 0;
1396 int i;
1397
1398 /* dataeye training */
1399 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
1400 cfg->cur_byte = i + (cfg->dmc_idx << 1); /* byte index accord to phy */
1401 result += ddr_dataeye_deskew(cfg, training);
1402 }
1403
1404 if (result) {
1405 result = -1;
1406 DDR_ERROR("PHY[%x] mode[%x] dataeye training fail", cfg->cur_phy, cfg->cur_mode);
1407 } else {
1408 /* dataeye training result adjust */
1409 ddr_adjust_dataeye(cfg, training);
1410 }
1411
	/* save the training result so it can be printed later */
1413 ddr_result_data_save(cfg, training);
1414
1415 return result;
1416 }
1417
/* DDR dataeye training */
int ddr_dataeye_training(struct ddr_cfg_st *cfg)
1420 {
1421 struct training_data tmp_result;
1422 struct training_data *training = &tmp_result;
1423 int result_read, result_write;
1424
1425 DDR_DEBUG("DDR dataeye training PHY[%x][%x] DMC[%x][%x] Rank[%x]",
1426 cfg->phy_idx, cfg->cur_phy, cfg->dmc_idx, cfg->cur_dmc, cfg->rank_idx);
1427
1428 /* write dataeye training */
1429 cfg->cur_mode = DDR_MODE_WRITE;
1430 ddrtr_memset(training, 0, sizeof(struct training_data));
1431 result_write = ddr_dataeye_process(cfg, training);
1432
1433 /* read dataeye training */
1434 cfg->cur_mode = DDR_MODE_READ;
1435 ddrtr_memset(training, 0, sizeof(struct training_data));
1436 result_read = ddr_dataeye_process(cfg, training);
1437
1438 if (result_read || result_write)
1439 return -1;
1440 else
1441 return 0;
1442 }
1443
int ddr_dataeye_training_func(struct ddr_cfg_st *cfg)
1445 {
1446 struct tr_relate_reg relate_reg;
1447 int result;
1448
1449 /* dataeye training disable */
1450 if (ddr_training_check_bypass(cfg, DDR_BYPASS_DATAEYE_MASK))
1451 return 0;
1452
1453 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_DATAEYE_MASK);
1454 ddr_training_switch_axi(cfg);
1455 ddr_ddrt_init(cfg, DDR_DDRT_MODE_DATAEYE);
1456 cfg->adjust = DDR_DATAEYE_NORMAL_ADJUST;
1457 cfg->dq_check_type = DDR_CHECK_TYPE_DDRT;
1458 result = ddr_dataeye_training(cfg);
1459 ddr_training_restore_reg(cfg, &relate_reg);
1460
1461 return result;
1462 }
1463 #else
int ddr_dataeye_training_func(struct ddr_cfg_st *cfg)
1465 {
1466 DDR_WARNING("Not support DDR dataeye training.");
1467 return 0;
1468 }
1469 #endif /* DDR_DATAEYE_TRAINING_CONFIG */
1470
1471 #define __hardware_training__
1472 #ifdef DDR_HW_TRAINING_CONFIG
1473 #ifdef DDR_HW_READ_ADJ_CONFIG
/**
 * Adjust rdqs and dq after hw read training.
 * When DDR_TRAINING_ADJUST_DISABLE is defined, DDR_HW_READ_ADJ_CONFIG MUST
 * also be defined.
 */
static void ddr_hw_read_adj(struct ddr_cfg_st *cfg)
1479 {
1480 int i;
1481 unsigned int base_phy = cfg->cur_phy;
1482 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
1483
1484 DDR_DEBUG("DDR hw read adjust.");
1485 /* check hw read adjust bypass bit */
1486 if (ddr_training_check_bypass(cfg, DDR_BYPASS_HW_ADJ_MASK))
1487 return;
1488
1489 /* assume read dataeye window on left */
1490 for (i = 0; i < byte_num; i++) {
1491 ddr_write(ddr_read(base_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, i))
1492 + (PHY_DQ_MIDDLE_VAL << PHY_BDL_DQ_BIT),
1493 base_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, i));
1494 ddr_write(ddr_read(base_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, i))
1495 + (PHY_DQ_MIDDLE_VAL << PHY_BDL_DQ_BIT),
1496 base_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, i));
1497 ddr_write(ddr_read(base_phy + DDR_PHY_DXNRDQSDLY(i))
1498 + (PHY_RDQS_MIDDLE_VAL << PHY_RDQS_BDL_BIT),
1499 base_phy + DDR_PHY_DXNRDQSDLY(i));
1500 }
1501 }
1502 #else
static void ddr_hw_read_adj(struct ddr_cfg_st *cfg) {}
1504 #endif /* DDR_HW_READ_ADJ_CONFIG */
1505
static void ddr_training_get_rdqs(struct ddr_cfg_st *cfg, struct ddr_bdl_st *rdqs)
1507 {
1508 unsigned int i;
1509 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
1510 unsigned int base_phy = cfg->cur_phy;
1511
	for (i = 0; i < byte_num; i++) {
1513 rdqs->bdl[i] = ddr_read(base_phy + DDR_PHY_DXNRDQSDLY(i));
1514 }
1515 }
1516
static void ddr_training_set_rdqs(struct ddr_cfg_st *cfg, struct ddr_bdl_st *rdqs)
1518 {
1519 unsigned int i;
1520 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
1521 unsigned int base_phy = cfg->cur_phy;
1522
1523 for (i = 0; i < byte_num; i++) {
1524 ddr_write(rdqs->bdl[i], base_phy + DDR_PHY_DXNRDQSDLY(i));
1525 }
1526 }
1527
static void ddr_hw_training_adjust_rdqs(struct ddr_cfg_st *cfg, struct rdqs_data_st *rdqs_st)
1529 {
1530 unsigned int i;
1531 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
1532 unsigned int rdqs_rank0, rdqs_rank1;
1533 unsigned int cur_rank = cfg->rank_idx;
1534 int offset;
1535
1536 for (i = 0; i < byte_num; i++) {
1537 /* struct rdqs_data_st store the whole register value */
1538 rdqs_rank0 = (rdqs_st->rank[0].bdl[i] >> PHY_RDQS_BDL_BIT) & PHY_RDQS_BDL_MASK;
1539 rdqs_rank1 = (rdqs_st->rank[1].bdl[i] >> PHY_RDQS_BDL_BIT) & PHY_RDQS_BDL_MASK;
1540
1541 cfg->cur_byte = i;
1542 if (rdqs_rank0 > rdqs_rank1) {
1543 offset = rdqs_rank0 - rdqs_rank1;
			ddr_write(rdqs_st->rank[0].bdl[i], cfg->cur_phy + DDR_PHY_DXNRDQSDLY(i));
1545 cfg->rank_idx = 1; /* switch to rank1 for sync rank1 rdq */
1546 } else {
1547 offset = rdqs_rank1 - rdqs_rank0;
			ddr_write(rdqs_st->rank[1].bdl[i], cfg->cur_phy + DDR_PHY_DXNRDQSDLY(i));
1549 cfg->rank_idx = 0; /* switch to rank0 for sync rank0 rdq */
1550 }
1551 ddr_rdqs_sync_rank_rdq(cfg, offset);
1552 }
1553
1554 cfg->rank_idx = cur_rank; /* restore to current rank */
1555
1556 ddr_phy_cfg_update(cfg->cur_phy);
1557 }
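
/*
 * Worked example (illustrative values): if byte i reads back rdqs_rank0 = 0x10
 * and rdqs_rank1 = 0x0c, the larger rank0 value is written back to
 * DDR_PHY_DXNRDQSDLY and rank1's read DQ/RDM BDLs are shifted by
 * offset = 0x10 - 0x0c = 4 through ddr_rdqs_sync_rank_rdq(), so both ranks
 * share one RDQS delay without losing their trained data-to-strobe alignment.
 */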
1558
/* DDR HW training process */
static int ddr_hw_training_process(struct ddr_cfg_st *cfg, unsigned int item)
1561 {
1562 unsigned int count = DDR_HWR_WAIT_TIMEOUT;
1563 unsigned int base_phy = cfg->cur_phy;
1564 unsigned int init_ctrl = ddr_read(base_phy + DDR_PHY_PHYINITCTRL);
1565
1566 if (!item)
1567 return 0;
1568
	DDR_DEBUG("base_phy[%x] item[%x]", base_phy, item);
1570 /* hardware training enable */
1571 ddr_write(item | PHY_PHYINITCTRL_INIT_EN | init_ctrl, base_phy + DDR_PHY_PHYINITCTRL);
1572
1573 if (item & PHY_PHYINITCTRL_DRAM_RST) {
1574 if (ddr_training_ctrl_easr(cfg, DDR_EXIT_SREF))
1575 return -1;
1576 }
1577
1578 count = DDR_HWR_WAIT_TIMEOUT;
1579 /* auto cleared to 0 after training finished */
1580 while (count--) {
1581 if (!(ddr_read(base_phy + DDR_PHY_PHYINITCTRL)
1582 & PHY_PHYINITCTRL_MASK))
1583 break;
1584 }
1585
1586 if (count == 0xffffffff) {
1587 DDR_FATAL("HWR wait timeout.");
1588 ddr_training_stat(DDR_ERR_HW_RD_DATAEYE, base_phy, item, ddr_read(base_phy + DDR_PHY_PHYINITSTATUS)); /* TODO: */
1589 return -1;
1590 }
1591
1592 if (ddr_read(base_phy + DDR_PHY_PHYINITSTATUS)) {
1593 DDR_FATAL("Phy[%x] hw[%x] failed[%x]", base_phy, item, ddr_read(base_phy + DDR_PHY_PHYINITSTATUS));
1594 ddr_training_stat(DDR_ERR_HW_RD_DATAEYE, base_phy, item, ddr_read(base_phy + DDR_PHY_PHYINITSTATUS)); /* TODO: */
1595 return -1;
1596 }
1597 return 0;
1598 }
1599
/* Dataeye hardware training */
int ddr_hw_dataeye_read(struct ddr_cfg_st *cfg)
1602 {
1603 unsigned int base_phy = cfg->cur_phy;
1604 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
1605
1606 unsigned int i;
1607 int result;
1608
1609 ddr_training_cfg_init(cfg);
1610 /* clear */
1611 for (i = 0; i < byte_num; i++) {
1612 ddr_write(0, base_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, i));
1613 ddr_write(0, base_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, i));
1614 ddr_write(0, base_phy + DDR_PHY_DXNRDQSDLY(i));
1615 }
1616
1617 ddr_phy_cfg_update(base_phy);
1618
1619 result = ddr_hw_training_process(cfg, PHY_PHYINITCTRL_RDET_EN);
1620
1621 ddr_hw_read_adj(cfg);
1622
1623 return result;
1624 }
1625
/* DDR HW training control */
int ddr_hw_training_ctl(struct ddr_cfg_st *cfg)
1628 {
1629 int byte_idx;
1630 int result = 0;
1631 unsigned int temp = 0;
1632 unsigned int item = cfg->cur_item;
1633 unsigned int base_phy = cfg->cur_phy;
1634 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
1635 unsigned int dvrft_ctrl = ddr_read(base_phy + DDR_PHY_DVRFTCTRL);
1636 struct rdqs_data_st *rdqs_st = (struct rdqs_data_st *)cfg->res_st;
1637
1638 if (!item || !rdqs_st)
1639 return 0;
1640
1641 ddr_phy_cfg_update(base_phy);
1642 /* NOTE: not support array when boot */
1643 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_CNT_RESET_START);
1644 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_PLL);
1645
1646 /* save rdqs bdl after PHY_PHYINITCTRL_DLYMEAS_EN */
1647 if (0 == cfg->rank_idx)
1648 ddr_training_get_rdqs(cfg, &rdqs_st->origin);
1649
1650 for (byte_idx = 0; byte_idx < byte_num; byte_idx++) {
1651 cfg->cur_byte = byte_idx;
1652 ddr_bdl_adj(cfg);
1653 }
1654
1655 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type) {
1656 temp = ddr_read(base_phy + 0x64);
1657 ddr_write(temp & 0x0fffffff, base_phy + 0x64); /* ca odt disable */
1658
1659 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_DRAM_RESET);
1660 ddr_write(temp, base_phy + 0x64); /* restore */
1661
1662 temp = ddr_read(base_phy + 0x48);
1663 ddr_write(temp & 0xfffffffe, base_phy + 0x48); /* todo rank0 */
1664
1665 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_VREF_AC);
1666
1667 ddr_write(temp | 0x1, base_phy + 0x48); /* rank1 */
1668
1669 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_VREF_AC);
1670
1671 ddr_write(temp, base_phy + 0x48); /* restore */
1672
1673 /* ddr_training_delay(10000); */
1674 result += ddr_hw_training_process(cfg, item & PHY_PHYINITCTRL_DRAM_INIT_EN);
1675 } else {
1676 #ifdef DDR_WRITE_DM_DISABLE
1677 if (PHY_DRAMCFG_TYPE_DDR4 == cfg->phy[cfg->phy_idx].dram_type) {
1678 temp = ddr_read(base_phy + 0xe0);
1679 ddr_write((temp & 0xFBFFFFFF) | 0x8000000, base_phy + 0xe0); /* write dm disable */
1680 }
1681 #endif
1682 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_DRAM_RESET);
1683 }
1684 result += ddr_hw_training_process(cfg, item & PHY_PHYINITCTRL_CAT_EN);
1685
1686 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_CS);
1687
1688 ddr_write(dvrft_ctrl & (~PHY_DVRFTCTRL_PDAEN_EN),
1689 base_phy + DDR_PHY_DVRFTCTRL);
1690 /* DDR_PHY_VREFTCTRL 31bit:1 do vref dram set twice */
1691 ddr_write((ddr_read(base_phy + DDR_PHY_VREFTCTRL)
1692 & (~(0x1 << PHY_VREFS_MRS_ENTER_BIT)))
1693 | (0x1 << PHY_VREFS_MRS_ENTER_BIT),
1694 base_phy + DDR_PHY_VREFTCTRL);
1695 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_VREF_DQ);
1696 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_VREF_DQ);
1697 /* DDR_PHY_VREFTCTRL 31bit:0 do vref dram set once */
1698 ddr_write(ddr_read(base_phy + DDR_PHY_VREFTCTRL)
1699 & (~(0x1 << PHY_VREFS_MRS_ENTER_BIT)),
1700 base_phy + DDR_PHY_VREFTCTRL);
1701 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_VREF_DQ);
1702 ddr_write(dvrft_ctrl, base_phy + DDR_PHY_DVRFTCTRL);
1703
1704 result += ddr_hw_training_process(cfg, item & PHY_HW_GP_NORMAL);
1705
1706 #ifdef DDR_WRITE_DM_DISABLE
1707 unsigned int temp1 = 0;
1708 if (PHY_DRAMCFG_TYPE_DDR4 == cfg->phy[cfg->phy_idx].dram_type) {
1709 ddr_write(temp, base_phy + 0xe0); /* restore */
1710 temp = ddr_read(base_phy + 0x1e0);
1711 temp1 = ddr_read(base_phy + 0x2c);
1712 ddr_write(0x05555555, base_phy + 0x1e0); /* init MR5 */
1713 ddr_write(temp1 | 0x00004000, base_phy + 0x2c); /* write dm disable */
1714 result += ddr_hw_training_process(cfg, item & PHY_PHYINITCTRL_DRAM_INIT_EN);
1715 ddr_write(temp, base_phy + 0x1e0); /* restore */
1716 ddr_write(temp1, base_phy + 0x2c); /* restore */
1717 }
1718 #endif
1719 ddr_phy_cfg_update(base_phy);
1720
1721 return result;
1722 }
1723
ddr_hw_training_by_rank(struct ddr_cfg_st * cfg)1724 static int ddr_hw_training_by_rank(struct ddr_cfg_st *cfg)
1725 {
1726 DDR_DEBUG("PHY[%x][%x] Rank[%x] item[%x]",
1727 cfg->phy_idx, cfg->cur_phy, cfg->rank_idx, cfg->cur_item);
1728
1729 /* 0:PHY_TRAINCTRL0_DTR_RANK0, 1:PHY_TRAINCTRL0_DTR_RANK1 */
1730 DDR_PHY_SWITCH_RANK(cfg->cur_phy, cfg->rank_idx);
1731 return ddr_hw_training_ctl(cfg);
1732 }
1733
ddr_hw_training_by_phy(struct ddr_cfg_st * cfg)1734 static int ddr_hw_training_by_phy(struct ddr_cfg_st *cfg)
1735 {
1736 int result = 0;
1737 int i;
1738 struct rdqs_data_st rdqs_data;
1739 struct rdqs_data_st *rdqs_st = &rdqs_data;
1740 struct ddr_timing_st timing_st;
1741 unsigned int rank_num = cfg->phy[cfg->phy_idx].rank_num;
1742
1743 cfg->res_st = rdqs_st;
1744
1745 /* disable auto refresh */
1746 ddr_training_save_timing(cfg, &timing_st);
1747
1748 for (i = 0; i < rank_num; i++) {
1749 cfg->rank_idx = i;
1750 cfg->cur_item = cfg->phy[cfg->phy_idx].rank[i].item_hw;
1751
1752 result += ddr_hw_training_by_rank(cfg);
1753
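/* Only DDR_SUPPORT_RANK_MAX-rank configurations need the per-rank RDQS bookkeeping below; otherwise stop after the first rank. */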
1754 if (DDR_SUPPORT_RANK_MAX != rank_num)
1755 break;
1756
1757 /* save rank rdqs bdl */
1758 ddr_training_get_rdqs(cfg, &(rdqs_st->rank[i]));
1759
1760 /* restore PHY_PHYINITCTRL_DLYMEAS_EN rdqs before training next rank */
1761 if ((rank_num - 1) != i)
1762 ddr_training_set_rdqs(cfg, &(rdqs_st->origin));
1763 }
1764
1765 if (DDR_SUPPORT_RANK_MAX == rank_num) {
1766 ddr_hw_training_adjust_rdqs(cfg, rdqs_st);
1767 }
1768
1769 /* restore auto refresh */
1770 ddr_training_restore_timing(cfg, &timing_st);
1771
1772 cfg->res_st = 0;
1773
1774 return result;
1775 }
1776
1777 /* DDR hardware training */
ddr_hw_training(struct ddr_cfg_st * cfg)1778 int ddr_hw_training(struct ddr_cfg_st *cfg)
1779 {
1780 int result = 0;
1781 int i;
1782 struct tr_relate_reg reg;
1783
1784 /* save customer reg */
1785 DDR_TRAINING_SAVE_REG_FUNC(&reg, 0x1);
1786 ddr_boot_cmd_save_func(&reg);
1787
1788 for (i = 0; i < cfg->phy_num; i++) {
1789 cfg->phy_idx = i;
1790 cfg->cur_phy = cfg->phy[i].addr;
1791 result += ddr_hw_training_by_phy(cfg);
1792 }
1793 /* restore customer reg */
1794 DDR_TRAINING_RESTORE_REG_FUNC(&reg);
1795 ddr_boot_cmd_restore_func(&reg);
1796
1797 return result;
1798 }
1799 #endif /* DDR_HW_TRAINING_CONFIG */
1800
1801 #define __mpr_training__
1802 #ifdef DDR_MPR_TRAINING_CONFIG
1803 /* Switch MPR function */
ddr_mpr_switch(unsigned int base_dmc,int val)1804 static void ddr_mpr_switch(unsigned int base_dmc, int val)
1805 {
1806 unsigned int sfc_cmd;
1807 if (DDR_TRUE == val)
1808 sfc_cmd = (DMC_CMD_MRS_MR3 << DMC_SFC_CMD_MRS_BIT)
1809 | DMC_CMD_TYPE_LMR;
1810 else
1811 sfc_cmd = DMC_CMD_TYPE_LMR;
1812
1813 ddr_dmc_sfc_cmd(base_dmc, sfc_cmd, 0x0, DMC_BANK_MR3);
1814
1815 /* clear */
1816 if (DDR_FALSE == val) {
1817 ddr_write(0x0, base_dmc + DDR_DMC_SFCBANK);
1818 ddr_write(0x0, base_dmc + DDR_DMC_SFCREQ);
1819 }
1820 }
1821
1822 /* Judge MPR data */
ddr_mpr_judge(unsigned int data1,unsigned int data2,unsigned int data3,unsigned int data4,unsigned int dq_index)1823 static int ddr_mpr_judge(unsigned int data1, unsigned int data2,
1824 unsigned int data3, unsigned int data4,
1825 unsigned int dq_index)
1826 {
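/* The MPR readback is expected to alternate all-ones and all-zeros beats: data1/data3 fully set, data2/data4 fully clear (checked per byte, or per DQ bit when dq_index is given). */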
1827 /* check byte */
1828 if (-1 == dq_index) {
1829 if (DDR_MPR_BYTE_MASK == data1 && 0x0 == data2
1830 && DDR_MPR_BYTE_MASK == data3 && 0x0 == data4)
1831 return 0;
1832 else
1833 return -1;
1834 } else {
1835 /* check DQ */
1836 data1 = (data1 >> dq_index) & DDR_MPR_BIT_MASK;
1837 data2 = (data2 >> dq_index) & DDR_MPR_BIT_MASK;
1838 data3 = (data3 >> dq_index) & DDR_MPR_BIT_MASK;
1839 data4 = (data4 >> dq_index) & DDR_MPR_BIT_MASK;
1840 if (DDR_MPR_BIT_MASK == data1 && 0x0 == data2
1841 && DDR_MPR_BIT_MASK == data3 && 0x0 == data4)
1842 return 0;
1843 else
1844 return -1;
1845 }
1846 }
1847
1848 /* Extract MPR read data to judge */
ddr_mpr_extract(struct ddr_cfg_st * cfg,unsigned int offset0,unsigned int offset1,unsigned int offset2,unsigned int offset3)1849 static int ddr_mpr_extract(struct ddr_cfg_st *cfg,
1850 unsigned int offset0, unsigned int offset1,
1851 unsigned int offset2, unsigned int offset3)
1852 {
1853 unsigned int data1, data2, data3, data4;
1854 unsigned int base_dmc = cfg->cur_dmc;
1855 unsigned int byte_index = cfg->cur_byte;
1856
1857 data1 = ddr_read(base_dmc + offset0); /* [127:96] or [255:224] */
1858 data2 = ddr_read(base_dmc + offset1); /* [95:64] or [223:192] */
1859 data3 = ddr_read(base_dmc + offset2); /* [63:32] or [191:160] */
1860 data4 = ddr_read(base_dmc + offset3); /* [31:0] or [159:128] */
1861
1862 DDR_INFO("byte[%x] data[%x=%x][%x=%x][%x=%x][%x=%x]",
1863 byte_index,
1864 base_dmc + offset0, data1, base_dmc + offset1, data2,
1865 base_dmc + offset2, data3, base_dmc + offset3, data4);
1866
1867 if (DDR_PHY_BYTE_MAX == GET_BYTE_NUM(cfg)) {
1868 /* four byte: data1[0xFFFFFFFF] data2[0x00000000]
1869 data3[0xFFFFFFFF] data4[0x00000000] */
1870 data1 = (data1 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1871 data2 = (data2 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1872 data3 = (data3 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1873 data4 = (data4 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1874 } else {
1875 /* two byte: data1[0xFFFF0000] data2[0xFFFF0000]
1876 data3[0xFFFF0000] data4[0xFFFF0000] */
1877 data1 = ((data1 >> DDR_MPR_BYTE_BIT) >> (byte_index << 3))
1878 & DDR_MPR_BYTE_MASK;
1879 data2 = (data2 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1880 data3 = ((data3 >> DDR_MPR_BYTE_BIT) >> (byte_index << 3))
1881 & DDR_MPR_BYTE_MASK;
1882 data4 = (data4 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1883 if (ddr_mpr_judge(data1, data2, data3, data4, cfg->cur_dq))
1884 return -1;
1885
1886 /* two byte need to swap data and check again */
1887 data1 = ((ddr_read(base_dmc + DDR_DMC_SFC_RDATA1)
1888 >> DDR_MPR_BYTE_BIT) >> (byte_index << 3))
1889 & DDR_MPR_BYTE_MASK;
1890 data2 = (ddr_read(base_dmc + DDR_DMC_SFC_RDATA0)
1891 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1892 data3 = ((ddr_read(base_dmc + DDR_DMC_SFC_RDATA3)
1893 >> DDR_MPR_BYTE_BIT) >> (byte_index << 3))
1894 & DDR_MPR_BYTE_MASK;
1895 data4 = (ddr_read(base_dmc + DDR_DMC_SFC_RDATA2)
1896 >> (byte_index << 3)) & DDR_MPR_BYTE_MASK;
1897 }
1898
1899 return ddr_mpr_judge(data1, data2, data3, data4, cfg->cur_dq);
1900 }
1901
1902 /* Check MPR read data */
ddr_mpr_check(struct ddr_cfg_st * cfg)1903 int ddr_mpr_check(struct ddr_cfg_st *cfg)
1904 {
1905 /* read data */
1906 ddr_dmc_sfc_cmd(cfg->cur_dmc, DMC_CMD_TYPE_READ, 0x0, 0x0);
1907 return DMC_MPR_CHECK_BIT_0_127(cfg);
1908 }
1909
1910 /* Find RDQ via MPR */
ddr_mpr_find_rdq(struct ddr_cfg_st * cfg)1911 static int ddr_mpr_find_rdq(struct ddr_cfg_st *cfg)
1912 {
1913 struct training_data tmp_result;
1914 struct training_data *training = &tmp_result;
1915 unsigned int dq_num;
1916 unsigned int win_num;
1917 unsigned int def_dq, best_dq;
1918 unsigned int byte_index, dq_index;
1919
1920 /* find rdq via mpr */
1921 cfg->dq_check_type = DDR_CHECK_TYPE_MPR;
1922
1923 /* find rdq */
1924 for (byte_index = 0;
1925 byte_index < GET_BYTE_NUM(cfg); byte_index++) {
1926 for (dq_index = 0; dq_index < DDR_PHY_BIT_NUM; dq_index++) {
1927 dq_num = (byte_index << 3) + dq_index;
1928 def_dq = ddr_phy_get_dq_bdl(cfg);
1929 ddr_dataeye_find_dq(cfg, training);
1930 win_num = training->ddr_bit_best[dq_num]
1931 >> DDR_DATAEYE_RESULT_BIT;
1932 best_dq = training->ddr_bit_best[dq_num]
1933 & DDR_DATAEYE_RESULT_MASK;
1934 if (win_num > 0)
1935 ddr_phy_set_dq_bdl(cfg, best_dq);
1936 else {
1937 /* Normally we should not reach here. */
1938 /* restore default value */
1939 ddr_phy_set_dq_bdl(cfg, def_dq);
1940
1941 DDR_FATAL("PHY[%x] Byte[%x] DQ[%x] MPR fail",
1942 cfg->cur_phy, byte_index, dq_index);
1943 ddr_training_stat(DDR_ERR_MPR, cfg->cur_phy,
1944 byte_index, dq_index);
1945 return -1;
1946 }
1947 }
1948 }
1949
1950 return 0;
1951
1952 }
1953
1954 /* Find RDQS via MPR */
ddr_mpr_find_rdqs(struct ddr_cfg_st * cfg)1955 static int ddr_mpr_find_rdqs(struct ddr_cfg_st *cfg)
1956 {
1957 unsigned int rdqs_start = 0;
1958 unsigned int rdqs_end = PHY_RDQS_BDL_MASK;
1959 unsigned int rdqs_mid;
1960 unsigned int val, delay;
1961 unsigned int count = 0;
1962 int found = DDR_FALSE;
1963 unsigned int base_phy = cfg->cur_phy;
1964 unsigned int byte_index = cfg->cur_byte;
1965
1966 /* set rdq to middle value */
1967 ddr_write(PHY_DQ_MIDDLE_VAL << PHY_BDL_DQ_BIT, base_phy + DDR_PHY_DXNRDQNBDL0(cfg->rank_idx, byte_index));
1968 ddr_write(PHY_DQ_MIDDLE_VAL << PHY_BDL_DQ_BIT, base_phy + DDR_PHY_DXNRDQNBDL1(cfg->rank_idx, byte_index));
1969
1970 /* clear rdqs */
1971 delay = ddr_read(base_phy + DDR_PHY_DXNRDQSDLY(byte_index)) >> PHY_RDQS_BDL_BIT;
1972 rdqs_mid = delay; /* if not found, restore default value */
1973 delay = delay & (~PHY_RDQS_BDL_MASK);
1974
1975 /* find rdqs */
1976 for (val = 0; val <= PHY_RDQS_BDL_MASK; val++) {
1977 ddr_write(delay | (val << PHY_RDQS_BDL_BIT),
1978 base_phy + DDR_PHY_DXNRDQSDLY(byte_index));
1979 ddr_phy_cfg_update(base_phy);
1980 /* check ok */
1981 if (!ddr_mpr_check(cfg)) {
1982 if (DDR_FALSE == found) {
1983 rdqs_start = val; /* found start value */
1984 count++;
1985 if (DDR_MPR_RDQS_FIND_TIMES == count)
1986 found = DDR_TRUE;
1987 }
1988 } else {
1989 if (DDR_TRUE == found) {
1990 rdqs_end = val; /* found end value */
1991 break;
1992 }
1993 }
1994 }
1995
1996 if (DDR_TRUE == found) {
1997 rdqs_mid = ((rdqs_end - rdqs_start) >> 1) + rdqs_start;
1998 DDR_INFO("PHY[%x] Byte[%x] rdqs_middle[%x]",
1999 base_phy, byte_index, rdqs_mid);
2000 DDR_INFO("rdqs_start[%x] rdqs_end[%x]",
2001 rdqs_start, rdqs_end);
2002 } else {
2003 DDR_FATAL("PHY[%x] Byte[%x] RDQS not found, restore default.",
2004 base_phy, byte_index);
2005 ddr_training_stat(DDR_ERR_MPR, base_phy,
2006 byte_index, -1);
2007 }
2008
2009 ddr_write(delay | (rdqs_mid << PHY_RDQS_BDL_BIT), base_phy + DDR_PHY_DXNRDQSDLY(byte_index));
2010 ddr_phy_cfg_update(base_phy);
2011
2012 return ((DDR_TRUE == found) ? 0 : -1);
2013 }
2014
2015 /* Multi Purpose Register(MPR) */
ddr_mpr_training(struct ddr_cfg_st * cfg)2016 int ddr_mpr_training(struct ddr_cfg_st *cfg)
2017 {
2018 int i;
2019 int result = 0;
2020 unsigned int byte_num = GET_BYTE_NUM(cfg);
2021 unsigned int mr0;
2022 unsigned int sfc_cmd;
2023 unsigned int base_dmc = cfg->cur_dmc;
2024 unsigned int base_phy = cfg->cur_phy;
2025
2026 DDR_DEBUG("DDR MPR training.");
2027
2028 /* set DDR burst */
2029 if (DDR_PHY_BYTE_MAX == byte_num) {
2030 mr0 = (ddr_read(base_phy + DDR_PHY_MODEREG01)
2031 & DMC_MRS_MASK)
2032 & (~DMC_MR0_BL_MASK);
2033 sfc_cmd = ((mr0 | DMC_MR0_BL_BUST4)
2034 << DMC_SFC_CMD_MRS_BIT)
2035 | DMC_CMD_TYPE_LMR;
2036 ddr_dmc_sfc_cmd(base_dmc, sfc_cmd, 0x0, 0x0);
2037 }
2038
2039 /* precharge all */
2040 ddr_dmc_sfc_cmd(base_dmc, DMC_CMD_TYPE_PRECHARGE_ALL, 0x0, 0x0);
2041
2042 /* enable MPR */
2043 ddr_mpr_switch(base_dmc, DDR_TRUE);
2044
2045 /* find rdqs */
2046 for (i = 0; i < byte_num; i++)
2047 result += ddr_mpr_find_rdqs(cfg);
2048
2049 /* find rdq */
2050 if (!result)
2051 result = ddr_mpr_find_rdq(cfg);
2052
2053 /* disable MPR */
2054 ddr_mpr_switch(base_dmc, DDR_FALSE);
2055
2056 /* restore DDR burst */
2057 if (DDR_PHY_BYTE_MAX == byte_num) {
2058 mr0 = (ddr_read(base_phy + DDR_PHY_MODEREG01)
2059 & DMC_MRS_MASK);
2060 sfc_cmd = (mr0 << DMC_SFC_CMD_MRS_BIT)
2061 | DMC_CMD_TYPE_LMR;
2062 ddr_dmc_sfc_cmd(base_dmc, sfc_cmd, 0x0, 0x0);
2063 }
2064 return result;
2065 }
2066
ddr_mpr_training_func(struct ddr_cfg_st * cfg)2067 int ddr_mpr_training_func(struct ddr_cfg_st *cfg)
2068 {
2069 struct tr_relate_reg relate_reg;
2070 int result = 0;
2071
2072 /* MPR training disable */
2073 if (ddr_training_check_bypass(cfg, DDR_BYPASS_MPR_MASK))
2074 return 0;
2075
2076 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_MPR_MASK);
2077 result = ddr_mpr_training(cfg);
2078 ddr_training_restore_reg(cfg, &relate_reg);
2079
2080 return result;
2081 }
2082 #endif /* DDR_MPR_TRAINING_CONFIG */
2083
2084 #define __vref_training__
2085 #ifdef DDR_VREF_TRAINING_CONFIG
2086 #ifdef DDR_VREF_WITHOUT_BDL_CONFIG
2087 /* Save dataeye dq bdl before vref training */
ddr_vref_save_bdl(struct ddr_cfg_st * cfg,struct tr_dq_data * dq_data)2088 static void ddr_vref_save_bdl(struct ddr_cfg_st *cfg, struct tr_dq_data *dq_data)
2089 {
2090 int i;
2091 unsigned int base_phy = cfg->cur_phy;
2092 unsigned int rank = cfg->rank_idx;
2093 unsigned int byte_index;
2094
2095 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
2096 byte_index = i + (cfg->dmc_idx << 1); /* byte index relative to the PHY */
2097 if (DDR_MODE_WRITE == cfg->cur_mode) {
2098 dq_data->dq03[i] = ddr_read(base_phy + DDR_PHY_DXNWDQNBDL0(rank, byte_index));
2099 dq_data->dq47[i] = ddr_read(base_phy + DDR_PHY_DXNWDQNBDL1(rank, byte_index));
2100 dq_data->wdm[i] = ddr_read(base_phy + DDR_PHY_DXNWDQNBDL2(rank, byte_index));
2101 } else {
2102 dq_data->dq03[i] = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL0(rank, byte_index));
2103 dq_data->dq47[i] = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL1(rank, byte_index));
2104 }
2105 }
2106 }
2107
2108 /* Restore dataeye dq bdl after vref training */
ddr_vref_restore_bdl(struct ddr_cfg_st * cfg,struct tr_dq_data * dq_data)2109 static void ddr_vref_restore_bdl(struct ddr_cfg_st *cfg, struct tr_dq_data *dq_data)
2110 {
2111 int i;
2112 unsigned int base_phy = cfg->cur_phy;
2113 unsigned int rank = cfg->rank_idx;
2114 unsigned int byte_index;
2115
2116 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
2117 byte_index = i + (cfg->dmc_idx << 1); /* byte index relative to the PHY */
2118 if (DDR_MODE_WRITE == cfg->cur_mode) {
2119 ddr_write(dq_data->dq03[i], base_phy + DDR_PHY_DXNWDQNBDL0(rank, byte_index));
2120 ddr_write(dq_data->dq47[i], base_phy + DDR_PHY_DXNWDQNBDL1(rank, byte_index));
2121 ddr_write(dq_data->wdm[i], base_phy + DDR_PHY_DXNWDQNBDL2(rank, byte_index));
2122 } else {
2123 ddr_write(dq_data->dq03[i], base_phy + DDR_PHY_DXNRDQNBDL0(rank, byte_index));
2124 ddr_write(dq_data->dq47[i], base_phy + DDR_PHY_DXNRDQNBDL1(rank, byte_index));
2125 }
2126 }
2127 }
2128 #else
ddr_vref_save_bdl(struct ddr_cfg_st * cfg,struct tr_dq_data * dq_data)2129 static void ddr_vref_save_bdl(struct ddr_cfg_st *cfg, struct tr_dq_data *dq_data)
2130 {
2131 }
ddr_vref_restore_bdl(struct ddr_cfg_st * cfg,struct tr_dq_data * dq_data)2132 static void ddr_vref_restore_bdl(struct ddr_cfg_st *cfg, struct tr_dq_data *dq_data)
2133 {
2134 }
2135 #endif /* DDR_VREF_WITHOUT_BDL_CONFIG */
2136
2137 /* Set DDR Vref value */
ddr_vref_set(struct ddr_cfg_st * cfg,unsigned int val)2138 static void ddr_vref_set(struct ddr_cfg_st *cfg, unsigned int val)
2139 {
2140 if (DDR_MODE_READ == cfg->cur_mode) { /* HOST vref */
2141 DDR_PHY_VREF_HOST_SET(cfg->cur_phy, cfg->rank_idx, GET_BYTE_NUM(cfg), cfg->cur_byte, val); /* TODO */
2142 } else { /* DRAM vref */
2143 unsigned int auto_ref_timing =
2144 ddr_read(cfg->cur_dmc + DDR_DMC_TIMING2);
2145 /* disable auto refresh */
2146 ddr_training_set_timing(cfg->cur_dmc,
2147 auto_ref_timing & DMC_AUTO_TIMING_DIS);
2148
2149 /* DDR_PHY_VREFTCTRL 31bit:1 do vref dram set twice */
2150 ddr_write((ddr_read(cfg->cur_phy + DDR_PHY_VREFTCTRL)
2151 & (~(0x1 << PHY_VREFS_MRS_ENTER_BIT)))
2152 | (0x1 << PHY_VREFS_MRS_ENTER_BIT),
2153 cfg->cur_phy + DDR_PHY_VREFTCTRL);
2154 DDR_PHY_VREF_DRAM_SET(cfg->cur_phy, val, cfg->cur_byte);
2155 DDR_PHY_VREF_DRAM_SET(cfg->cur_phy, val, cfg->cur_byte);
2156 /* DDR_PHY_VREFTCTRL 31bit:0 do vref dram set once */
2157 ddr_write(ddr_read(cfg->cur_phy + DDR_PHY_VREFTCTRL)
2158 & (~(0x1 << PHY_VREFS_MRS_ENTER_BIT)),
2159 cfg->cur_phy + DDR_PHY_VREFTCTRL);
2160 DDR_PHY_VREF_DRAM_SET(cfg->cur_phy, val, cfg->cur_byte);
2161
2162 /* enable auto refresh */
2163 ddr_training_set_timing(cfg->cur_dmc, auto_ref_timing);
2164 }
2165 DDR_INFO("byte[%x] mode[%x] set vref [%x]", cfg->cur_byte, cfg->cur_mode, val);
2166 }
2167
2168 /* Get DDR Vref value */
ddr_vref_get(struct ddr_cfg_st * cfg)2169 static unsigned int ddr_vref_get(struct ddr_cfg_st *cfg)
2170 {
2171 unsigned int val = 0;
2172
2173 if (DDR_MODE_READ == cfg->cur_mode) { /* HOST vref */
2174 DDR_PHY_VREF_HOST_GET(cfg->cur_phy, cfg->rank_idx, cfg->cur_byte, val);
2175 } else { /* DRAM vref */
2176 DDR_PHY_VREF_DRAM_GET(cfg->cur_phy, val, cfg->cur_byte);
2177 }
2178 DDR_INFO("byte[%x] mode[%x] get vref [%x]", cfg->cur_byte, cfg->cur_mode, val);
2179 return val;
2180 }
2181
2182 /* Get total window number of the training result */
ddr_vref_get_win(struct ddr_cfg_st * cfg,struct training_data * training,int vref)2183 static unsigned int ddr_vref_get_win(struct ddr_cfg_st *cfg,
2184 struct training_data *training, int vref)
2185 {
2186 unsigned int vref_min = 0;
2187 unsigned int vref_max = DDR_VREF_DRAM_VAL_MAX;
2188 int vref_set;
2189
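/* Clamp the requested vref into the valid range, apply it, re-run the dataeye deskew, and return the accumulated window size as the figure of merit. */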
2190 training->ddr_win_sum = 0;
2191
2192 if (DDR_MODE_READ == cfg->cur_mode) {
2193 DDR_VREF_GET_HOST_MAX(cfg->rank_idx, vref_max);
2194 }
2195
2196 if (vref < vref_min)
2197 vref_set = vref_min;
2198 else if (vref > vref_max)
2199 vref_set = vref_max;
2200 else
2201 vref_set = vref;
2202
2203 ddr_vref_set(cfg, vref_set);
2204
2205 ddr_dataeye_deskew(cfg, training);
2206
2207 return training->ddr_win_sum;
2208 }
2209
2210 /* Find the best vref, the one whose window number is largest */
ddr_vref_find_best(struct ddr_cfg_st * cfg,struct training_data * training,unsigned int vref,int step)2211 static unsigned int ddr_vref_find_best(struct ddr_cfg_st *cfg,
2212 struct training_data *training, unsigned int vref, int step)
2213 {
2214 int cur_vref;
2215 unsigned int best_vref;
2216 unsigned int cur_win;
2217 unsigned int max_win;
2218 unsigned int lower_times = 0;
2219 unsigned int vref_min = 0;
2220 unsigned int vref_max = DDR_VREF_DRAM_VAL_MAX;
2221
2222 if (DDR_MODE_READ == cfg->cur_mode) {
2223 DDR_VREF_GET_HOST_MAX(cfg->rank_idx, vref_max);
2224 }
2225
2226 max_win = 0;
2227 cur_vref = vref + step;
2228
2229 if (vref < vref_min)
2230 best_vref = vref_min;
2231 else if (vref > vref_max)
2232 best_vref = vref_max;
2233 else
2234 best_vref = vref;
2235
2236 /* find parabola vertex */
2237 while (cur_vref >= vref_min
2238 && cur_vref <= vref_max) {
2239 cur_win = ddr_vref_get_win(cfg, training, cur_vref);
2240 DDR_DEBUG("byte[%x] vref[%x] win[%x] mode[%x]",
2241 cfg->cur_byte, cur_vref, cur_win, cfg->cur_mode);
2242 if (cur_win < max_win) {
2243 lower_times++;
2244 if (DDR_VREF_COMPARE_TIMES == lower_times) {
2245 /* Consecutive declines mean the vertex has been found */
2246 break;
2247 }
2248 } else {
2249 lower_times = 0;
2250 max_win = cur_win;
2251 best_vref = cur_vref;
2252 }
2253 cur_vref = cur_vref + step;
2254 }
2255
2256 return best_vref;
2257 }
2258
2259 /* DDR Vref calibrate and set the best value */
ddr_vref_cal(struct ddr_cfg_st * cfg,struct training_data * training)2260 static void ddr_vref_cal(struct ddr_cfg_st *cfg, struct training_data *training)
2261 {
2262 unsigned int def_vref;
2263 unsigned int best_vref;
2264 unsigned int left_win;
2265 unsigned int right_win;
2266
2267 def_vref = ddr_vref_get(cfg);
2268 left_win = ddr_vref_get_win(cfg, training, def_vref - DDR_VREF_COMPARE_STEP);
2269 right_win = ddr_vref_get_win(cfg, training, def_vref + DDR_VREF_COMPARE_STEP);
2270
2271 DDR_DEBUG("byte[%x] default vref[%x] win[%x][%x] mode[%x]",
2272 cfg->cur_byte, def_vref, left_win, right_win, cfg->cur_mode);
2273
2274 /* As vref increases, the window number follows a parabola.
2275 So first determine whether the best value lies on the left or the right. */
2276 /* parabola vertex */
2277 if (left_win < right_win) { /* the result on right */
2278 best_vref = ddr_vref_find_best(cfg, training, def_vref, 1);
2279 } else if (left_win > right_win) { /* the result on left */
2280 best_vref = ddr_vref_find_best(cfg, training, def_vref, -1);
2281 } else {
2282 /* when (left_win == right_win), check def_vref */
2283 unsigned int vref_max = DDR_VREF_DRAM_VAL_MAX;
2284 if (DDR_MODE_READ == cfg->cur_mode) {
2285 DDR_VREF_GET_HOST_MAX(cfg->rank_idx, vref_max);
2286 }
2287
2288 if (def_vref < (vref_max >> 1))
2289 best_vref = ddr_vref_find_best(cfg, training, def_vref, 1);
2290 else
2291 best_vref = ddr_vref_find_best(cfg, training, def_vref, -1);
2292 }
2293
2294
2295 DDR_DEBUG("byte[%x] best vref[%x] mode[%x]",
2296 cfg->cur_byte, best_vref, cfg->cur_mode);
2297 ddr_vref_set(cfg, best_vref);
2298 }
2299
ddr_vref_training(struct ddr_cfg_st * cfg)2300 int ddr_vref_training(struct ddr_cfg_st *cfg)
2301 {
2302 struct training_data tmp_result;
2303 struct training_data *training = &tmp_result;
2304 struct tr_dq_data dq_data;
2305 int result = 0;
2306 int i;
2307
2308 DDR_DEBUG("DDR Vref[%x] training PHY[%x][%x] DMC[%x][%x] Rank[%x]",
2309 cfg->cur_mode, cfg->phy_idx, cfg->cur_phy, cfg->dmc_idx,
2310 cfg->cur_dmc, cfg->rank_idx);
2311
2312 ddr_vref_save_bdl(cfg, &dq_data);
2313 ddrtr_memset(training, 0, sizeof(struct training_data));
2314
2315 /* vref calibrate */
2316 if (DDR_MODE_READ == cfg->cur_mode) {
2317 /* only train byte0 and byte2 */
2318 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
2319 cfg->cur_byte = i + (cfg->dmc_idx << 1); /* byte index relative to the PHY */
2320 if (1 == cfg->cur_byte || 3 == cfg->cur_byte)
2321 continue;
2322
2323 ddr_vref_cal(cfg, training);
2324 }
2325 } else {
2326 unsigned int dram_type = cfg->phy[cfg->phy_idx].dram_type;
2327 unsigned int bank_group = (ddr_read(cfg->cur_dmc
2328 + DDR_DMC_CFG_RNKVOL(cfg->rank_idx)) >> DMC_CFG_MEM_BG_BIT)
2329 & DMC_CFG_MEM_BG_MASK;
2330
2331 if (PHY_DRAMCFG_TYPE_LPDDR4 != dram_type
2332 && PHY_DRAMCFG_TYPE_DDR4 != dram_type)
2333 return 0;
2334
2335 if (PHY_DRAMCFG_TYPE_LPDDR4 == dram_type)
2336 bank_group = DMC_CFG_MEM_2BG; /* LPDDR4 does not train byte1/byte3 */
2337
2338 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
2339 cfg->cur_byte = i + (cfg->dmc_idx << 1); /* byte index relative to the PHY */
2340 /* byte1 and byte3 bypass when 2 Bank Group */
2341 if ((DMC_CFG_MEM_2BG == bank_group)
2342 && ((1 == i) || (3 == i)))
2343 continue;
2344
2345 ddr_vref_cal(cfg, training);
2346 }
2347 }
2348
2349 #if !defined(DDR_VREF_WITHOUT_BDL_CONFIG) || defined(DDR_TRAINING_CMD)
2350 /* deskew the dataeye again at the best vref. */
2351 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
2352 cfg->cur_byte = i + (cfg->dmc_idx << 1); /* byte index relative to the PHY */
2353 result += ddr_dataeye_deskew(cfg, training);
2354 }
2355 #endif
2356
2357 ddr_vref_restore_bdl(cfg, &dq_data);
2358
2359 ddr_result_data_save(cfg, training);
2360
2361 return result;
2362 }
2363
ddr_vref_training_func(struct ddr_cfg_st * cfg)2364 int ddr_vref_training_func(struct ddr_cfg_st *cfg)
2365 {
2366 struct tr_relate_reg relate_reg;
2367 int result = 0;
2368
2369 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_VREF_HOST_MASK);
2370 ddr_training_switch_axi(cfg);
2371 ddr_ddrt_init(cfg, DDR_DDRT_MODE_DATAEYE);
2372 cfg->dq_check_type = DDR_CHECK_TYPE_DDRT;
2373
2374 /* host vref training enabled */
2375 if (!ddr_training_check_bypass(cfg, DDR_BYPASS_VREF_HOST_MASK)) {
2376 cfg->cur_mode = DDR_MODE_READ;
2377 result += ddr_vref_training(cfg);
2378 }
2379
2380 /* dram vref training enabled (DDR4/LPDDR4 only, checked inside ddr_vref_training) */
2381 if (!ddr_training_check_bypass(cfg, DDR_BYPASS_VREF_DRAM_MASK)) {
2382 cfg->cur_mode = DDR_MODE_WRITE;
2383 result += ddr_vref_training(cfg);
2384 }
2385 ddr_training_restore_reg(cfg, &relate_reg);
2386
2387 return result;
2388 }
2389 #else
ddr_vref_training_func(struct ddr_cfg_st * cfg)2390 int ddr_vref_training_func(struct ddr_cfg_st *cfg)
2391 {
2392 DDR_WARNING("Not support DDR vref training.");
2393 return 0;
2394 }
2395 #endif /* DDR_VREF_TRAINING_CONFIG */
2396
2397 #define __write_leveling__
2398 #ifdef DDR_WL_TRAINING_CONFIG
ddr_bdl_add(unsigned int * raw,unsigned int val)2399 static void ddr_bdl_add(unsigned int *raw, unsigned int val)
2400 {
2401 if (((*raw) + val) > PHY_BDL_MASK)
2402 *raw = PHY_BDL_MASK;
2403 else
2404 *raw += val;
2405 }
2406
ddr_bdl_sub(unsigned int * raw,unsigned int val)2407 static void ddr_bdl_sub(unsigned int *raw, unsigned int val)
2408 {
2409 if ((*raw) > val)
2410 *raw -= val;
2411 else
2412 *raw = 0;
2413 }
2414
2415 /* DDR PHY DQ phase increase */
ddr_phase_inc(unsigned int * raw)2416 static void ddr_phase_inc(unsigned int *raw)
2417 {
2418 #if defined (DDR_PHY_T28_CONFIG) || defined(DDR_PHY_T16_CONFIG) \
2419 || defined (DDR_PHY_T12_V100_CONFIG) || defined (DDR_PHY_T12_V101_CONFIG)
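/* On these PHYs the phase encoding appears to skip values whose low two bits are 2'b11, so step by 2 from a value ending in 2'b10. */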
2420 if ((*raw) < (PHY_WDQS_PHASE_MASK - 1)) {
2421 if (((*raw) & 0x3) == 0x2)
2422 *raw += 0x2;
2423 else
2424 *raw += 0x1;
2425 }
2426 #else
2427 if ((*raw) < PHY_WDQS_PHASE_MASK)
2428 *raw += 0x1;
2429 #endif
2430 }
2431
2432 /* DDR PHY DQ phase decrease */
ddr_phase_dec(unsigned int * raw)2433 static void ddr_phase_dec(unsigned int *raw)
2434 {
2435 #if defined (DDR_PHY_T28_CONFIG) || defined(DDR_PHY_T16_CONFIG) \
2436 || defined (DDR_PHY_T12_V100_CONFIG) || defined (DDR_PHY_T12_V101_CONFIG)
2437 if ((*raw) > 0x1) {
2438 if (((*raw) & 0x3) == 0x3)
2439 *raw -= 0x2;
2440 else
2441 *raw -= 0x1;
2442 }
2443 #else
2444 if ((*raw) > 0x0)
2445 *raw -= 0x1;
2446 #endif
2447 }
2448
2449 /* DQ bdl add or sub */
ddr_dq_bdl_operate(unsigned int base_phy,unsigned int addr_offset,unsigned int val,unsigned int is_add)2450 static void ddr_dq_bdl_operate(unsigned int base_phy,
2451 unsigned int addr_offset, unsigned int val, unsigned int is_add)
2452 {
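/* Unpack the four per-DQ bdl fields, apply a saturating add/sub to each, then repack and write the register back. */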
2453 unsigned int tmp;
2454 unsigned int dq_bdl[DDR_PHY_REG_DQ_NUM];
2455 int i;
2456
2457 tmp = ddr_read(base_phy + addr_offset);
2458 dq_bdl[0] = (tmp >> PHY_BDL_DQ0_BIT) & PHY_BDL_MASK;
2459 dq_bdl[1] = (tmp >> PHY_BDL_DQ1_BIT) & PHY_BDL_MASK;
2460 dq_bdl[2] = (tmp >> PHY_BDL_DQ2_BIT) & PHY_BDL_MASK;
2461 dq_bdl[3] = (tmp >> PHY_BDL_DQ3_BIT) & PHY_BDL_MASK;
2462
2463 for (i = 0; i < DDR_PHY_REG_DQ_NUM; i++) {
2464 if (is_add)
2465 ddr_bdl_add(&dq_bdl[i], val);
2466 else
2467 ddr_bdl_sub(&dq_bdl[i], val);
2468 }
2469
2470 tmp = (dq_bdl[3] << PHY_BDL_DQ3_BIT) + (dq_bdl[2] << PHY_BDL_DQ2_BIT)
2471 + (dq_bdl[1] << PHY_BDL_DQ1_BIT) + (dq_bdl[0] << PHY_BDL_DQ0_BIT);
2472 ddr_write(tmp, base_phy + addr_offset);
2473 }
2474
2475 /* Disable or enable DDR write leveling mode */
ddr_wl_switch(unsigned int base_dmc,unsigned int base_phy,int val)2476 static void ddr_wl_switch(unsigned int base_dmc, unsigned int base_phy,
2477 int val)
2478 {
2479 unsigned int mr1_raw;
2480 unsigned int sfc_cmd;
2481 unsigned int sfc_bank;
2482
2483 /* Set Rank = 0, Cmd = MRS, No Precharge CMD */
2484 mr1_raw = ddr_read(base_phy + DDR_PHY_MODEREG01)
2485 >> PHY_MODEREG01_MR1_BIT;
2486 sfc_cmd = DMC_CMD_TYPE_LMR;
2487 sfc_bank = DMC_BANK_MR1;
2488
2489 if (DDR_TRUE == val) { /* enable DDR wl */
2490 /* Set A7 = 1 */
2491 sfc_cmd += (mr1_raw | DMC_CMD_MRS_A7) << DMC_SFC_CMD_MRS_BIT;
2492 } else {
2493
2494 /* Set A7 = 0 */
2495 sfc_cmd += (mr1_raw & ((~DMC_CMD_MRS_A7) & DMC_CMD_MRS_MASK))
2496 << DMC_SFC_CMD_MRS_BIT;
2497 }
2498
2499 ddr_dmc_sfc_cmd(base_dmc, sfc_cmd, 0x0, sfc_bank);
2500
2501 /* clear */
2502 if (DDR_FALSE == val) {
2503 ddr_write(0x0, base_dmc + DDR_DMC_SFCBANK);
2504 ddr_write(0x0, base_dmc + DDR_DMC_SFCREQ);
2505 }
2506
2507 /* phy sw write leveling mode */
2508 ddr_write(val, base_phy + DDR_PHY_SWTMODE);
2509 }
2510
2511 #ifdef DDR_WL_DATAEYE_ADJUST_CONFIG
2512 /* Adjust dataeye WDQ after Write leveling */
ddr_wl_wdq_adjust(struct ddr_cfg_st * cfg,struct ddr_delay_st * wdqs_new,struct ddr_delay_st * wdqs_old)2513 static void ddr_wl_wdq_adjust(struct ddr_cfg_st *cfg,
2514 struct ddr_delay_st *wdqs_new, struct ddr_delay_st *wdqs_old)
2515 {
2516 unsigned int val;
2517 int i;
2518 unsigned int phase_adj, bdl_adj = 0; /* for write dataeye */
2519 unsigned int wdm_bdl;
2520 unsigned int wdq_phase;
2521 unsigned int base_phy = cfg->cur_phy;
2522 unsigned int byte_num = GET_BYTE_NUM(cfg);
2523 unsigned int rank_index = cfg->rank_idx;
2524
2525 DDR_DEBUG("DDR WL write adjust.");
2526
2527 /* check wl write adjust bypass bit */
2528 if (ddr_training_check_bypass(cfg, DDR_BYPASS_WL_ADJ_MASK))
2529 return;
2530
2531 /* adjust wdq phase, wdq bdl, wdm bdl */
2532 for (i = 0; i < byte_num; i++) {
2533 if (wdqs_new->phase[i] == wdqs_old->phase[i]
2534 && wdqs_new->bdl[i] == wdqs_old->bdl[i]) {
2535 continue;
2536 }
2537
2538 phase_adj = 0;
2539 wdq_phase = (ddr_read(base_phy + DDR_PHY_DXNWDQDLY(rank_index, i))
2540 >> PHY_WDQ_PHASE_BIT)
2541 & PHY_WDQ_PHASE_MASK;
2542 wdm_bdl = (ddr_read(base_phy + DDR_PHY_DXNWDQNBDL2(rank_index, i))
2543 >> PHY_WDM_BDL_BIT)
2544 & PHY_BDL_MASK;
2545
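/* One WDQ phase step corresponds to 2^DDR_BDL_PHASE_REL bdl steps: the WDQS bdl delta is converted into whole phase steps, and the WDQ/WDM bdls are moved in the opposite direction, presumably to keep the net WDQ delay aligned with the shifted WDQS. */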
2546 if (wdqs_new->bdl[i] > wdqs_old->bdl[i]) {
2547 val = wdqs_new->bdl[i] - wdqs_old->bdl[i];
2548 phase_adj = val >> DDR_BDL_PHASE_REL;
2549 wdq_phase = wdq_phase + phase_adj;
2550
2551 if (wdq_phase > PHY_WDQ_PHASE_MASK)
2552 wdq_phase = PHY_WDQ_PHASE_MASK;
2553
2554 /* adjust wdq bdl and dm bdl in opposite direction */
2555 bdl_adj = phase_adj << DDR_BDL_PHASE_REL;
2556 ddr_dq_bdl_operate(base_phy, DDR_PHY_DXNWDQNBDL0(rank_index, i),
2557 bdl_adj, DDR_FALSE);
2558 ddr_dq_bdl_operate(base_phy, DDR_PHY_DXNWDQNBDL1(rank_index, i),
2559 bdl_adj, DDR_FALSE);
2560 ddr_bdl_sub(&wdm_bdl, bdl_adj);
2561
2562 } else if (wdqs_new->bdl[i] < wdqs_old->bdl[i]) {
2563 val = wdqs_old->bdl[i] - wdqs_new->bdl[i];
2564 phase_adj = val >> DDR_BDL_PHASE_REL;
2565 wdq_phase = (wdq_phase > phase_adj)
2566 ? (wdq_phase - phase_adj) : 0;
2567
2568 /* adjust wdq bdl and dm bdl in opposite direction */
2569 bdl_adj = phase_adj << DDR_BDL_PHASE_REL;
2570 ddr_dq_bdl_operate(base_phy, DDR_PHY_DXNWDQNBDL0(rank_index, i),
2571 bdl_adj, DDR_TRUE);
2572 ddr_dq_bdl_operate(base_phy, DDR_PHY_DXNWDQNBDL1(rank_index, i),
2573 bdl_adj, DDR_TRUE);
2574 ddr_bdl_add(&wdm_bdl, bdl_adj);
2575 }
2576
2577 DDR_INFO("Byte[%x] WDQ adjust phase[%x] bdl[%x]",
2578 i, phase_adj, bdl_adj);
2579
2580 ddr_write(wdq_phase << PHY_WDQ_PHASE_BIT,
2581 base_phy + DDR_PHY_DXNWDQDLY(rank_index, i));
2582 ddr_write(wdm_bdl << PHY_WDM_BDL_BIT, base_phy + DDR_PHY_DXNWDQNBDL2(rank_index, i));
2583 }
2584
2585 ddr_phy_cfg_update(base_phy);
2586 }
2587 #endif /* DDR_WL_DATAEYE_ADJUST_CONFIG */
2588
2589 /* Sync WDQ phase, WDQ bdl, WDM bdl, OEN bdl, WDQ SOE bdl by WDQS value */
ddr_wl_bdl_sync(struct ddr_cfg_st * cfg,struct ddr_delay_st * wdqs_new,struct ddr_delay_st * wdqs_old)2590 static void ddr_wl_bdl_sync(struct ddr_cfg_st *cfg,
2591 struct ddr_delay_st *wdqs_new, struct ddr_delay_st *wdqs_old)
2592 {
2593 unsigned int tmp;
2594 unsigned int val;
2595 int i;
2596
2597 unsigned int oen_bdl, wdqsoe_bdl, wdm_bdl;
2598 unsigned int wdq_phase;
2599 unsigned int base_phy = cfg->cur_phy;
2600 unsigned int byte_num = GET_BYTE_NUM(cfg);
2601 unsigned int rank_index = cfg->rank_idx;
2602
2603 /* sync wdq phase, wdq bdl, wdm bdl, oen bdl, wdq soe bdl */
2604 for (i = 0; i < byte_num; i++) {
2605 if (wdqs_new->phase[i] == wdqs_old->phase[i]
2606 && wdqs_new->bdl[i] == wdqs_old->bdl[i]) {
2607 continue;
2608 }
2609
2610 DDR_DEBUG("Byte[%x] new[%x][%x] old[%x][%x]", i,
2611 wdqs_new->phase[i], wdqs_new->bdl[i],
2612 wdqs_old->phase[i], wdqs_old->bdl[i]);
2613
2614 /* wdq phase */
2615 wdq_phase = (ddr_read(base_phy + DDR_PHY_DXNWDQDLY(rank_index, i))
2616 >> PHY_WDQ_PHASE_BIT)
2617 & PHY_WDQ_PHASE_MASK;
2618 /* always new_phase >= old_phase */
2619 wdq_phase = wdq_phase
2620 + (wdqs_new->phase[i] - wdqs_old->phase[i]);
2621
2622 /* bdl */
2623 tmp = ddr_read(base_phy + DDR_PHY_DXNOEBDL(rank_index, i));
2624 oen_bdl = (tmp >> PHY_OEN_BDL_BIT) & PHY_BDL_MASK;
2625 wdqsoe_bdl = (tmp >> PHY_WDQSOE_BDL_BIT) & PHY_BDL_MASK;
2626 wdm_bdl = (ddr_read(base_phy + DDR_PHY_DXNWDQNBDL2(rank_index, i))
2627 >> PHY_WDM_BDL_BIT) & PHY_BDL_MASK;
2628
2629 if (wdqs_new->bdl[i] > wdqs_old->bdl[i]) {
2630 val = wdqs_new->bdl[i] - wdqs_old->bdl[i];
2631 ddr_dq_bdl_operate(base_phy,
2632 DDR_PHY_DXNWDQNBDL0(rank_index, i), val, DDR_TRUE);
2633 ddr_dq_bdl_operate(base_phy,
2634 DDR_PHY_DXNWDQNBDL1(rank_index, i), val, DDR_TRUE);
2635 ddr_bdl_add(&oen_bdl, val);
2636 ddr_bdl_add(&wdqsoe_bdl, val);
2637 ddr_bdl_add(&wdm_bdl, val);
2638 } else if (wdqs_new->bdl[i] < wdqs_old->bdl[i]) {
2639 val = wdqs_old->bdl[i] - wdqs_new->bdl[i];
2640 ddr_dq_bdl_operate(base_phy, DDR_PHY_DXNWDQNBDL0(rank_index, i),
2641 val, DDR_FALSE);
2642 ddr_dq_bdl_operate(base_phy, DDR_PHY_DXNWDQNBDL1(rank_index, i),
2643 val, DDR_FALSE);
2644 ddr_bdl_sub(&oen_bdl, val);
2645 ddr_bdl_sub(&wdqsoe_bdl, val);
2646 ddr_bdl_sub(&wdm_bdl, val);
2647 }
2648
2649 if (wdq_phase > PHY_WDQ_PHASE_MASK)
2650 wdq_phase = PHY_WDQ_PHASE_MASK;
2651
2652 ddr_write(wdq_phase << PHY_WDQ_PHASE_BIT,
2653 base_phy + DDR_PHY_DXNWDQDLY(rank_index, i));
2654 ddr_write((wdqsoe_bdl << PHY_WDQSOE_BDL_BIT) + (oen_bdl << PHY_OEN_BDL_BIT),
2655 base_phy + DDR_PHY_DXNOEBDL(rank_index, i));
2656 ddr_write((wdm_bdl << PHY_WDM_BDL_BIT), base_phy + DDR_PHY_DXNWDQNBDL2(rank_index, i));
2657 }
2658
2659 ddr_phy_cfg_update(base_phy);
2660 }
2661
2662 /**
2663 * Write leveling process.
2664 * WL depends on the default WDQS phase value in the register init table.
2665 */
ddr_wl_process(struct ddr_cfg_st * cfg,unsigned int type,struct ddr_delay_st * wdqs)2666 static int ddr_wl_process(struct ddr_cfg_st *cfg,
2667 unsigned int type, struct ddr_delay_st *wdqs)
2668 {
2669 int i, j;
2670 unsigned int wl_result = 0;
2671 unsigned int length;
2672 unsigned int base_phy = cfg->cur_phy;
2673 unsigned int byte_num = GET_BYTE_NUM(cfg);
2674
2675 if (DDR_DELAY_PHASE == type)
2676 length = PHY_WDQS_PHASE_MASK;
2677 else
2678 length = PHY_BDL_MASK;
2679
2680 /* find WDQS phase or bdl, assume CLK Delay > DQS Delay */
2681 for (i = 0; i <= length; i++) {
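/* Each pass: pulse the software WL DQS strobe, sample the per-byte WL flags from SWTRLT, then bump the phase/bdl of every byte that has not locked yet. */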
2682 ddr_phy_cfg_update(base_phy);
2683 ddr_write(0x1, base_phy + DDR_PHY_SWTWLDQS);
2684 DDR_ASM_DSB();
2685 wl_result = ddr_read(base_phy + DDR_PHY_SWTRLT)
2686 & PHY_SWTRLT_WL_MASK;
2687 ddr_write(0x0, base_phy + DDR_PHY_SWTWLDQS);
2688
2689 if ((wl_result & ((1 << byte_num) - 1)) == ((1 << byte_num) - 1))
2690 break;
2691
2692 for (j = 0; j < byte_num; j++) {
2693 DDR_INFO("type[0x%x] byte[0x%x] phase[0x%x] bdl[0x%x] wl_result[0x%x]",
2694 type, j, wdqs->phase[j], wdqs->bdl[j], wl_result);
2695 if (!(wl_result & (1 << j))) {
2696 if (DDR_DELAY_PHASE == type)
2697 ddr_phase_inc(&wdqs->phase[j]);
2698 else
2699 wdqs->bdl[j] += DDR_WL_BDL_STEP;
2700
2701 ddr_write((wdqs->phase[j] << PHY_WDQS_PHASE_BIT)
2702 + (wdqs->bdl[j] << PHY_WDQS_BDL_BIT),
2703 base_phy + DDR_PHY_DXWDQSDLY(cfg->rank_idx, j));
2704 }
2705 }
2706 }
2707
2708 if (i > length) { /* WL error: WDQS delay not found */
2709 if (DDR_DELAY_BDL == type) {
2710 DDR_FATAL("PHY[%x] WL fail, result[%x]",
2711 base_phy, wl_result);
2712 for (j = 0; j < byte_num; j++)
2713 if (!(wl_result & (1 << j)))
2714 ddr_training_stat(DDR_ERR_WL,
2715 base_phy, j, -1);
2716
2717 } else
2718 DDR_DEBUG("PHY[%x] WL phase not found, result[%x]",
2719 base_phy, wl_result);
2720
2721 return -1;
2722 } else
2723 return 0;
2724 }
2725
2726 /**
2727 * Find WDQS delay, sync to WDQ delay and OE delay.
2728 * WL depends on the default WDQS phase value in the register init table.
2729 */
ddr_write_leveling(struct ddr_cfg_st * cfg)2730 int ddr_write_leveling(struct ddr_cfg_st *cfg)
2731 {
2732 unsigned int i, tmp;
2733 struct ddr_delay_st wdqs_old;
2734 struct ddr_delay_st wdqs_new;
2735 int result = 0;
2736
2737 unsigned int base_phy = cfg->cur_phy;
2738 unsigned int base_dmc = cfg->cur_dmc;
2739 unsigned int byte_num = GET_BYTE_NUM(cfg);
2740 unsigned int rank_index = cfg->rank_idx;
2741
2742 DDR_DEBUG("DDR Write Leveling training.");
2743
2744 /* init wdqs */
2745 for (i = 0; i < byte_num; i++) {
2746 tmp = ddr_read(base_phy + DDR_PHY_DXWDQSDLY(rank_index, i));
2747
2748 wdqs_old.phase[i] = (tmp >> PHY_WDQS_PHASE_BIT)
2749 & PHY_WDQS_PHASE_MASK;
2750 wdqs_old.bdl[i] = (tmp >> PHY_WDQS_BDL_BIT) & PHY_BDL_MASK;
2751
2752 wdqs_new.phase[i] = wdqs_old.phase[i];
2753 wdqs_new.bdl[i] = 0;
2754
2755 /* clear wdqs bdl */
2756 ddr_write(wdqs_new.phase[i] << PHY_WDQS_PHASE_BIT,
2757 base_phy + DDR_PHY_DXWDQSDLY(rank_index, i));
2758 }
2759
2760 /* enable sw write leveling mode */
2761 ddr_wl_switch(base_dmc, base_phy, DDR_TRUE);
2762
2763 /* find first WDQS phase, assume CLK delay > DQS delay. */
2764 result = ddr_wl_process(cfg, DDR_DELAY_PHASE, &wdqs_new);
2765
2766 /* check phase result */
2767 for (i = 0; i < byte_num; i++) {
2768 /* if the phase search failed, keep the max value and go on to find the bdl. */
2769 /* if the phase search succeeded, step the phase back once before finding the bdl. */
2770 if (!result)
2771 ddr_phase_dec(&wdqs_new.phase[i]);
2772
2773 ddr_write(wdqs_new.phase[i] << PHY_WDQS_PHASE_BIT,
2774 base_phy + DDR_PHY_DXWDQSDLY(rank_index, i));
2775 }
2776
2777 /* find WDQS bdl */
2778 result = ddr_wl_process(cfg, DDR_DELAY_BDL, &wdqs_new);
2779
2780 /* disable sw write leveling mode */
2781 ddr_wl_switch(base_dmc, base_phy, DDR_FALSE);
2782
2783 if (result) {
2784 /* restore default value when the WDQS search fails */
2785 for (i = 0; i < byte_num; i++) {
2786 tmp = (wdqs_old.phase[i] << PHY_WDQS_PHASE_BIT)
2787 + (wdqs_old.bdl[i] << PHY_WDQS_BDL_BIT);
2788 ddr_write(tmp, base_phy + DDR_PHY_DXWDQSDLY(rank_index, i));
2789 }
2790 ddr_phy_cfg_update(base_phy);
2791 return -1;
2792 }
2793
2794 /* sync delay */
2795 ddr_wl_bdl_sync(cfg, &wdqs_new, &wdqs_old);
2796
2797 #ifdef DDR_WL_DATAEYE_ADJUST_CONFIG
2798 /* adjust WDQ for dataeye */
2799 ddr_wl_wdq_adjust(cfg, &wdqs_new, &wdqs_old);
2800 #endif
2801 return 0;
2802 }
2803
ddr_wl_func(struct ddr_cfg_st * cfg)2804 int ddr_wl_func(struct ddr_cfg_st *cfg)
2805 {
2806 struct tr_relate_reg relate_reg;
2807 int result = 0;
2808
2809 /* write leveling disable */
2810 if (ddr_training_check_bypass(cfg, DDR_BYPASS_WL_MASK))
2811 return 0;
2812
2813 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_WL_MASK);
2814
2815 result += ddr_write_leveling(cfg);
2816
2817 ddr_training_restore_reg(cfg, &relate_reg);
2818
2819 return result;
2820 }
2821 #else
ddr_wl_func(struct ddr_cfg_st * cfg)2822 int ddr_wl_func(struct ddr_cfg_st *cfg)
2823 {
2824 DDR_WARNING("Not support DDR WL training.");
2825 return 0;
2826 }
2827 #endif /* DDR_WL_TRAINING_CONFIG */
2828
2829 #define __gate_training__
2830 #ifdef DDR_GATE_TRAINING_CONFIG
2831 /* Find gate phase */
ddr_gate_find_phase(struct ddr_cfg_st * cfg,struct ddr_delay_st * rdqsg)2832 static int ddr_gate_find_phase(struct ddr_cfg_st *cfg,
2833 struct ddr_delay_st *rdqsg)
2834 {
2835 int i;
2836 unsigned int base_phy = cfg->cur_phy;
2837
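/* Sweep the gate phase down from the maximum until the DDRT compare test passes, then back off one more step as margin before the bdl search. */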
2838 for (i = 0; i < GET_BYTE_NUM(cfg); i++) {
2839 for (rdqsg->phase[i] = PHY_RDQSG_PHASE_MAX;
2840 rdqsg->phase[i] > PHY_GATE_PHASE_MARGIN;
2841 rdqsg->phase[i] -= PHY_RDQSG_PHASE_STEP) {
2842 ddr_write(rdqsg->phase[i] << PHY_RDQSG_PHASE_BIT,
2843 base_phy + DDR_PHY_DXNRDQSGDLY(cfg->rank_idx, i));
2844 ddr_phy_cfg_update(base_phy);
2845 if (0 == ddr_ddrt_test(DDRT_WR_COMPRARE_MODE, i, -1))
2846 break;
2847 }
2848 if (rdqsg->phase[i] <= PHY_GATE_PHASE_MARGIN) {
2849 /* find gate phase fail */
2850 DDR_FATAL("find gate phase[%x] fail.",
2851 rdqsg->phase[i]);
2852 ddr_training_stat(DDR_ERR_GATING, base_phy, -1, -1);
2853 return -1;
2854 } else {
2855 /* decrease one step to find bdl */
2856 rdqsg->phase[i] -= PHY_RDQSG_PHASE_STEP;
2857 ddr_write(rdqsg->phase[i] << PHY_RDQSG_PHASE_BIT,
2858 base_phy + DDR_PHY_DXNRDQSGDLY(cfg->rank_idx, i));
2859 }
2860 }
2861
2862 ddr_phy_cfg_update(base_phy);
2863 return 0;
2864 }
2865
ddr_gate_find_bdl(struct ddr_cfg_st * cfg,struct ddr_delay_st * rdqsg)2866 static int ddr_gate_find_bdl(struct ddr_cfg_st *cfg,
2867 struct ddr_delay_st *rdqsg)
2868 {
2869 int i, j;
2870 unsigned int gate_result;
2871 unsigned int tmp;
2872 unsigned int base_phy = cfg->cur_phy;
2873 unsigned int byte_num = GET_BYTE_NUM(cfg);
2874
2875 unsigned int swtmode = ddr_read(base_phy + DDR_PHY_SWTMODE);
2876
2877 for (i = 0; i < byte_num; i++)
2878 rdqsg->bdl[i] = 0;
2879
2880 /* enable phy sw gate training mode */
2881 ddr_write(swtmode | (1 << PHY_SWTMODE_SW_GTMODE_BIT),
2882 base_phy + DDR_PHY_SWTMODE);
2883
2884 for (i = 0; i < PHY_GATE_BDL_MAX; i++) {
2885 ddr_phy_cfg_update(base_phy);
2886 ddr_ddrt_test(DDRT_READ_ONLY_MODE, -1, -1);
2887 gate_result = (ddr_read(base_phy + DDR_PHY_SWTRLT) >> 8)
2888 & PHY_SWTRLT_GATE_MASK;
2889 if (gate_result == ((1 << byte_num) - 1))
2890 break;
2891
2892 for (j = 0; j < byte_num; j++) {
2893 if (!(gate_result & (1 << j))) {
2894 rdqsg->bdl[j] += DDR_GATE_BDL_STEP;
2895 if (rdqsg->bdl[j] > PHY_BDL_MASK) {
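/* The bdl has overflowed its field: keep the bdl field near its maximum and carry the excess into the RDQSG TX bdl field. */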
2896 tmp = ((rdqsg->bdl[j]
2897 - PHY_BDL_MASK - 1)
2898 << PHY_RDQSG_TX_BDL_BIT)
2899 + (rdqsg->phase[j]
2900 << PHY_RDQSG_PHASE_BIT)
2901 + (PHY_BDL_MASK - 1);
2902 } else {
2903 tmp = (rdqsg->phase[j]
2904 << PHY_RDQSG_PHASE_BIT)
2905 + rdqsg->bdl[j];
2906 }
2907 ddr_write(tmp,
2908 base_phy + DDR_PHY_DXNRDQSGDLY(cfg->rank_idx, j));
2909 }
2910 }
2911 }
2912
2913 /* disable phy sw gate training mode */
2914 ddr_write(swtmode & (~(1 << PHY_SWTMODE_SW_GTMODE_BIT)),
2915 base_phy + DDR_PHY_SWTMODE);
2916
2917 if (i == PHY_GATE_BDL_MAX) { /* find gate bdl fail */
2918 DDR_FATAL("PHY[%x] find gate bdl fail. result[%x]",
2919 base_phy, gate_result);
2920 for (j = 0; j < byte_num; j++)
2921 if (!(gate_result & (1 << j)))
2922 ddr_training_stat(DDR_ERR_GATING,
2923 base_phy, j, -1);
2924 return -1;
2925 } else
2926 return 0;
2927 }
2928
ddr_gate_training(struct ddr_cfg_st * cfg)2929 int ddr_gate_training(struct ddr_cfg_st *cfg)
2930 {
2931 unsigned int i, tmp;
2932 unsigned int byte_num;
2933 struct ddr_delay_st rdqsg;
2934 unsigned int def_delay[DDR_PHY_BYTE_MAX];
2935 int result;
2936 unsigned int base_phy = cfg->cur_phy;
2937
2938 DDR_DEBUG("DDR Gate training.");
2939
2940 byte_num = GET_BYTE_NUM(cfg);
2941
2942 for (i = 0; i < byte_num; i++)
2943 def_delay[i] = ddr_read(base_phy + DDR_PHY_DXNRDQSGDLY(cfg->rank_idx, i));
2944
2945 /* find phase first */
2946 result = ddr_gate_find_phase(cfg, &rdqsg);
2947
2948 /* find bdl */
2949 if (!result)
2950 result = ddr_gate_find_bdl(cfg, &rdqsg);
2951
2952 /* set new phase */
2953 if (!result) {
2954 for (i = 0; i < byte_num; i++) {
2955 rdqsg.phase[i] -= PHY_GATE_PHASE_MARGIN;
2956 tmp = ddr_read(base_phy + DDR_PHY_DXNRDQSGDLY(cfg->rank_idx, i));
2957 tmp &= ~(PHY_RDQSG_PHASE_MASK << PHY_RDQSG_PHASE_BIT);
2958 tmp |= rdqsg.phase[i] << PHY_RDQSG_PHASE_BIT;
2959 ddr_write(tmp, base_phy + DDR_PHY_DXNRDQSGDLY(cfg->rank_idx, i));
2960 }
2961 } else {
2962 /* restore default value */
2963 for (i = 0; i < byte_num; i++)
2964 ddr_write(def_delay[i],
2965 base_phy + DDR_PHY_DXNRDQSGDLY(cfg->rank_idx, i));
2966 }
2967
2968 ddr_phy_cfg_update(base_phy);
2969 return 0; /* use the default value on failure and do not reset */
2970 }
2971
ddr_gating_func(struct ddr_cfg_st * cfg)2972 int ddr_gating_func(struct ddr_cfg_st *cfg)
2973 {
2974 struct tr_relate_reg relate_reg;
2975 int result = 0;
2976
2977 /* gate training disable */
2978 if (ddr_training_check_bypass(cfg, DDR_BYPASS_GATE_MASK)) {
2979 /* check hardware gating */
2980 if (ddr_read(cfg->cur_phy + DDR_PHY_PHYINITSTATUS)
2981 & PHY_INITSTATUS_GT_MASK) {
2982 DDR_FATAL("PHY[%x] hw gating fail.", cfg->cur_phy);
2983 ddr_training_stat(DDR_ERR_HW_GATING,
2984 cfg->cur_phy, -1, -1);
2985 return -1;
2986 }
2987 return 0;
2988 }
2989
2990 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_GATE_MASK);
2991
2992 ddr_training_switch_axi(cfg);
2993 ddr_ddrt_init(cfg, DDR_DDRT_MODE_GATE);
2994 result += ddr_gate_training(cfg);
2995
2996 ddr_training_restore_reg(cfg, &relate_reg);
2997
2998 return result;
2999 }
3000 #else
ddr_gating_func(struct ddr_cfg_st * cfg)3001 int ddr_gating_func(struct ddr_cfg_st *cfg)
3002 {
3003 DDR_WARNING("Not support DDR gate training.");
3004 return 0;
3005 }
3006 #endif /* DDR_GATE_TRAINING_CONFIG */
3007
3008 #define __ac_training__
3009 #ifdef DDR_AC_TRAINING_CONFIG
3010 /**
3011 * Get clk value.
3012 * Assume clk0 and clk1 are the same.
3013 */
ddr_ac_get_clk(unsigned int base_phy)3014 static int ddr_ac_get_clk(unsigned int base_phy)
3015 {
3016 unsigned int val;
3017 unsigned int ac_phy_ctl;
3018 /* Static registers have to be read twice to get the right value. */
3019 ac_phy_ctl = ddr_read(base_phy + DDR_PHY_ACPHYCTL7);
3020 ac_phy_ctl = ddr_read(base_phy + DDR_PHY_ACPHYCTL7);
3021 /* halft_dramclk0 */
3022 val = (ac_phy_ctl >> PHY_ACPHY_DRAMCLK0_BIT)
3023 & PHY_ACPHY_DRAMCLK_MASK;
3024 val = (val << PHY_ACPHY_DRAMCLK_EXT_BIT)
3025 | ((ac_phy_ctl >> PHY_ACPHY_DCLK0_BIT)
3026 & PHY_ACPHY_DCLK_MASK);
3027 return val;
3028 }
3029
3030 /* Set clk0 and clk1 the same value */
ddr_ac_set_clk(unsigned int base_phy,unsigned int val)3031 static void ddr_ac_set_clk(unsigned int base_phy, unsigned int val)
3032 {
3033 unsigned int ac_phy_ctl, dramclk, dclk;
3034 dclk = val & PHY_ACPHY_DCLK_MASK;
3035 dramclk = (val >> PHY_ACPHY_DRAMCLK_EXT_BIT)
3036 & PHY_ACPHY_DRAMCLK_MASK;
3037 /* Static registers have to be read twice to get the right value. */
3038 ac_phy_ctl = ddr_read(base_phy + DDR_PHY_ACPHYCTL7);
3039 ac_phy_ctl = ddr_read(base_phy + DDR_PHY_ACPHYCTL7);
3040 /* clear cp1p_dclk0 */
3041 ac_phy_ctl &= (~(PHY_ACPHY_DCLK_MASK << PHY_ACPHY_DCLK0_BIT));
3042 /* clear ck2p_dclk1 */
3043 ac_phy_ctl &= (~(PHY_ACPHY_DCLK_MASK << PHY_ACPHY_DCLK1_BIT));
3044 /* clear halft_dramclk0 */
3045 ac_phy_ctl &= (~(PHY_ACPHY_DRAMCLK_MASK << PHY_ACPHY_DRAMCLK0_BIT));
3046 /* clear halft_dramclk1 */
3047 ac_phy_ctl &= (~(PHY_ACPHY_DRAMCLK_MASK << PHY_ACPHY_DRAMCLK1_BIT));
3048
3049 ac_phy_ctl |= (dclk << PHY_ACPHY_DCLK0_BIT); /* set cp1p_dclk0 */
3050 ac_phy_ctl |= (dclk << PHY_ACPHY_DCLK1_BIT); /* set cp2p_dclk1 */
3051 /* set halft_dramclk0 */
3052 ac_phy_ctl |= (dramclk << PHY_ACPHY_DRAMCLK0_BIT);
3053 /* set halft_dramclk1 */
3054 ac_phy_ctl |= (dramclk << PHY_ACPHY_DRAMCLK1_BIT);
3055 ddr_write(ac_phy_ctl, base_phy + DDR_PHY_ACPHYCTL7);
3056 }
3057
3058 /**
3059 * Get cs bdl value.
3060 * Assume cs0 and cs1 are the same.
3061 */
ddr_ac_get_cs(unsigned int base_phy)3062 static int ddr_ac_get_cs(unsigned int base_phy)
3063 {
3064 return (ddr_read(base_phy + DDR_PHY_ACCMDBDL2) >> 1) & PHY_BDL_MASK;
3065 }
3066
3067 /* Set CS value */
ddr_ac_set_cs(unsigned int base_phy,unsigned int val)3068 static void ddr_ac_set_cs(unsigned int base_phy, unsigned int val)
3069 {
3070 unsigned int ac_cmd_bdl;
3071 ac_cmd_bdl = ddr_read(base_phy + DDR_PHY_ACCMDBDL2);
3072 ac_cmd_bdl &= (~(PHY_BDL_MASK << PHY_ACCMD_CS0_BIT)); /* clear cs0_bdl */
3073 ac_cmd_bdl &= (~(PHY_BDL_MASK << PHY_ACCMD_CS1_BIT)); /* clear cs1_bdl */
3074 ac_cmd_bdl |= (val << PHY_ACCMD_CS0_BIT); /* set cs0_bdl */
3075 ac_cmd_bdl |= (val << PHY_ACCMD_CS1_BIT); /* set cs1_bdl */
3076 ddr_write(ac_cmd_bdl, base_phy + DDR_PHY_ACCMDBDL2);
3077 }
3078
ddr_ac_ddrt_test(unsigned int mask,unsigned int base_phy)3079 static int ddr_ac_ddrt_test(unsigned int mask, unsigned int base_phy)
3080 {
3081 unsigned int regval;
3082 unsigned int times = 0;
3083
3084 DDRT_REG_WRITE(mask | DDRT_CFG_START, DDR_REG_BASE_DDRT + DDRT_OP);
3085 DDRT_REG_WRITE(0, DDR_REG_BASE_DDRT + DDRT_STATUS);
3086
3087 do {
3088 regval = DDRT_REG_READ(DDR_REG_BASE_DDRT + DDRT_STATUS);
3089 times++;
3090 } while ((!(regval & DDRT_TEST_DONE_MASK))
3091 && (times < DDRT_WAIT_TIMEOUT));
3092
3093 if (times >= DDRT_WAIT_TIMEOUT) {
3094 DDR_FATAL("DDRT wait timeout.");
3095 ddr_training_stat(DDR_ERR_DDRT_TIME_OUT, base_phy, -1, -1);
3096 return -1;
3097 }
3098
3099 /* DDRT_WRITE_ONLY_MODE */
3100 if (DDRT_WRITE_ONLY_MODE == (mask & DDRT_TEST_MODE_MASK))
3101 return 0;
3102
3103 /* DDRT_READ_ONLY_MODE */
3104 if (regval & DDRT_TEST_PASS_MASK) /* No error occurred, test pass. */
3105 return 0;
3106 else
3107 return -1;
3108
3109 }
3110
3111 /* Check CS value */
ddr_ac_check_cs(unsigned int base_phy,unsigned int def_cs,unsigned int step)3112 static int ddr_ac_check_cs(unsigned int base_phy, unsigned int def_cs,
3113 unsigned int step)
3114 {
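/* Write test data with cs delayed by 'step', restore the default cs, then read back; a compare failure means 'step' exceeded the cs margin. */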
3115 ddr_ac_set_cs(base_phy, def_cs + step);
3116 ddr_phy_cfg_update(base_phy);
3117
3118 ddr_ac_ddrt_test(DDRT_WRITE_ONLY_MODE, base_phy);
3119
3120 ddr_ac_set_cs(base_phy, def_cs); /* restore default to check */
3121 ddr_phy_cfg_update(base_phy);
3122
3123 return ddr_ac_ddrt_test(DDRT_READ_ONLY_MODE, base_phy);
3124 }
3125
3126 /* Check CLK value */
ddr_ac_check_clk(struct ddr_cfg_st * cfg,unsigned int def_clk,struct ddr_delay_st * def_phase,unsigned int step)3127 static int ddr_ac_check_clk(struct ddr_cfg_st *cfg, unsigned int def_clk,
3128 struct ddr_delay_st *def_phase,
3129 unsigned int step)
3130 {
3131 int i;
3132 unsigned int wdqs_phase_range, wdq_phase_range, phase_range;
3133 unsigned int base_phy = cfg->cur_phy;
3134 unsigned int byte_num = GET_BYTE_NUM(cfg);
3135
3136 /* set new value */
3137 ddr_ac_set_clk(base_phy, def_clk + step);
3138 for (i = 0; i < byte_num; i++) {
3139 wdqs_phase_range = PHY_WDQS_PHASE_MASK
3140 - ((def_phase->phase[i] >> PHY_WDQS_PHASE_BIT)
3141 & PHY_WDQS_PHASE_MASK);
3142 wdq_phase_range = PHY_WDQ_PHASE_MASK
3143 - ((def_phase->bdl[i] >> PHY_WDQ_PHASE_BIT)
3144 & PHY_WDQ_PHASE_MASK);
3145 phase_range = (wdqs_phase_range < wdq_phase_range)
3146 ? wdqs_phase_range : wdq_phase_range;
3147 phase_range = (phase_range < step) ? phase_range : step;
3148
3149 ddr_write(def_phase->phase[i]
3150 + (phase_range << PHY_WDQS_PHASE_BIT),
3151 base_phy + DDR_PHY_DXWDQSDLY(cfg->rank_idx, i));
3152 ddr_write(def_phase->bdl[i]
3153 + (phase_range << PHY_WDQ_PHASE_BIT),
3154 base_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, i));
3155 }
3156 ddr_phy_cfg_update(base_phy);
3157
3158 ddr_ac_ddrt_test(DDRT_WRITE_ONLY_MODE, base_phy);
3159
3160 /* restore default to check */
3161 ddr_ac_set_clk(base_phy, def_clk);
3162 for (i = 0; i < byte_num; i++) {
3163 ddr_write(def_phase->phase[i],
3164 base_phy + DDR_PHY_DXWDQSDLY(cfg->rank_idx, i));
3165 ddr_write(def_phase->bdl[i],
3166 base_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, i));
3167 }
3168 ddr_phy_cfg_update(base_phy);
3169
3170 return ddr_ac_ddrt_test(DDRT_READ_ONLY_MODE, base_phy);
3171 }
3172
3173 /* Find CS difference */
ddr_ac_find_cs(unsigned int base_phy)3174 static int ddr_ac_find_cs(unsigned int base_phy)
3175 {
3176 unsigned int def_cs, step;
3177
3178 def_cs = ddr_ac_get_cs(base_phy);
3179 for (step = 1; step <= (PHY_BDL_MASK - def_cs); step++) {
3180 if (ddr_ac_check_cs(base_phy, def_cs, step)) {
3181 DDR_DEBUG("PHY[%x] default cs[%x], find diff_cs[%x]",
3182 base_phy, def_cs, step);
3183 break;
3184 }
3185 }
3186
3187 return step;
3188 }
3189
3190 /* Find CLK difference */
ddr_ac_find_clk(struct ddr_cfg_st * cfg)3191 static int ddr_ac_find_clk(struct ddr_cfg_st *cfg)
3192 {
3193 int i;
3194 unsigned int def_clk, step;
3195 struct ddr_delay_st def_phase;
3196 unsigned int base_phy = cfg->cur_phy;
3197 unsigned int byte_num = GET_BYTE_NUM(cfg);
3198
3199 def_clk = ddr_ac_get_clk(base_phy);
3200 for (i = 0; i < byte_num; i++) {
3201 /* WDQS phase */
3202 def_phase.phase[i] = ddr_read(base_phy + DDR_PHY_DXWDQSDLY(cfg->rank_idx, i));
3203 /* WDQ phase */
3204 def_phase.bdl[i] = ddr_read(base_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, i));
3205 }
3206
3207 for (step = 1; step <= (PHY_ACPHY_CLK_MAX - def_clk); step++) {
3208 if (ddr_ac_check_clk(cfg, def_clk, &def_phase, step)) {
3209 DDR_DEBUG("PHY[%x] default clk[%x], find diff_clk[%x]",
3210 base_phy, def_clk, step);
3211 break;
3212 }
3213 }
3214
3215 return step;
3216 }
3217
3218 /* DDR AC training */
ddr_ac_training(struct ddr_cfg_st * cfg)3219 int ddr_ac_training(struct ddr_cfg_st *cfg)
3220 {
3221 unsigned int diff_cs, diff_clk;
3222 unsigned int clk_phase, cs_bdl, phase_tmp;
3223 unsigned int byte_num;
3224 unsigned int wdqs_phase, wdq_phase;
3225 unsigned int wdqs_phase_range, wdq_phase_range, phase_range;
3226 unsigned int def_clk, def_cs;
3227 int i;
3228 unsigned int base_phy = cfg->cur_phy;
3229
3230 DDR_DEBUG("DDR AC training.");
3231
3232 byte_num = GET_BYTE_NUM(cfg);
3233
3234 diff_cs = ddr_ac_find_cs(base_phy); /* setup time(bdl) */
3235 diff_clk = ddr_ac_find_clk(cfg); /* hold time(phase) */
3236 /* cs bdl transform to clk phase */
3237 phase_tmp = diff_cs >> DDR_BDL_PHASE_REL;
3238
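/* Compare the hold margin (clk phase) with the setup margin (cs bdl converted to phase): if hold is larger, shift clk by half the surplus and compensate the WDQS/WDQ phases; otherwise add the surplus to the cs bdl. */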
3239 if (diff_clk > phase_tmp) {
3240 clk_phase = (diff_clk - phase_tmp) >> 1;
3241 def_clk = ddr_ac_get_clk(base_phy);
3242
3243 /* set new value */
3244 ddr_ac_set_clk(base_phy, def_clk + clk_phase);
3245 for (i = 0; i < byte_num; i++) {
3246 wdqs_phase = ddr_read(base_phy + DDR_PHY_DXWDQSDLY(cfg->rank_idx, i));
3247 wdq_phase = ddr_read(base_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, i));
3248
3249 wdqs_phase_range = PHY_WDQS_PHASE_MASK
3250 - ((wdqs_phase >> PHY_WDQS_PHASE_BIT)
3251 & PHY_WDQS_PHASE_MASK);
3252 wdq_phase_range = PHY_WDQ_PHASE_MASK
3253 - ((wdq_phase >> PHY_WDQ_PHASE_BIT)
3254 & PHY_WDQ_PHASE_MASK);
3255 phase_range = (wdqs_phase_range < wdq_phase_range)
3256 ? wdqs_phase_range : wdq_phase_range;
3257 phase_range = (phase_range < clk_phase)
3258 ? phase_range : clk_phase;
3259 ddr_write(wdqs_phase
3260 + (phase_range << PHY_WDQS_PHASE_BIT),
3261 base_phy + DDR_PHY_DXWDQSDLY(cfg->rank_idx, i));
3262 ddr_write(wdq_phase
3263 + (phase_range << PHY_WDQ_PHASE_BIT),
3264 base_phy + DDR_PHY_DXNWDQDLY(cfg->rank_idx, i));
3265 }
3266 DDR_DEBUG("PHY[%x] def clk[%x] add phase[%x]",
3267 base_phy, def_clk, clk_phase);
3268 } else {
3269 def_cs = ddr_ac_get_cs(base_phy);
3270 cs_bdl = 0;
3271 if (diff_cs > (diff_clk << DDR_BDL_PHASE_REL))
3272 cs_bdl = diff_cs - (diff_clk << DDR_BDL_PHASE_REL);
3273
3274 ddr_ac_set_cs(base_phy, def_cs + cs_bdl);
3275 DDR_DEBUG("PHY[%x] def cs[%x] add bdl[%x]",
3276 base_phy, def_cs, cs_bdl);
3277 }
3278
3279 ddr_phy_cfg_update(base_phy);
3280 return 0;
3281 }
3282
ddr_ac_training_func(struct ddr_cfg_st * cfg)3283 int ddr_ac_training_func(struct ddr_cfg_st *cfg)
3284 {
3285 int result = 0;
3286 struct tr_relate_reg relate_reg;
3287
3288 /* AC training disable */
3289 if (ddr_training_check_bypass(cfg, DDR_BYPASS_AC_MASK))
3290 return 0;
3291
3292 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_AC_MASK);
3293
3294 ddr_training_switch_axi(cfg);
3295 ddr_ddrt_init(cfg, DDR_DDRT_MODE_DATAEYE);
3296 result += ddr_ac_training(cfg);
3297
3298 ddr_training_restore_reg(cfg, &relate_reg);
3299
3300 return result;
3301 }
3302 #else
ddr_ac_training_func(struct ddr_cfg_st * cfg)3303 int ddr_ac_training_func(struct ddr_cfg_st *cfg)
3304 {
3305 DDR_WARNING("Not support DDR AC training.");
3306 return 0;
3307 }
3308 #endif /* DDR_AC_TRAINING_CONFIG */
3309
3310 #define __lpca_training__
3311 #ifdef DDR_LPCA_TRAINING_CONFIG
3312 /* Reset address bdl training data */
ddr_lpca_reset(struct ca_data_st * data)3313 static void ddr_lpca_reset(struct ca_data_st *data)
3314 {
3315 unsigned int index;
3316 for (index = 0; index < DDR_PHY_CA_MAX; index++) {
3317 data->left[index] = -1;
3318 data->right[index] = -1;
3319 }
3320
3321 data->min = PHY_ACADDR_BDL_MASK;
3322 data->max = 0;
3323 data->done = 0;
3324 }
3325
3326 /* Get ca bit relation */
ddr_lpca_get_bit(struct ca_data_st * data)3327 static void ddr_lpca_get_bit(struct ca_data_st *data)
3328 {
3329 unsigned int index;
3330 //unsigned int swap_sel;
3331
3332 	/* get ca bits from the four registers */
3333 #if 0
3334 for (index = 0; index < (DDR_PHY_CA_REG_MAX - 1); index++) {
3335 ddr_write(index + 1, data->base_phy + DDR_PHY_CATSWAPINDEX);
3336 swap_sel = ddr_read(data->base_phy + DDR_PHY_CATSWAPSEL);
3337
3338 data->bits[index * 2].bit_p =
3339 swap_sel & PHY_CATSWAPSEL_BIT_MASK;
3340 data->bits[index * 2].bit_n =
3341 (swap_sel >> 8) & PHY_CATSWAPSEL_BIT_MASK;
3342 data->bits[index * 2 + 1].bit_p =
3343 (swap_sel >> 16) & PHY_CATSWAPSEL_BIT_MASK;
3344 data->bits[index * 2 + 1].bit_n =
3345 (swap_sel >> 24) & PHY_CATSWAPSEL_BIT_MASK;
3346 }
3347 #else /* for HiMVPV200 */
3348 for (index = 0; index < (DDR_PHY_CA_REG_MAX - 1); index++) {
3349 		data->bits[index * 2].bit_p = index * 4 + 0;
3350 		data->bits[index * 2].bit_n = index * 4 + 1;
3351 		data->bits[index * 2 + 1].bit_p = index * 4 + 2;
3352 		data->bits[index * 2 + 1].bit_n = index * 4 + 3;
3353 }
3354 #endif
3355
3356 /**
3357 * set ca bit for ca4 and ca9
3358 * ca4 = ca0, ca9 = ca5
3359 */
3360 for (index = 8; index > 4; index--) {
3361 data->bits[index].bit_p = data->bits[index - 1].bit_p;
3362 data->bits[index].bit_n = data->bits[index - 1].bit_n;
3363 }
3364
3365 data->bits[4].bit_p = data->bits[0].bit_p;
3366 data->bits[4].bit_n = data->bits[0].bit_n;
3367 data->bits[9].bit_p = data->bits[5].bit_p;
3368 data->bits[9].bit_n = data->bits[5].bit_n;
3369
3370 #if defined(DDR_TRAINING_CMD)
3371 for (index = 0; index < DDR_PHY_CA_MAX; index++) {
3372 DDR_INFO("CA[%x] bit_p[%x]", index, data->bits[index].bit_p);
3373 DDR_INFO("CA[%x] bit_n[%x]", index, data->bits[index].bit_n);
3374 }
3375 #endif
3376 }
3377
3378 /* Get address bdl default value */
ddr_lpca_get_def(struct ca_data_st * data)3379 static void ddr_lpca_get_def(struct ca_data_st *data)
3380 {
3381 unsigned int index;
3382
3383 for (index = 0; index < DDR_PHY_CA_REG_MAX; index++)
3384 data->def[index] = ddr_read(data->base_phy
3385 + DDR_PHY_ACADDRBDL(index));
3386 }
3387
3388 /* Restore address bdl default value */
ddr_lpca_restore_def(struct ca_data_st * data)3389 static void ddr_lpca_restore_def(struct ca_data_st *data)
3390 {
3391 unsigned int index;
3392
3393 for (index = 0; index < DDR_PHY_CA_REG_MAX; index++)
3394 ddr_write(data->def[index], data->base_phy
3395 + DDR_PHY_ACADDRBDL(index));
3396
3397 ddr_phy_cfg_update(data->base_phy);
3398 }
3399
3400 /* Set address bdl value */
ddr_lpca_set_bdl(unsigned int base_phy,unsigned int bdl)3401 static void ddr_lpca_set_bdl(unsigned int base_phy, unsigned int bdl)
3402 {
3403 unsigned int index;
3404 for (index = 0; index < DDR_PHY_CA_REG_MAX; index++)
3405 ddr_write(bdl | (bdl << PHY_ACADDRBDL_ADDR1_BIT),
3406 base_phy + DDR_PHY_ACADDRBDL(index));
3407
3408 ddr_phy_cfg_update(base_phy);
3409 }
3410
3411 /* Update address bdl value with training result */
ddr_lpca_update_bdl(struct ca_data_st * data)3412 static void ddr_lpca_update_bdl(struct ca_data_st *data)
3413 {
3414 unsigned int index;
3415 unsigned int addr0, addr1;
3416
3417 for (index = 0; index < DDR_PHY_CA_REG_MAX; index++) {
3418 addr0 = (data->left[index * 2] + data->right[index * 2]) >> 1;
3419 addr1 = (data->left[index * 2 + 1]
3420 + data->right[index * 2 + 1]) >> 1;
3421 ddr_write(addr0 | (addr1 << PHY_ACADDRBDL_ADDR1_BIT),
3422 data->base_phy + DDR_PHY_ACADDRBDL(index));
3423 }
3424
3425 ddr_phy_cfg_update(data->base_phy);
3426 }
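
/*
 * The BDL written per CA pin above is the centre of its passing window.
 * For example (illustrative numbers): left = 0x5 and right = 0x15 give
 * (0x5 + 0x15) >> 1 = 0xD, packed two pins per ACADDRBDL register.
 */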
3427
3428 /* Init data before training */
ddr_lpca_init(unsigned int base_dmc,unsigned int base_phy,struct ca_data_st * data)3429 static void ddr_lpca_init(unsigned int base_dmc, unsigned int base_phy,
3430 struct ca_data_st *data)
3431 {
3432 data->base_dmc = base_dmc;
3433 data->base_phy = base_phy;
3434
3435 	/* get ca bit relation */
3436 ddr_lpca_get_bit(data);
3437
3438 /* get ac addr bdl default value */
3439 ddr_lpca_get_def(data);
3440
3441 /* reset training data */
3442 ddr_lpca_reset(data);
3443 }
3444
3445 /* Display training result */
ddr_lpca_display(struct ca_data_st * data)3446 static void ddr_lpca_display(struct ca_data_st *data)
3447 {
3448 #if defined(DDR_TRAINING_CMD)
3449 unsigned int index;
3450
3451 DDR_DEBUG("CA phase[%x = %x]",
3452 data->base_phy + DDR_PHY_ADDRPHBOUND,
3453 ddr_read(data->base_phy + DDR_PHY_ADDRPHBOUND));
3454
3455 for (index = 0; index < DDR_PHY_CA_MAX; index++)
3456 DDR_DEBUG("CA[%x] left[%x] right[%x]",
3457 index, data->left[index], data->right[index]);
3458
3459 DDR_DEBUG("min[%x] max[%x] done[%x]",
3460 data->min, data->max, data->done);
3461 #endif
3462 }
3463
3464 /* Wait lpca command done */
ddr_lpca_wait(volatile union U_PHY_CATCONFIG * ca)3465 static void ddr_lpca_wait(volatile union U_PHY_CATCONFIG *ca)
3466 {
3467 unsigned int count = 0;
3468 while (count < DDR_LPCA_WAIT_TIMEOUT) {
3469 if (1 == ca->bits.sw_cat_dqvalid) {
3470 ca->bits.sw_cat_dqvalid = 0; /* clear */
3471 break;
3472 }
3473
3474 count++;
3475 }
3476
3477 /* generally, count is 0 */
3478 if (count >= DDR_LPCA_WAIT_TIMEOUT)
3479 DDR_ERROR("LPCA wait timeout.");
3480 }
3481
3482 /* Compare dq result and pattern */
ddr_lpca_compare(struct ca_bit_st * ca_bit,unsigned int dq_result,unsigned int pattern_p,unsigned int pattern_n,unsigned int index)3483 static int ddr_lpca_compare(struct ca_bit_st *ca_bit,
3484 unsigned int dq_result, unsigned int pattern_p,
3485 unsigned int pattern_n, unsigned int index)
3486 {
3487 if (((dq_result >> ca_bit->bit_p) & 0x1)
3488 != ((pattern_p >> index) & 0x1))
3489 return -1;
3490
3491 if (((dq_result >> ca_bit->bit_n) & 0x1)
3492 != ((pattern_n >> index) & 0x1))
3493 return -1;
3494
3495 return 0;
3496 }
3497
3498 /* Check each CA whether pass */
ddr_lpca_check(struct ca_data_st * data,unsigned int bdl,unsigned int is_ca49)3499 static void ddr_lpca_check(struct ca_data_st *data, unsigned int bdl,
3500 unsigned int is_ca49)
3501 {
3502 unsigned int dq_result = ddr_read(data->base_phy + DDR_PHY_PHYDQRESULT);
3503 unsigned int pattern_p = ddr_read(data->base_phy
3504 + DDR_PHY_SWCATPATTERN_P) & PHY_CAT_PATTERN_MASK;
3505 unsigned int pattern_n = ddr_read(data->base_phy
3506 + DDR_PHY_SWCATPATTERN_N) & PHY_CAT_PATTERN_MASK;
3507 unsigned int index;
3508
3509 for (index = 0; index < DDR_PHY_CA_MAX; index++) {
3510 if (is_ca49) {
3511 if (4 != index && 9 != index)
3512 continue;
3513 } else {
3514 if (4 == index || 9 == index)
3515 continue;
3516 }
3517
3518 /* compare result and pattern */
3519 if (!ddr_lpca_compare(&data->bits[index],
3520 dq_result, pattern_p, pattern_n, index)) {
3521 /* pass */
3522 if (-1 == data->left[index]) {
3523 data->left[index] = bdl;
3524 /* set min left bound */
3525 if (bdl < data->min)
3526 data->min = bdl;
3527 }
3528
3529 /* unstable border value or abnormal value */
3530 if ((-1 != data->right[index])
3531 && ((bdl - data->right[index]) > 1))
3532 DDR_WARNING("CA[%x] bdl[%x] right[%x] ph[%x]",
3533 index, bdl, data->right[index],
3534 ddr_read(data->base_phy
3535 + DDR_PHY_ADDRPHBOUND));
3536
3537 data->right[index] = bdl;
3538 data->done |= (0x1 << index);
3539
3540 /* set max right bound */
3541 if (data->right[index] > data->max)
3542 data->max = data->right[index];
3543 }
3544 }
3545 }
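
/*
 * Window bookkeeping, in brief: as the BDL sweep runs from 0 upwards, the
 * first passing BDL of a CA pin becomes left[], every later passing BDL
 * updates right[], the done bitmask records which pins have passed at all,
 * and min/max track the outer bounds over all pins. A gap of more than one
 * BDL between consecutive passes is only warned about, not treated as a new
 * window.
 */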
3546
3547 /* Execute lpca command and check result */
ddr_lpca_excute(struct ca_data_st * data,unsigned int bdl,unsigned int is_ca49)3548 static void ddr_lpca_excute(struct ca_data_st *data, unsigned int bdl,
3549 unsigned int is_ca49)
3550 {
3551 volatile union U_PHY_CATCONFIG *ca = (union U_PHY_CATCONFIG *)
3552 (data->base_phy + DDR_PHY_CATCONFIG);
3553
3554 if (is_ca49)
3555 ca->bits.sw_cat_mrw48 = 1;
3556 else
3557 ca->bits.sw_cat_mrw41 = 1;
3558
3559 ddr_lpca_wait(ca);
3560 ca->bits.sw_cat_cke_low = 1;
3561 ddr_lpca_wait(ca);
3562 ca->bits.sw_cat_strobe = 1;
3563 ddr_lpca_wait(ca);
3564
3565 /* check PHYDQRESULT */
3566 ddr_lpca_check(data, bdl, is_ca49);
3567
3568 ca->bits.sw_cat_cke_high = 1;
3569 ddr_lpca_wait(ca);
3570 ca->bits.sw_cat_mrw42 = 1;
3571 ddr_lpca_wait(ca);
3572 }
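
/*
 * Command sequence used above (this matches the LPDDR3 CA training flow as
 * far as this code shows it): issue MRW41 (or MRW48 for CA4/CA9), pull CKE
 * low, strobe the CA pattern and sample the DQ result, then pull CKE high
 * and issue MRW42 to leave CA training mode. Each step polls
 * sw_cat_dqvalid via ddr_lpca_wait().
 */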
3573
3574 /* Find address bdl */
ddr_lpca_find_bdl(struct ca_data_st * data)3575 static int ddr_lpca_find_bdl(struct ca_data_st *data)
3576 {
3577 unsigned int bdl;
3578
3579 for (bdl = 0; bdl <= PHY_ACADDR_BDL_MASK; bdl++) {
3580 /* update bdl */
3581 ddr_lpca_set_bdl(data->base_phy, bdl);
3582
3583 /* ca0~ca3, ca5~ca8 */
3584 ddr_lpca_excute(data, bdl, DDR_FALSE);
3585
3586 /* ca4, ca9 */
3587 ddr_lpca_excute(data, bdl, DDR_TRUE);
3588 }
3589
3590 if (PHY_CAT_PATTERN_MASK == data->done)
3591 return 0;
3592
3593 return -1;
3594 }
3595
3596 /* Loop phase to find valid bdl and phase */
ddr_lpca_loop_phase(struct ca_data_st * data,int step)3597 static int ddr_lpca_loop_phase(struct ca_data_st *data, int step)
3598 {
3599 volatile union U_PHY_ADDRPHBOUND *ph = (union U_PHY_ADDRPHBOUND *)
3600 (data->base_phy + DDR_PHY_ADDRPHBOUND);
3601 unsigned int phase;
3602 unsigned int addrph_def = ph->bits.addrph_a;
3603 int addrph = addrph_def;
3604
3605 for (phase = 0; phase <= PHY_ADDRPH_MASK; phase++) {
3606 /* reset ca training data */
3607 ddr_lpca_reset(data);
3608
3609 /* find bdl */
3610 if (!ddr_lpca_find_bdl(data))
3611 return 0;
3612
3613 addrph += step;
3614 if (addrph < 0 || addrph > PHY_ADDRPH_MASK)
3615 break;
3616
3617 ph->bits.addrph_a = addrph;
3618 ddr_phy_cfg_update(data->base_phy);
3619 }
3620
3621 /* restore default value */
3622 DDR_DEBUG("current phase[%x = %x], restore default[%x]",
3623 ph, *ph, addrph_def);
3624 ph->bits.addrph_a = addrph_def;
3625 return -1;
3626 }
3627
3628 /* Find a valid phase */
ddr_lpca_find_phase(struct ca_data_st * data)3629 static int ddr_lpca_find_phase(struct ca_data_st *data)
3630 {
3631 /* increase default value to find */
3632 if (!ddr_lpca_loop_phase(data, 1))
3633 return 0;
3634
3635 /* decrease default value to find */
3636 if (!ddr_lpca_loop_phase(data, -1))
3637 return 0;
3638
3639 return -1;
3640 }
3641
3642 /* Set step to adjust address window */
ddr_lpca_set_step(struct ca_data_st * data)3643 static int ddr_lpca_set_step(struct ca_data_st *data)
3644 {
3645 	/* max window, no need to adjust */
3646 if (0 == data->min && PHY_ACADDR_BDL_MASK == data->max)
3647 return 0;
3648
3649 if (0 == data->min)
3650 return -1; /* window on left, move to right */
3651 else
3652 return 1; /* window on right, move to left */
3653 }
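
/*
 * Direction choice above, illustrated over the BDL range
 * [0, PHY_ACADDR_BDL_MASK] ('-' = pass, 'x' = fail):
 *   min == 0 && max == mask : "-------" full window, step 0 (keep phase)
 *   min == 0 && max <  mask : "----xxx" window sits on the left, step -1
 *                             (decrease phase so the window moves right)
 *   min >  0                : "xxx----" window sits on the right, step +1
 *                             (increase phase so the window moves left)
 */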
3654
3655 /**
3656  * Adjust the address window by changing the phase.
3657  * Increasing the phase moves the window to the left.
3658  */
ddr_lpca_adjust(struct ca_data_st * data)3659 static void ddr_lpca_adjust(struct ca_data_st *data)
3660 {
3661 int step = 0;
3662 volatile union U_PHY_ADDRPHBOUND *ph = (union U_PHY_ADDRPHBOUND *)
3663 (data->base_phy + DDR_PHY_ADDRPHBOUND);
3664 unsigned int phase;
3665 unsigned int addrph_last = ph->bits.addrph_a;
3666 int addrph_cur = addrph_last;
3667
3668 /* set step to increase or decrease phase */
3669 step = ddr_lpca_set_step(data);
3670
3671 if (!step)
3672 return;
3673
3674 for (phase = 0; phase <= PHY_ADDRPH_MASK; phase++) {
3675 addrph_cur += step;
3676 if (addrph_cur < 0 || addrph_cur > PHY_ADDRPH_MASK)
3677 return;
3678
3679 ph->bits.addrph_a = addrph_cur;
3680 ddr_phy_cfg_update(data->base_phy);
3681
3682 /* reset ca training data */
3683 ddr_lpca_reset(data);
3684
3685 if (ddr_lpca_find_bdl(data)) {
3686 /* not find bdl, restore last value */
3687 addrph_cur -= step;
3688 ddr_lpca_find_bdl(data);
3689 return;
3690 }
3691
3692 /* max window: ------- */
3693 if (0 == data->min && PHY_ACADDR_BDL_MASK == data->max)
3694 return;
3695
3696 /* last window: -----xx */
3697 if (0 == data->min && 1 == step) {
3698 /* last value is best */
3699 addrph_cur -= step;
3700 ph->bits.addrph_a = addrph_cur;
3701 ddr_phy_cfg_update(data->base_phy);
3702 ddr_lpca_reset(data);
3703 ddr_lpca_find_bdl(data);
3704 return;
3705 }
3706
3707 /* best window: x-----x */
3708 if (0 < data->min && -1 == step)
3709 return;
3710 }
3711 }
3712
3713 /* Low power DDR CA training */
ddr_lpca_training(struct ddr_cfg_st * cfg)3714 int ddr_lpca_training(struct ddr_cfg_st *cfg)
3715 {
3716 volatile union U_PHY_CATCONFIG *ca = (union U_PHY_CATCONFIG *)
3717 (cfg->cur_phy + DDR_PHY_CATCONFIG);
3718
3719 struct ca_data_st data;
3720 int ret = -1;
3721
3722 DDR_DEBUG("DDR LPCA training.");
3723
3724 /* init data */
3725 ddr_lpca_init(cfg->cur_dmc, cfg->cur_phy, &data);
3726
3727 /* enable sw ca training, wait 62.5ns */
3728 ca->bits.sw_cat_en = 1;
3729
3730 /* find a valid phase first */
3731 ret = ddr_lpca_find_phase(&data);
3732
3733 /* display training result */
3734 ddr_lpca_display(&data);
3735
3736 if (ret) {
3737 /* restore default value when fail */
3738 ddr_lpca_restore_def(&data);
3739 DDR_ERROR("PHY[%x] found phase fail, result[%x].",
3740 cfg->cur_phy, data.done);
3741 ddr_training_stat(DDR_ERR_LPCA, cfg->cur_phy, -1, -1);
3742 } else {
3743 /* adjust window via phase */
3744 ddr_lpca_adjust(&data);
3745 ddr_lpca_display(&data);
3746 /* set training result */
3747 ddr_lpca_update_bdl(&data);
3748 }
3749
3750 /* disable sw ca training */
3751 ca->bits.sw_cat_en = 0;
3752
3753 	/* save lpca result data for later printing */
3754 ddr_lpca_data_save(&data);
3755
3756 return ret;
3757 }
3758
ddr_lpca_training_func(struct ddr_cfg_st * cfg)3759 int ddr_lpca_training_func(struct ddr_cfg_st *cfg)
3760 {
3761 int result = 0;
3762 struct tr_relate_reg relate_reg;
3763
3764 /* LPCA training disable */
3765 if (ddr_training_check_bypass(cfg, DDR_BYPASS_LPCA_MASK))
3766 return 0;
3767
3768 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_LPCA_MASK);
3769
3770 	/* only low power DDR3 (LPDDR3) is supported */
3771 if (PHY_DRAMCFG_TYPE_LPDDR3 ==
3772 (ddr_read(cfg->cur_phy + DDR_PHY_DRAMCFG)
3773 & PHY_DRAMCFG_TYPE_LPDDR3))
3774 result += ddr_lpca_training(cfg);
3775
3776 ddr_training_restore_reg(cfg, &relate_reg);
3777
3778 return result;
3779 }
3780 #else
ddr_lpca_training_func(struct ddr_cfg_st * cfg)3781 int ddr_lpca_training_func(struct ddr_cfg_st *cfg)
3782 {
3783 DDR_WARNING("Not support LPDDR CA training.");
3784 return 0;
3785 }
3786 #endif /* DDR_LPCA_TRAINING_CONFIG */
3787
3788 /* s40/t28/t16 do not support DCC training */
3789 #define __dcc_training__
3790 #ifdef DDR_DCC_TRAINING_CONFIG
3791 /* Save two rank RDET result */
ddr_save_two_rank_bdl(struct ddr_cfg_st * cfg,struct dcc_data_st * dcc_data)3792 static void ddr_save_two_rank_bdl(struct ddr_cfg_st *cfg, struct dcc_data_st *dcc_data)
3793 {
3794 unsigned int byte_idx;
3795 unsigned int base_phy = cfg->cur_phy;
3796 unsigned int rank_idx = cfg->rank_idx;
3797 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
3798
3799 for (byte_idx = 0; byte_idx < byte_num; byte_idx++) {
3800 dcc_data->rank[rank_idx].dq03[byte_idx] = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL0(rank_idx, byte_idx));
3801 dcc_data->rank[rank_idx].dq47[byte_idx] = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL1(rank_idx, byte_idx));
3802 dcc_data->rank[rank_idx].rdm[byte_idx] = ddr_read(base_phy + DDR_PHY_DXNRDQNBDL2(rank_idx, byte_idx));
3803 dcc_data->rank[rank_idx].rdqs[byte_idx] = ddr_read(base_phy + DDR_PHY_DXNRDQSDLY(byte_idx));
3804
3805 DDR_DEBUG("rank[%x] dq03[%x] dq47[%x] rdm[%x] rdqs[%x]", rank_idx,
3806 dcc_data->rank[rank_idx].dq03[byte_idx],
3807 dcc_data->rank[rank_idx].dq47[byte_idx],
3808 dcc_data->rank[rank_idx].rdm[byte_idx],
3809 dcc_data->rank[rank_idx].rdqs[byte_idx]);
3810 }
3811 }
3812
3813 /* Restore two rank RDET result */
ddr_restore_two_rank_bdl(struct ddr_cfg_st * cfg,struct dcc_data_st * dcc_data)3814 static void ddr_restore_two_rank_bdl(struct ddr_cfg_st *cfg, struct dcc_data_st *dcc_data)
3815 {
3816 unsigned int byte_idx;
3817 unsigned int base_phy = cfg->cur_phy;
3818 unsigned int rank_idx = cfg->rank_idx;
3819 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
3820
3821 for (byte_idx = 0; byte_idx < byte_num; byte_idx++) {
3822 ddr_write(dcc_data->rank[rank_idx].dq03[byte_idx], base_phy + DDR_PHY_DXNRDQNBDL0(rank_idx, byte_idx));
3823 ddr_write(dcc_data->rank[rank_idx].dq47[byte_idx], base_phy + DDR_PHY_DXNRDQNBDL1(rank_idx, byte_idx));
3824 ddr_write(dcc_data->rank[rank_idx].rdm[byte_idx], base_phy + DDR_PHY_DXNRDQNBDL2(rank_idx, byte_idx));
3825 ddr_write(dcc_data->rank[rank_idx].rdqs[byte_idx], base_phy + DDR_PHY_DXNRDQSDLY(byte_idx));
3826 }
3827 }
3828
3829 /* DMC_CFG_SREF: exit self-refresh, enter powerdown */
ddr_exit_sref_enter_pd(struct ddr_cfg_st * cfg,struct dmc_cfg_sref_st * cfg_sref)3830 static void ddr_exit_sref_enter_pd(struct ddr_cfg_st *cfg, struct dmc_cfg_sref_st *cfg_sref)
3831 {
3832 int i;
3833 struct ddr_phy_st *phy_st = &cfg->phy[cfg->phy_idx];
3834
3835 for (i = 0; i < phy_st->dmc_num; i++) {
3836 cfg_sref->val[i] = ddr_read(phy_st->dmc[i].addr + DDR_DMC_CFG_SREF);
3837 ddr_write((cfg_sref->val[i] & (~DMC_CFG_INIT_XSREF_PD_MASK)) | DMC_CFG_INIT_XSREF_PD,
3838 phy_st->dmc[i].addr + DDR_DMC_CFG_SREF);
3839 }
3840 }
3841
3842 /* Restore DMC_CFG_SREF config */
ddr_restore_sref_cfg(struct ddr_cfg_st * cfg,struct dmc_cfg_sref_st * cfg_sref)3843 static void ddr_restore_sref_cfg(struct ddr_cfg_st *cfg, struct dmc_cfg_sref_st *cfg_sref)
3844 {
3845 int i;
3846 struct ddr_phy_st *phy_st = &cfg->phy[cfg->phy_idx];
3847
3848 for (i = 0; i < phy_st->dmc_num; i++) {
3849 ddr_write(cfg_sref->val[i], phy_st->dmc[i].addr + DDR_DMC_CFG_SREF);
3850 }
3851 }
3852
3853 /* DCC RDET training */
ddr_dcc_dataeye_read(struct ddr_cfg_st * cfg)3854 static int ddr_dcc_dataeye_read(struct ddr_cfg_st *cfg)
3855 {
3856 /* 0:PHY_TRAINCTRL0_DTR_RANK0, 1:PHY_TRAINCTRL0_DTR_RANK1 */
3857 DDR_PHY_SWITCH_RANK(cfg->cur_phy, cfg->rank_idx);
3858 return ddr_hw_training_process(cfg, PHY_PHYINITCTRL_RDET_EN);
3859 }
3860
3861 /* Duty direction ctl */
ddr_dcc_ck_ctl(struct ddr_cfg_st * cfg,unsigned int ioctl21_def,unsigned int ctl_index)3862 static int ddr_dcc_ck_ctl(struct ddr_cfg_st *cfg, unsigned int ioctl21_def, unsigned int ctl_index)
3863 {
3864 unsigned int ioctl21;
3865 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type) {
3866 ioctl21 = (ioctl21_def & (~(1 << PHY_ACIOCTL21_CTL0_BIT))
3867 & (~(1 << PHY_ACIOCTL21_CTL1_BIT)))
3868 | (ctl_index << PHY_ACIOCTL21_CTL0_BIT)
3869 | (ctl_index << PHY_ACIOCTL21_CTL1_BIT);
3870 ddr_write(ioctl21, cfg->cur_phy + DDR_PHY_ACIOCTL21);
3871 } else {
3872 ioctl21 = (ioctl21_def & (~(1 << PHY_ACIOCTL21_CTL0_BIT)))
3873 | (ctl_index << PHY_ACIOCTL21_CTL0_BIT);
3874 ddr_write(ioctl21, cfg->cur_phy + DDR_PHY_ACIOCTL21);
3875 }
3876 return ioctl21;
3877 }
3878
3879 /* Duty Correction */
ddr_dcc_correct_duty(struct ddr_cfg_st * cfg,unsigned int cur_duty,unsigned int duty_def)3880 static int ddr_dcc_correct_duty(struct ddr_cfg_st *cfg, unsigned int cur_duty, unsigned int duty_def)
3881 {
3882 unsigned int ioctl21;
3883 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type) {
3884 /* Correct CK0 & CK1 duty */
3885 ioctl21 = (duty_def & (~(PHY_ACIOCTL21_MASK << PHY_ACIOCTL21_CK0_BIT))
3886 & (~(PHY_ACIOCTL21_MASK << PHY_ACIOCTL21_CK1_BIT)))
3887 | (cur_duty << PHY_ACIOCTL21_CK0_BIT)
3888 | (cur_duty << PHY_ACIOCTL21_CK1_BIT);
3889 ddr_write(ioctl21, cfg->cur_phy + DDR_PHY_ACIOCTL21);
3890 } else {
3891 /* Correct CK0 duty */
3892 ioctl21 = (duty_def & (~(PHY_ACIOCTL21_MASK << PHY_ACIOCTL21_CK0_BIT)))
3893 | (cur_duty << PHY_ACIOCTL21_CK0_BIT);
3894 ddr_write(ioctl21, cfg->cur_phy + DDR_PHY_ACIOCTL21);
3895 }
3896
3897 return ioctl21;
3898 }
3899
3900 /* Duty Correction Control get win data */
ddr_dcc_get_win(struct dcc_data_st * dcc_data,int ck_index,int val_index)3901 static int ddr_dcc_get_win(struct dcc_data_st *dcc_data, int ck_index, int val_index)
3902 {
3903 unsigned int win;
3904 unsigned int rdqsbdl_right;
3905 unsigned int rdqsbdl_left;
3906 rdqsbdl_right = dcc_data->ck[ck_index].val[val_index] >> PHY_DXNRDBOUND_RIGHT_BIT & PHY_DXNRDBOUND_MASK;
3907 rdqsbdl_left = dcc_data->ck[ck_index].val[val_index] >> PHY_DXNRDBOUND_LEFT_BIT & PHY_DXNRDBOUND_MASK;
3908 win = rdqsbdl_right - rdqsbdl_left;
3909 return win;
3910 }
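
/*
 * The read window of a byte lane is taken from its DXNRDBOUND register as
 * (right bound - left bound). For example (illustrative values), a left
 * field of 0x10 and a right field of 0x30 give a window of 0x20 rdqsbdl
 * steps; a larger window is better.
 */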
3911
3912 /* Duty Correction Control get the min win of two bytes */
ddr_dcc_get_min_win(struct dcc_data_st * dcc_data,int ck_index)3913 static int ddr_dcc_get_min_win(struct dcc_data_st *dcc_data, int ck_index)
3914 {
3915 int i;
3916 unsigned int win_min;
3917 unsigned int cur_win;
3918 win_min = ddr_dcc_get_win(dcc_data, ck_index, 0);
3919 for (i = 0; i < DDR_CK_RESULT_MAX; i++) {
3920 cur_win = ddr_dcc_get_win(dcc_data, ck_index, i);
3921 DDR_DEBUG("CK win[%x] = [%x]", i, cur_win);
3922 if (cur_win < win_min) {
3923 win_min = cur_win;
3924 }
3925 }
3926 return win_min;
3927 }
3928
3929 /* Duty Correction Control get ck0 min win */
ddr_dcc_get_ck0_win(struct ddr_cfg_st * cfg,struct dcc_data_st * dcc_data,int rank_index,unsigned int ck0_win_min)3930 static int ddr_dcc_get_ck0_win(struct ddr_cfg_st *cfg, struct dcc_data_st *dcc_data,
3931 int rank_index, unsigned int ck0_win_min)
3932 {
3933 int ck_index = 0;
3934 unsigned int byte_index;
3935 unsigned int ck0_win;
3936 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
3937
3938 for (byte_index = 0; byte_index < (byte_num/2); byte_index++) {
3939 dcc_data->ck[ck_index].val[byte_index] = ddr_read(cfg->cur_phy + DDR_PHY_DXNRDBOUND(byte_index));
3940 }
3941
3942 ck0_win = ddr_dcc_get_min_win(dcc_data, ck_index);
3943
3944 if (ck0_win < ck0_win_min)
3945 ck0_win_min = ck0_win;
3946
3947 return ck0_win_min;
3948 }
3949
3950 /* Duty Correction Control get ck1 min win */
ddr_dcc_get_ck1_win(struct ddr_cfg_st * cfg,struct dcc_data_st * dcc_data,int rank_index,unsigned int ck1_win_min)3951 static int ddr_dcc_get_ck1_win(struct ddr_cfg_st *cfg, struct dcc_data_st *dcc_data,
3952 int rank_index, unsigned int ck1_win_min)
3953 {
3954 int ck_index = 1;
3955 unsigned int byte_index;
3956 unsigned int ck1_win;
3957 unsigned int byte_num = cfg->phy[cfg->phy_idx].total_byte_num;
3958
3959 for (byte_index = 2; byte_index < byte_num; byte_index++) {
3960 dcc_data->ck[ck_index].val[byte_index - 2] = ddr_read(cfg->cur_phy + DDR_PHY_DXNRDBOUND(byte_index));
3961 }
3962
3963 ck1_win = ddr_dcc_get_min_win(dcc_data, ck_index);
3964
3965 if (ck1_win < ck1_win_min)
3966 ck1_win_min = ck1_win;
3967
3968 return ck1_win_min;
3969 }
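
/*
 * Byte-to-CK mapping assumed by the two helpers above: CK0 covers the lower
 * half of the byte lanes (bytes 0 .. byte_num/2 - 1), while CK1 covers
 * bytes 2 .. byte_num - 1 (LPDDR4 only). Each helper keeps the smaller of
 * the freshly measured minimum window and the value passed in.
 */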
3970
dcc_data_init(struct dcc_data_st * dcc_data)3971 static void dcc_data_init(struct dcc_data_st *dcc_data)
3972 {
3973 dcc_data->ck[0].win_min_ctl = 0xffffffff;
3974 dcc_data->ck[0].win_max_ctl = 0x0;
3975 dcc_data->ck[1].win_min_ctl = 0xffffffff;
3976 dcc_data->ck[1].win_max_ctl = 0x0;
3977 dcc_data->ck[0].idx_duty = 0;
3978 dcc_data->ck[0].idx_duty_ctl = 0;
3979 dcc_data->ck[0].idx_ctl = 0;
3980 dcc_data->ck[1].idx_duty = 0;
3981 dcc_data->ck[1].idx_duty_ctl = 0;
3982 dcc_data->ck[1].idx_ctl = 0;
3983 dcc_data->ck[0].BYPASS_CK_BIT = PHY_BYPASS_CK0_BIT;
3984 dcc_data->ck[0].ACIOCTL21_CTL_BIT = PHY_ACIOCTL21_CTL0_BIT;
3985 dcc_data->ck[0].ACIOCTL21_CK_BIT = PHY_ACIOCTL21_CK0_BIT;
3986 dcc_data->ck[1].BYPASS_CK_BIT = PHY_BYPASS_CK1_BIT;
3987 dcc_data->ck[1].ACIOCTL21_CTL_BIT = PHY_ACIOCTL21_CTL1_BIT;
3988 dcc_data->ck[1].ACIOCTL21_CK_BIT = PHY_ACIOCTL21_CK1_BIT;
3989 }
3990
3991 /* dcc training get window by rank */
ddr_dcc_get_win_by_rank(struct ddr_cfg_st * cfg,struct dcc_data_st * dcc_data)3992 static int ddr_dcc_get_win_by_rank(struct ddr_cfg_st *cfg, struct dcc_data_st *dcc_data)
3993 {
3994 int i;
3995 int result = 0;
3996 unsigned int rank_num = cfg->phy[cfg->phy_idx].rank_num;
3997 for (i = 0; i < rank_num; i++) {
3998 DDR_DEBUG("cur_rank = [%x]", i);
3999 cfg->rank_idx = i;
4000 /* RDET */
4001 result += ddr_dcc_dataeye_read(cfg);
4002
4003 /* Get win */
4004 dcc_data->ck[0].win = ddr_dcc_get_ck0_win(cfg, dcc_data, i, dcc_data->ck[0].win);
4005 DDR_DEBUG("ck0 win = [%x]", dcc_data->ck[0].win);
4006
4007 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type) {
4008 dcc_data->ck[1].win = ddr_dcc_get_ck1_win(cfg, dcc_data, i, dcc_data->ck[1].win);
4009 DDR_DEBUG("ck1 win = [%x]", dcc_data->ck[1].win);
4010 }
4011
4012 /* Restore two rank bdl */
4013 ddr_restore_two_rank_bdl(cfg, dcc_data);
4014 }
4015 return result;
4016 }
4017
4018 /* ddr dcc training compare result */
ddr_dcc_compare_result(struct dcc_data_st * dcc_data,int ck_num,unsigned int base_phy,unsigned int gated_bypass_def,unsigned int ioctl21_def)4019 static void ddr_dcc_compare_result(struct dcc_data_st *dcc_data, int ck_num,
4020 unsigned int base_phy, unsigned int gated_bypass_def, unsigned int ioctl21_def)
4021 {
4022 int ck_idx;
4023
4024 for (ck_idx = 0; ck_idx < ck_num; ck_idx++) {
4025 		/* Config ck duty */
4026 if (dcc_data->ck[ck_idx].win_max_ctl - dcc_data->ck[ck_idx].win_min_ctl <= 2) {
4027 dcc_data->ck[ck_idx].def_bp = gated_bypass_def >> dcc_data->ck[ck_idx].BYPASS_CK_BIT & 0x1;
4028 dcc_data->ck[ck_idx].def_ctl = ioctl21_def >> dcc_data->ck[ck_idx].ACIOCTL21_CTL_BIT & 0x1;
4029 dcc_data->ck[ck_idx].def_duty = ioctl21_def >> dcc_data->ck[ck_idx].ACIOCTL21_CK_BIT & PHY_ACIOCTL21_MASK;
4030
4031 gated_bypass_def = (gated_bypass_def & (~(1 << dcc_data->ck[ck_idx].BYPASS_CK_BIT)))
4032 | (dcc_data->ck[ck_idx].def_bp << dcc_data->ck[ck_idx].BYPASS_CK_BIT);
4033 ddr_write(gated_bypass_def, base_phy + DDR_PHY_AC_GATED_BYPASS);
4034
4035 ioctl21_def = (ioctl21_def & (~(1 << dcc_data->ck[ck_idx].ACIOCTL21_CTL_BIT))
4036 & (~(PHY_ACIOCTL21_MASK << dcc_data->ck[ck_idx].ACIOCTL21_CK_BIT)))
4037 | (dcc_data->ck[ck_idx].def_ctl << dcc_data->ck[ck_idx].ACIOCTL21_CTL_BIT)
4038 | (dcc_data->ck[ck_idx].def_duty << dcc_data->ck[ck_idx].ACIOCTL21_CK_BIT);
4039 ddr_write(ioctl21_def, base_phy + DDR_PHY_ACIOCTL21);
4040
4041 DDR_DEBUG("ck[%x] Final AC_GATED_BYPASS[%x]", ck_idx, gated_bypass_def);
4042 DDR_DEBUG("ck[%x] Final ACIOCTL21[%x]", ck_idx, ioctl21_def);
4043 } else {
4044 ioctl21_def = (ioctl21_def & (~(1 << dcc_data->ck[ck_idx].ACIOCTL21_CTL_BIT))
4045 & (~(PHY_ACIOCTL21_MASK << dcc_data->ck[ck_idx].ACIOCTL21_CK_BIT)))
4046 | (dcc_data->ck[ck_idx].idx_ctl << dcc_data->ck[ck_idx].ACIOCTL21_CTL_BIT)
4047 | (dcc_data->ck[ck_idx].idx_duty_ctl << dcc_data->ck[ck_idx].ACIOCTL21_CK_BIT);
4048 ddr_write(ioctl21_def, base_phy + DDR_PHY_ACIOCTL21);
4049
4050 DDR_DEBUG("ck[%x] Final ACIOCTL21[%x]", ck_idx, ioctl21_def);
4051 }
4052 }
4053 }
4054
ddr_dcc_get_best_duty(struct ddr_cfg_st * cfg,struct dmc_cfg_sref_st * cfg_sref,struct dcc_data_st * dcc_data)4055 static int ddr_dcc_get_best_duty(struct ddr_cfg_st *cfg,
4056 struct dmc_cfg_sref_st *cfg_sref, struct dcc_data_st *dcc_data)
4057 {
4058 int ck_idx;
4059 int ck_num;
4060 int result = 0;
4061 unsigned int cur_ctl;
4062 unsigned int cur_duty;
4063 unsigned int base_phy = cfg->cur_phy;
4064 unsigned int ioctl21_def;
4065 unsigned int gated_bypass_def, gated_bypass_temp;
4066
4067 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type)
4068 ck_num = 2;
4069 else
4070 ck_num = 1;
4071
4072 dcc_data_init(dcc_data);
4073
4074 	/* Save ck duty default config. Read twice to get the correct static register value. */
4075 gated_bypass_def = ddr_read(base_phy + DDR_PHY_AC_GATED_BYPASS);
4076 gated_bypass_def = ddr_read(base_phy + DDR_PHY_AC_GATED_BYPASS);
4077 ioctl21_def = ddr_read(base_phy + DDR_PHY_ACIOCTL21);
4078 ioctl21_def = ddr_read(base_phy + DDR_PHY_ACIOCTL21);
4079
4080 DDR_DEBUG("gated_bypass_def[%x] ioctl21_def[%x]", gated_bypass_def, ioctl21_def);
4081
4082 	/* DCC training: exit self-refresh, enter powerdown. */
4083 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type)
4084 ddr_exit_sref_enter_pd(cfg, cfg_sref);
4085
4086 /* DDR dcc training enter auto self-refresh. */
4087 if (ddr_training_ctrl_easr(cfg, DDR_ENTER_SREF))
4088 return -1;
4089
4090 /* Enable ck0 & ck1 duty. */
4091 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type) {
4092 gated_bypass_temp = gated_bypass_def | PHY_CK1_IOCTL_DUTY_EN | PHY_CK_IOCTL_DUTY_EN;
4093 ddr_write(gated_bypass_temp, base_phy + DDR_PHY_AC_GATED_BYPASS);
4094 } else {
4095 gated_bypass_temp = gated_bypass_def | PHY_CK_IOCTL_DUTY_EN;
4096 ddr_write(gated_bypass_temp, base_phy + DDR_PHY_AC_GATED_BYPASS);
4097 }
4098 DDR_DEBUG("Cur GATED_BYPASS[%x]", gated_bypass_temp);
4099
4100 if (ddr_training_ctrl_easr(cfg, DDR_EXIT_SREF))
4101 return -1;
4102
4103 for (cur_ctl = 0; cur_ctl < DDR_DUTY_CTL_NUM; cur_ctl++) {
4104 dcc_data->ck[0].win_min_duty = 0xffffffff;
4105 dcc_data->ck[0].win_max_duty = 0x0;
4106 dcc_data->ck[1].win_min_duty = 0xffffffff;
4107 dcc_data->ck[1].win_max_duty = 0x0;
4108
4109 DDR_DEBUG("cur_ctl = [%x]", cur_ctl);
4110
4111 if (ddr_training_ctrl_easr(cfg, DDR_ENTER_SREF))
4112 return -1;
4113
4114 		/* Correct CK duty direction control */
4115 dcc_data->ioctl21_tmp = ddr_dcc_ck_ctl(cfg, ioctl21_def, cur_ctl);
4116
4117 if (ddr_training_ctrl_easr(cfg, DDR_EXIT_SREF))
4118 return -1;
4119
4120 for (cur_duty = 0; cur_duty < DDR_DUTY_NUM; cur_duty += PHY_AC_IOCTL21_STEP) {
4121 dcc_data->ck[0].win = 0xffffffff;
4122 dcc_data->ck[1].win = 0xffffffff;
4123
4124 DDR_DEBUG("cur_duty = [%x]", cur_duty);
4125 /* Correct ck0 and ck1 duty */
4126 if (ddr_training_ctrl_easr(cfg, DDR_ENTER_SREF))
4127 return -1;
4128 dcc_data->ioctl21_tmp = ddr_dcc_correct_duty(cfg, cur_duty, dcc_data->ioctl21_tmp);
4129 if (ddr_training_ctrl_easr(cfg, DDR_EXIT_SREF))
4130 return -1;
4131 DDR_DEBUG("Cur ACIOCTL21[%x]", dcc_data->ioctl21_tmp);
4132
4133 result = ddr_dcc_get_win_by_rank(cfg, dcc_data);
4134
4135 /* Get ck0/ck1 duty_win_min/duty_win_max/duty_index */
4136 for (ck_idx = 0; ck_idx < ck_num; ck_idx++) {
4137 if (dcc_data->ck[ck_idx].win < dcc_data->ck[ck_idx].win_min_duty)
4138 dcc_data->ck[ck_idx].win_min_duty = dcc_data->ck[ck_idx].win;
4139
4140 if (dcc_data->ck[ck_idx].win > dcc_data->ck[ck_idx].win_max_duty) {
4141 dcc_data->ck[ck_idx].win_max_duty = dcc_data->ck[ck_idx].win;
4142 dcc_data->ck[ck_idx].idx_duty = cur_duty;
4143 }
4144 DDR_DEBUG("ck[%x] duty_win_min[%x] duty_win_max[%x] duty_index[%x]", ck_idx,
4145 dcc_data->ck[ck_idx].win_min_duty,
4146 dcc_data->ck[ck_idx].win_max_duty,
4147 dcc_data->ck[ck_idx].idx_duty);
4148 }
4149 }
4150
4151 for (ck_idx = 0; ck_idx < ck_num; ck_idx++) {
4152 /* Get ck0/ck1 duty_win_min/duty_win_max/duty_index */
4153 if (dcc_data->ck[ck_idx].win_min_duty < dcc_data->ck[ck_idx].win_min_ctl)
4154 dcc_data->ck[ck_idx].win_min_ctl = dcc_data->ck[ck_idx].win_min_duty;
4155
4156 if (dcc_data->ck[ck_idx].win_max_duty > dcc_data->ck[ck_idx].win_max_ctl) {
4157 dcc_data->ck[ck_idx].win_max_ctl = dcc_data->ck[ck_idx].win_max_duty;
4158 dcc_data->ck[ck_idx].idx_duty_ctl = dcc_data->ck[ck_idx].idx_duty;
4159 dcc_data->ck[ck_idx].idx_ctl = cur_ctl;
4160 }
4161 DDR_DEBUG("ck[%x] win_min_ctl[%x] win_max_ctl[%x] ctl_index0[%x] duty_ctl_idx0[%x]", ck_idx,
4162 dcc_data->ck[ck_idx].win_min_ctl,
4163 dcc_data->ck[ck_idx].win_max_ctl,
4164 dcc_data->ck[ck_idx].idx_ctl,
4165 dcc_data->ck[ck_idx].idx_duty_ctl);
4166 }
4167 }
4168
4169 /* Config ck duty */
4170 	/* DCC training: exit self-refresh, enter powerdown. */
4171 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type)
4172 ddr_exit_sref_enter_pd(cfg, cfg_sref);
4173
4174 /* DDR dcc training enter auto self-refresh. */
4175 if (ddr_training_ctrl_easr(cfg, DDR_ENTER_SREF))
4176 return -1;
4177
4178 /* DDR dcc training compare result. */
4179 ddr_dcc_compare_result(dcc_data, ck_num, base_phy, gated_bypass_def, ioctl21_def);
4180
4181 /* DDR dcc training exit auto self-refresh. */
4182 if (ddr_training_ctrl_easr(cfg, DDR_EXIT_SREF))
4183 return -1;
4184
4185 return result;
4186 }
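
/*
 * Search structure of ddr_dcc_get_best_duty(), in brief: an outer loop over
 * DDR_DUTY_CTL_NUM duty-direction settings and an inner loop over duty codes
 * (step PHY_AC_IOCTL21_STEP) re-run read training per rank and record, per
 * CK, the smallest and largest read window seen. The (ctl, duty) pair with
 * the largest window is kept as the candidate; ddr_dcc_compare_result() then
 * applies it only when the spread between best and worst window exceeds 2,
 * otherwise the default duty configuration is restored.
 */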
4187
4188 #ifdef DDR_TRAINING_DEBUG
4189 #define DDR_TRINING_BREAK_POINT(name) ddr_training_break_point(name)
4190 #else
4191 #define DDR_TRINING_BREAK_POINT(name)
4192 #endif
4193
ddr_training_break_point(const char * name)4194 void ddr_training_break_point(const char* name)
4195 {
4196 DDR_INFO(name);
4197 ddr_training_console_if(0);
4198 }
4199
ddr_dcc_training(struct ddr_cfg_st * cfg)4200 int ddr_dcc_training(struct ddr_cfg_st *cfg)
4201 {
4202 int i;
4203 int result = 0;
4204 unsigned int rank_num = cfg->phy[cfg->phy_idx].rank_num;
4205
4206 struct dmc_cfg_sref_st cfg_sref;
4207 struct ddr_timing_st timing_st;
4208 struct dcc_data_st dcc_st;
4209 struct dcc_data_st *dcc_data = &dcc_st;
4210
4211 DDR_DEBUG("dram_type[%x]", cfg->phy[cfg->phy_idx].dram_type);
4212
4213 DDR_DEBUG("rank num[%x]", rank_num);
4214
4215 	/* Save two-rank RDET default result: rdq/rdqs/rdm bdl */
4216 for (i = 0; i < rank_num; i++) {
4217 cfg->rank_idx = i;
4218 ddr_save_two_rank_bdl(cfg, dcc_data);
4219 }
4220
4221 /* Disable auto refresh */
4222 ddr_training_save_timing(cfg, &timing_st);
4223
4224 /* Duty Correction Control training. */
4225 result += ddr_dcc_get_best_duty(cfg, &cfg_sref, dcc_data);
4226
4227 	/* Do RDET training again */
4228 for (i = 0; i < rank_num; i++) {
4229 cfg->rank_idx = i;
4230 dcc_data->item[i] = cfg->phy[cfg->phy_idx].rank[i].item_hw;
4231 cfg->phy[cfg->phy_idx].rank[i].item_hw = PHY_PHYINITCTRL_HVREFT_EN;
4232 DDR_DEBUG("item_hw[%x]=[%x]", i, cfg->phy[cfg->phy_idx].rank[i].item_hw);
4233 }
4234
4235 result += ddr_hw_training_by_phy(cfg);
4236
4237 for (i = 0; i < rank_num; i++) {
4238 cfg->rank_idx = i;
4239 cfg->phy[cfg->phy_idx].rank[i].item_hw = dcc_data->item[i];
4240 }
4241
4242 /* Enable auto refresh */
4243 ddr_training_restore_timing(cfg, &timing_st);
4244
4245 if (PHY_DRAMCFG_TYPE_LPDDR4 == cfg->phy[cfg->phy_idx].dram_type) {
4246 /* DCC restore DMC_CFG_SREF config. */
4247 ddr_restore_sref_cfg(cfg, &cfg_sref);
4248 }
4249
4250 return result;
4251 }
4252
ddr_dcc_training_func(struct ddr_cfg_st * cfg)4253 int ddr_dcc_training_func(struct ddr_cfg_st *cfg)
4254 {
4255 int i;
4256 int result = 0;
4257
4258 for (i = 0; i < cfg->phy_num; i++) {
4259 cfg->phy_idx = i;
4260 cfg->cur_phy = cfg->phy[i].addr;
4261 cfg->cur_item = cfg->phy[i].rank[0].item;
4262
4263 if (ddr_training_check_bypass(cfg, 1 << (cfg->phy_idx)))
4264 continue;
4265
4266 		/* dcc training disable */
4267 if (!ddr_training_check_bypass(cfg, DDR_BYPASS_DCC_MASK))
4268 result += ddr_dcc_training(cfg);
4269 }
4270 return result;
4271 }
4272
4273 #else
ddr_dcc_training_func(struct ddr_cfg_st * cfg)4274 int ddr_dcc_training_func(struct ddr_cfg_st *cfg)
4275 {
4276 DDR_WARNING("Not support DCC training.");
4277 return 0;
4278 }
4279 #endif /* DDR_DCC_TRAINING_CONFIG */
4280
4281 #define __pcode_training__
4282 #ifdef DDR_PCODE_TRAINING_CONFIG
4283 /* Set pcode value to register IMPSTATUS and DDR_PHY_IMP_STATUS1 */
ddr_pcode_set_value(unsigned int base_phy,unsigned int pcode_value)4284 static void ddr_pcode_set_value(unsigned int base_phy, unsigned int pcode_value)
4285 {
4286 unsigned int imp_ctrl1;
4287
4288 ddr_write((ddr_read(base_phy + DDR_PHY_IMPSTATUS)
4289 & (~(PHY_ZCODE_PDRV_MASK << PHY_ZCODE_PDRV_BIT)))
4290 | (pcode_value << PHY_ZCODE_PDRV_BIT), base_phy + DDR_PHY_IMPSTATUS);
4291 DDR_DEBUG("cur IMPSTATUS [%x] = [%x]",
4292 base_phy + DDR_PHY_IMPSTATUS, ddr_read(base_phy + DDR_PHY_IMPSTATUS));
4293
4294 imp_ctrl1 = ddr_read(base_phy + DDR_PHY_IMP_CTRL1);
4295 /* ac_vddq_cal_en set 0 */
4296 ddr_write(imp_ctrl1 & (~(0x1 << PHY_AC_VDDQ_CAL_EN_BIT)), base_phy + DDR_PHY_IMP_CTRL1);
4297
4298 ddr_write((ddr_read(base_phy + DDR_PHY_IMP_STATUS1)
4299 & (~(PHY_ACCTL_PDRV_LATCH_MASK << PHY_ACCTL_PDRV_LATCH_BIT)))
4300 | (pcode_value << PHY_ACCTL_PDRV_LATCH_BIT), base_phy + DDR_PHY_IMP_STATUS1);
4301 DDR_DEBUG("cur IMP_STATUS1 [%x] = [%x]",
4302 base_phy + DDR_PHY_IMP_STATUS1, ddr_read(base_phy + DDR_PHY_IMP_STATUS1));
4303
4304 /* restore ac_vddq_cal_en */
4305 ddr_write(imp_ctrl1, base_phy + DDR_PHY_IMP_CTRL1);
4306 }
4307
ddr_pcode_trainig_by_phy(struct ddr_cfg_st * cfg)4308 static int ddr_pcode_trainig_by_phy(struct ddr_cfg_st *cfg)
4309 {
4310 unsigned int times = 0;
4311 unsigned int base_phy = cfg->cur_phy;
4312 unsigned int pcode_value;
4313 unsigned int osc_rpt_vld;
4314 unsigned int osc_cnt_rdata;
4315 int ddr_freq;
4316
4317 /* test start */
4318 ddr_write(ddr_read(base_phy + DDR_PHY_CORNER_DETECTOR) | PHY_OSC_START_MASK,
4319 base_phy + DDR_PHY_CORNER_DETECTOR);
4320
4321 do {
4322 osc_rpt_vld = (ddr_read(base_phy + DDR_PHY_CORNER_DETECTOR)
4323 >> PHY_OSC_RPT_VLD) & PHY_OSC_RPT_VLD_MASK;
4324 times++;
4325 } while ((!osc_rpt_vld)
4326 && (times < DDRT_PCODE_WAIT_TIMEOUT));
4327
4328 if (times >= DDRT_PCODE_WAIT_TIMEOUT) {
4329 DDR_FATAL("IO pcode training wait timeout.");
4330 return -1;
4331 }
4332
4333 osc_cnt_rdata = (ddr_read(base_phy + DDR_PHY_CORNER_DETECTOR)
4334 >> PHY_OSC_CNT_RDATA_BIT)
4335 & PHY_OSC_CNT_RDATA_MASK;
4336
4337 /* test stop */
4338 ddr_write(ddr_read(base_phy + DDR_PHY_CORNER_DETECTOR)
4339 & (~PHY_OSC_START_MASK), base_phy + DDR_PHY_CORNER_DETECTOR);
4340
4341 ddr_freq = ddr_get_cksel();
4342 /* get pcode value */
4343 pcode_value = (490960 - (89 * osc_cnt_rdata * ddr_freq) / 300) / 10000;
4344
4345 DDR_DEBUG("pcode value[%x]", pcode_value);
4346 if (pcode_value < PHY_PCODE_MIN) {
4347 pcode_value = PHY_PCODE_MIN;
4348 } else if (pcode_value > PHY_PCODE_MAX) {
4349 pcode_value = PHY_PCODE_MAX;
4350 }
4351
4352 /* set pcode value */
4353 ddr_pcode_set_value(base_phy, pcode_value);
4354 return 0;
4355 }
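
/*
 * Example of the pcode computation above (illustrative numbers only; the
 * constants 490960, 89, 300 and 10000 are the fixed calibration values used
 * in the code, and the unit returned by ddr_get_cksel() is assumed to be
 * MHz): with osc_cnt_rdata = 100 and ddr_freq = 1800,
 *   pcode = (490960 - (89 * 100 * 1800) / 300) / 10000
 *         = (490960 - 53400) / 10000 = 43 (integer division),
 * which is then clamped to [PHY_PCODE_MIN, PHY_PCODE_MAX] before being
 * written by ddr_pcode_set_value().
 */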
4356
ddr_pcode_training(struct ddr_cfg_st * cfg)4357 int ddr_pcode_training(struct ddr_cfg_st *cfg)
4358 {
4359 struct tr_relate_reg relate_reg;
4360 int result = 0;
4361 int i;
4362
4363 for (i = 0; i < cfg->phy_num; i++) {
4364 cfg->phy_idx = i;
4365 cfg->cur_phy = cfg->phy[i].addr;
4366 cfg->cur_item = cfg->phy[i].rank[0].item;
4367
4368 if (ddr_training_check_bypass(cfg, 1 << (cfg->phy_idx)))
4369 continue;
4370
4371 /* pcode training disable */
4372 if (ddr_training_check_bypass(cfg, DDR_BYPASS_PCODE_MASK))
4373 continue;
4374
4375 ddr_training_save_reg(cfg, &relate_reg, DDR_BYPASS_PCODE_MASK);
4376 result += ddr_pcode_trainig_by_phy(cfg);
4377 ddr_training_restore_reg(cfg, &relate_reg);
4378 }
4379
4380 return result;
4381 }
4382 #else
ddr_pcode_training(struct ddr_cfg_st * cfg)4383 int ddr_pcode_training(struct ddr_cfg_st *cfg)
4384 {
4385 DDR_WARNING("Not support DDR pcode training.");
4386 return 0;
4387 }
4388 #endif
4389