1 /*
2 * Copyright (c) 2021, Renesas Electronics Corporation. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7 #include <stdint.h>
8 #include <string.h>
9
10 #include <common/debug.h>
11 #include <lib/mmio.h>
12
13 #include "boot_init_dram.h"
14 #include "boot_init_dram_regdef.h"
15 #include "ddr_regdef.h"
16 #include "dram_sub_func.h"
17 #include "init_dram_tbl_g2m.h"
18 #include "micro_delay.h"
19 #include "rcar_def.h"
20
21 /* load board configuration */
22 #include "boot_init_dram_config.c"
23
24 #define DDR_BACKUPMODE
25 #define FATAL_MSG(x) NOTICE(x)
26
27 /* variables */
28 #ifdef RCAR_DDR_FIXED_LSI_TYPE
29 #ifndef RCAR_AUTO
30 #define RCAR_AUTO 99U
31 #define RZ_G2M 100U
32
33 #define RCAR_CUT_10 0U
34 #define RCAR_CUT_11 1U
35 #define RCAR_CUT_20 10U
36 #define RCAR_CUT_30 20U
37 #endif /* RCAR_AUTO */
38 #ifndef RCAR_LSI
39 #define RCAR_LSI RCAR_AUTO
40 #endif
41
42 #if (RCAR_LSI == RCAR_AUTO)
43 static uint32_t prr_product;
44 static uint32_t prr_cut;
45 #else /* RCAR_LSI == RCAR_AUTO */
46 #if (RCAR_LSI == RZ_G2M)
47 static const uint32_t prr_product = PRR_PRODUCT_M3;
48 #endif /* RCAR_LSI == RZ_G2M */
49
50 #ifndef RCAR_LSI_CUT
51 static uint32_t prr_cut;
52 #else /* RCAR_LSI_CUT */
53 #if (RCAR_LSI_CUT == RCAR_CUT_10)
54 static const uint32_t prr_cut = PRR_PRODUCT_10;
55 #elif(RCAR_LSI_CUT == RCAR_CUT_11)
56 static const uint32_t prr_cut = PRR_PRODUCT_11;
57 #elif(RCAR_LSI_CUT == RCAR_CUT_20)
58 static const uint32_t prr_cut = PRR_PRODUCT_20;
59 #elif(RCAR_LSI_CUT == RCAR_CUT_30)
60 static const uint32_t prr_cut = PRR_PRODUCT_30;
61 #endif /* RCAR_LSI_CUT == RCAR_CUT_10 */
62 #endif /* RCAR_LSI_CUT */
63 #endif /* RCAR_LSI == RCAR_AUTO */
64 #else /* RCAR_DDR_FIXED_LSI_TYPE */
65 static uint32_t prr_product;
66 static uint32_t prr_cut;
67 #endif /* RCAR_DDR_FIXED_LSI_TYPE */
68
69 static const uint32_t *p_ddr_regdef_tbl;
70 static uint32_t brd_clk;
71 static uint32_t brd_clkdiv;
72 static uint32_t brd_clkdiva;
73 static uint32_t ddr_mbps;
74 static uint32_t ddr_mbpsdiv;
75 static uint32_t ddr_tccd;
76 static uint32_t ddr_phycaslice;
77 static const struct _boardcnf *board_cnf;
78 static uint32_t ddr_phyvalid;
79 static uint32_t ddr_density[DRAM_CH_CNT][CS_CNT];
80 static uint32_t ch_have_this_cs[CS_CNT] __aligned(64);
81 static uint32_t rdqdm_dly[DRAM_CH_CNT][CSAB_CNT][SLICE_CNT * 2U][9U];
82 static uint32_t max_density;
83 static uint32_t ddr0800_mul;
84 static uint32_t ddr_mul;
85 static uint32_t DDR_PHY_SLICE_REGSET_OFS;
86 static uint32_t DDR_PHY_ADR_V_REGSET_OFS;
87 static uint32_t DDR_PHY_ADR_I_REGSET_OFS;
88 static uint32_t DDR_PHY_ADR_G_REGSET_OFS;
89 static uint32_t DDR_PI_REGSET_OFS;
90 static uint32_t DDR_PHY_SLICE_REGSET_SIZE;
91 static uint32_t DDR_PHY_ADR_V_REGSET_SIZE;
92 static uint32_t DDR_PHY_ADR_I_REGSET_SIZE;
93 static uint32_t DDR_PHY_ADR_G_REGSET_SIZE;
94 static uint32_t DDR_PI_REGSET_SIZE;
95 static uint32_t DDR_PHY_SLICE_REGSET_NUM;
96 static uint32_t DDR_PHY_ADR_V_REGSET_NUM;
97 static uint32_t DDR_PHY_ADR_I_REGSET_NUM;
98 static uint32_t DDR_PHY_ADR_G_REGSET_NUM;
99 static uint32_t DDR_PI_REGSET_NUM;
100 static uint32_t DDR_PHY_ADR_I_NUM;
101 #define DDR_PHY_REGSET_MAX 128
102 #define DDR_PI_REGSET_MAX 320
103 static uint32_t _cnf_DDR_PHY_SLICE_REGSET[DDR_PHY_REGSET_MAX];
104 static uint32_t _cnf_DDR_PHY_ADR_V_REGSET[DDR_PHY_REGSET_MAX];
105 static uint32_t _cnf_DDR_PHY_ADR_I_REGSET[DDR_PHY_REGSET_MAX];
106 static uint32_t _cnf_DDR_PHY_ADR_G_REGSET[DDR_PHY_REGSET_MAX];
107 static uint32_t _cnf_DDR_PI_REGSET[DDR_PI_REGSET_MAX];
108 static uint32_t pll3_mode;
109 static uint32_t loop_max;
110 #ifdef DDR_BACKUPMODE
111 uint32_t ddr_backup = DRAM_BOOT_STATUS_COLD;
112 /* #define DDR_BACKUPMODE_HALF */ /* for Half channel(ch0,1 only) */
113 #endif
114
115 #ifdef DDR_QOS_INIT_SETTING /* used only when the separate QoS init is not performed */
116 #define OPERATING_FREQ (400U) /* Mhz */
117 #define BASE_SUB_SLOT_NUM (0x6U)
118 #define SUB_SLOT_CYCLE (0x7EU) /* 126 */
119 #define QOSWT_WTSET0_CYCLE \
120 ((SUB_SLOT_CYCLE * BASE_SUB_SLOT_NUM * 1000U) / \
121 OPERATING_FREQ) /* unit:ns */
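/*
 * Illustration (using the values above): SUB_SLOT_CYCLE(126) *
 * BASE_SUB_SLOT_NUM(6) * 1000 / OPERATING_FREQ(400) = 1890 ns,
 * which is the refresh period returned by get_refperiod() below.
 */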
122
123 uint32_t get_refperiod(void)
124 {
125 return QOSWT_WTSET0_CYCLE;
126 }
127 #else /* DDR_QOS_INIT_SETTING */
128 extern uint32_t get_refperiod(void);
129 #endif /* DDR_QOS_INIT_SETTING */
130
131 #define _reg_PHY_RX_CAL_X_NUM 11U
132 static const uint32_t _reg_PHY_RX_CAL_X[_reg_PHY_RX_CAL_X_NUM] = {
133 _reg_PHY_RX_CAL_DQ0,
134 _reg_PHY_RX_CAL_DQ1,
135 _reg_PHY_RX_CAL_DQ2,
136 _reg_PHY_RX_CAL_DQ3,
137 _reg_PHY_RX_CAL_DQ4,
138 _reg_PHY_RX_CAL_DQ5,
139 _reg_PHY_RX_CAL_DQ6,
140 _reg_PHY_RX_CAL_DQ7,
141 _reg_PHY_RX_CAL_DM,
142 _reg_PHY_RX_CAL_DQS,
143 _reg_PHY_RX_CAL_FDBK
144 };
145
146 #define _reg_PHY_CLK_WRX_SLAVE_DELAY_NUM 10U
147 static const uint32_t _reg_PHY_CLK_WRX_SLAVE_DELAY
148 [_reg_PHY_CLK_WRX_SLAVE_DELAY_NUM] = {
149 _reg_PHY_CLK_WRDQ0_SLAVE_DELAY,
150 _reg_PHY_CLK_WRDQ1_SLAVE_DELAY,
151 _reg_PHY_CLK_WRDQ2_SLAVE_DELAY,
152 _reg_PHY_CLK_WRDQ3_SLAVE_DELAY,
153 _reg_PHY_CLK_WRDQ4_SLAVE_DELAY,
154 _reg_PHY_CLK_WRDQ5_SLAVE_DELAY,
155 _reg_PHY_CLK_WRDQ6_SLAVE_DELAY,
156 _reg_PHY_CLK_WRDQ7_SLAVE_DELAY,
157 _reg_PHY_CLK_WRDM_SLAVE_DELAY,
158 _reg_PHY_CLK_WRDQS_SLAVE_DELAY
159 };
160
161 #define _reg_PHY_RDDQS_X_FALL_SLAVE_DELAY_NUM 9U
162 static const uint32_t _reg_PHY_RDDQS_X_FALL_SLAVE_DELAY
163 [_reg_PHY_RDDQS_X_FALL_SLAVE_DELAY_NUM] = {
164 _reg_PHY_RDDQS_DQ0_FALL_SLAVE_DELAY,
165 _reg_PHY_RDDQS_DQ1_FALL_SLAVE_DELAY,
166 _reg_PHY_RDDQS_DQ2_FALL_SLAVE_DELAY,
167 _reg_PHY_RDDQS_DQ3_FALL_SLAVE_DELAY,
168 _reg_PHY_RDDQS_DQ4_FALL_SLAVE_DELAY,
169 _reg_PHY_RDDQS_DQ5_FALL_SLAVE_DELAY,
170 _reg_PHY_RDDQS_DQ6_FALL_SLAVE_DELAY,
171 _reg_PHY_RDDQS_DQ7_FALL_SLAVE_DELAY,
172 _reg_PHY_RDDQS_DM_FALL_SLAVE_DELAY
173 };
174
175 #define _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY_NUM 9U
176 static const uint32_t _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY
177 [_reg_PHY_RDDQS_X_RISE_SLAVE_DELAY_NUM] = {
178 _reg_PHY_RDDQS_DQ0_RISE_SLAVE_DELAY,
179 _reg_PHY_RDDQS_DQ1_RISE_SLAVE_DELAY,
180 _reg_PHY_RDDQS_DQ2_RISE_SLAVE_DELAY,
181 _reg_PHY_RDDQS_DQ3_RISE_SLAVE_DELAY,
182 _reg_PHY_RDDQS_DQ4_RISE_SLAVE_DELAY,
183 _reg_PHY_RDDQS_DQ5_RISE_SLAVE_DELAY,
184 _reg_PHY_RDDQS_DQ6_RISE_SLAVE_DELAY,
185 _reg_PHY_RDDQS_DQ7_RISE_SLAVE_DELAY,
186 _reg_PHY_RDDQS_DM_RISE_SLAVE_DELAY
187 };
188
189 #define _reg_PHY_PAD_TERM_X_NUM 8U
190 static const uint32_t _reg_PHY_PAD_TERM_X[_reg_PHY_PAD_TERM_X_NUM] = {
191 _reg_PHY_PAD_FDBK_TERM,
192 _reg_PHY_PAD_DATA_TERM,
193 _reg_PHY_PAD_DQS_TERM,
194 _reg_PHY_PAD_ADDR_TERM,
195 _reg_PHY_PAD_CLK_TERM,
196 _reg_PHY_PAD_CKE_TERM,
197 _reg_PHY_PAD_RST_TERM,
198 _reg_PHY_PAD_CS_TERM
199 };
200
201 #define _reg_PHY_CLK_CACS_SLAVE_DELAY_X_NUM 10U
202 static const uint32_t _reg_PHY_CLK_CACS_SLAVE_DELAY_X
203 [_reg_PHY_CLK_CACS_SLAVE_DELAY_X_NUM] = {
204 _reg_PHY_ADR0_CLK_WR_SLAVE_DELAY,
205 _reg_PHY_ADR1_CLK_WR_SLAVE_DELAY,
206 _reg_PHY_ADR2_CLK_WR_SLAVE_DELAY,
207 _reg_PHY_ADR3_CLK_WR_SLAVE_DELAY,
208 _reg_PHY_ADR4_CLK_WR_SLAVE_DELAY,
209 _reg_PHY_ADR5_CLK_WR_SLAVE_DELAY,
210
211 _reg_PHY_GRP_SLAVE_DELAY_0,
212 _reg_PHY_GRP_SLAVE_DELAY_1,
213 _reg_PHY_GRP_SLAVE_DELAY_2,
214 _reg_PHY_GRP_SLAVE_DELAY_3
215 };
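/*
 * Note: the first six entries above are CA (ADR) delays held in the PHY
 * ADR_V register block, while the four GRP delays live in the ADR_G block;
 * ddr_config() below programs them through _cnf_DDR_PHY_ADR_V_REGSET and
 * _cnf_DDR_PHY_ADR_G_REGSET respectively.
 */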
216
217 /* Prototypes */
218 static inline uint32_t vch_nxt(uint32_t pos);
219 static void cpg_write_32(uint32_t a, uint32_t v);
220 static void pll3_control(uint32_t high);
221 static inline void dsb_sev(void);
222 static void wait_dbcmd(void);
223 static void send_dbcmd(uint32_t cmd);
224 static uint32_t reg_ddrphy_read(uint32_t phyno, uint32_t regadd);
225 static void reg_ddrphy_write(uint32_t phyno, uint32_t regadd, uint32_t regdata);
226 static void reg_ddrphy_write_a(uint32_t regadd, uint32_t regdata);
227 static inline uint32_t ddr_regdef(uint32_t _regdef);
228 static inline uint32_t ddr_regdef_adr(uint32_t _regdef);
229 static inline uint32_t ddr_regdef_lsb(uint32_t _regdef);
230 static void ddr_setval_s(uint32_t ch, uint32_t slice, uint32_t _regdef,
231 uint32_t val);
232 static uint32_t ddr_getval_s(uint32_t ch, uint32_t slice, uint32_t _regdef);
233 static void ddr_setval(uint32_t ch, uint32_t regdef, uint32_t val);
234 static void ddr_setval_ach_s(uint32_t slice, uint32_t regdef, uint32_t val);
235 static void ddr_setval_ach(uint32_t regdef, uint32_t val);
236 static void ddr_setval_ach_as(uint32_t regdef, uint32_t val);
237 static uint32_t ddr_getval(uint32_t ch, uint32_t regdef);
238 static uint32_t ddr_getval_ach(uint32_t regdef, uint32_t *p);
239 static uint32_t ddr_getval_ach_as(uint32_t regdef, uint32_t *p);
240 static void _tblcopy(uint32_t *to, const uint32_t *from, uint32_t size);
241 static void ddrtbl_setval(uint32_t *tbl, uint32_t _regdef, uint32_t val);
242 static uint32_t ddrtbl_getval(uint32_t *tbl, uint32_t _regdef);
243 static uint32_t ddrphy_regif_chk(void);
244 static inline void ddrphy_regif_idle(void);
245 static uint16_t _f_scale(uint32_t _ddr_mbps, uint32_t _ddr_mbpsdiv, uint32_t ps,
246 uint16_t cyc);
247 static void _f_scale_js2(uint32_t _ddr_mbps, uint32_t _ddr_mbpsdiv,
248 uint16_t *_js2);
249 static int16_t _f_scale_adj(int16_t ps);
250 static void ddrtbl_load(void);
251 static void ddr_config_sub(void);
252 static void ddr_config(void);
253 static void dbsc_regset(void);
254 static void dbsc_regset_post(void);
255 static uint32_t dfi_init_start(void);
256 static void change_lpddr4_en(uint32_t mode);
257 static uint32_t set_term_code(void);
258 static void ddr_register_set(void);
259 static inline uint32_t wait_freqchgreq(uint32_t assert);
260 static inline void set_freqchgack(uint32_t assert);
261 static inline void set_dfifrequency(uint32_t freq);
262 static uint32_t pll3_freq(uint32_t on);
263 static void update_dly(void);
264 static uint32_t pi_training_go(void);
265 static uint32_t init_ddr(void);
266 static uint32_t swlvl1(uint32_t ddr_csn, uint32_t reg_cs, uint32_t reg_kick);
267 static uint32_t wdqdm_man1(void);
268 static uint32_t wdqdm_man(void);
269 static uint32_t rdqdm_man1(void);
270 static uint32_t rdqdm_man(void);
271
272 static int32_t _find_change(uint64_t val, uint32_t dir);
273 static uint32_t _rx_offset_cal_updn(uint32_t code);
274 static uint32_t rx_offset_cal(void);
275 static uint32_t rx_offset_cal_hw(void);
276 static void adjust_wpath_latency(void);
277
278 struct ddrt_data {
279 int32_t init_temp; /* Initial Temperature (do) */
280 uint32_t init_cal[4U]; /* Initial io-code (4 is for G2H) */
281 uint32_t tcomp_cal[4U]; /* Temp. compensated io-code (4 is for G2H) */
282 };
283
284 static struct ddrt_data tcal;
285
286 static void pvtcode_update(void);
287 static void pvtcode_update2(void);
288 static void ddr_padcal_tcompensate_getinit(uint32_t override);
289
290 #ifndef DDR_FAST_INIT
291 static uint32_t rdqdm_le[DRAM_CH_CNT][CS_CNT][SLICE_CNT * 2U][9U];
292 static uint32_t rdqdm_te[DRAM_CH_CNT][CS_CNT][SLICE_CNT * 2U][9U];
293 static uint32_t rdqdm_nw[DRAM_CH_CNT][CS_CNT][SLICE_CNT * 2U][9U];
294 static uint32_t rdqdm_win[DRAM_CH_CNT][CS_CNT][SLICE_CNT];
295 static uint32_t rdqdm_st[DRAM_CH_CNT][CS_CNT][SLICE_CNT * 2U];
296 static void rdqdm_clr1(uint32_t ch, uint32_t ddr_csn);
297 static uint32_t rdqdm_ana1(uint32_t ch, uint32_t ddr_csn);
298
299 static uint32_t wdqdm_le[DRAM_CH_CNT][CS_CNT][SLICE_CNT][9U];
300 static uint32_t wdqdm_te[DRAM_CH_CNT][CS_CNT][SLICE_CNT][9U];
301 static uint32_t wdqdm_dly[DRAM_CH_CNT][CS_CNT][SLICE_CNT][9U];
302 static uint32_t wdqdm_st[DRAM_CH_CNT][CS_CNT][SLICE_CNT];
303 static uint32_t wdqdm_win[DRAM_CH_CNT][CS_CNT][SLICE_CNT];
304 static void wdqdm_clr1(uint32_t ch, uint32_t ddr_csn);
305 static uint32_t wdqdm_ana1(uint32_t ch, uint32_t ddr_csn);
306 #endif/* DDR_FAST_INIT */
307
308 /* macro for channel selection loop */
309 static inline uint32_t vch_nxt(uint32_t pos)
310 {
311 uint32_t posn;
312
313 for (posn = pos; posn < DRAM_CH_CNT; posn++) {
314 if ((ddr_phyvalid & (1U << posn)) != 0U) {
315 break;
316 }
317 }
318 return posn;
319 }
320
321 #define foreach_vch(ch) \
322 for (ch = vch_nxt(0U); ch < DRAM_CH_CNT; ch = vch_nxt(ch + 1U))
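/*
 * Example: with ddr_phyvalid == 0x5 (channels 0 and 2 populated),
 * foreach_vch(ch) visits ch = 0 and ch = 2 only, whereas foreach_ech(ch)
 * below always walks all DRAM_CH_CNT channels.
 */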
323
324 #define foreach_ech(ch) \
325 for (ch = 0U; ch < DRAM_CH_CNT; ch++)
326
327 /* Printing functions */
328 #define MSG_LF(...)
329
330 /* clock settings, reset control */
331 static void cpg_write_32(uint32_t a, uint32_t v)
332 {
333 mmio_write_32(CPG_CPGWPR, ~v);
334 mmio_write_32(a, v);
335 }
336
337 static void wait_for_pll3_status_bit_turned_on(void)
338 {
339 uint32_t data_l;
340
341 do {
342 data_l = mmio_read_32(CPG_PLLECR);
343 } while ((data_l & CPG_PLLECR_PLL3ST_BIT) == 0);
344 dsb_sev();
345 }
346
347 static void pll3_control(uint32_t high)
348 {
349 uint32_t data_l, data_div, data_mul, tmp_div;
350
351 if (high != 0U) {
352 tmp_div = 3999U * brd_clkdiv * (brd_clkdiva + 1U) /
353 (brd_clk * ddr_mul) / 2U;
354 data_mul = ((ddr_mul * tmp_div) - 1U) << 24U;
355 pll3_mode = 1U;
356 loop_max = 2U;
357 } else {
358 tmp_div = 3999U * brd_clkdiv * (brd_clkdiva + 1U) /
359 (brd_clk * ddr0800_mul) / 2U;
360 data_mul = ((ddr0800_mul * tmp_div) - 1U) << 24U;
361 pll3_mode = 0U;
362 loop_max = 8U;
363 }
364
365 switch (tmp_div) {
366 case 1:
367 data_div = 0U;
368 break;
369 case 2:
370 case 3:
371 case 4:
372 data_div = tmp_div;
373 break;
374 default:
375 data_div = 6U;
376 data_mul = (data_mul * tmp_div) / 3U;
377 break;
378 }
379 data_mul = data_mul | (brd_clkdiva << 7);
380
381 /* PLL3 disable */
382 data_l = mmio_read_32(CPG_PLLECR) & ~CPG_PLLECR_PLL3E_BIT;
383 cpg_write_32(CPG_PLLECR, data_l);
384 dsb_sev();
385
386 if (prr_product == PRR_PRODUCT_M3) {
387 /* PLL3 DIV resetting(Lowest value:3) */
388 data_l = 0x00030003U | (0xFF80FF80U & mmio_read_32(CPG_FRQCRD));
389 cpg_write_32(CPG_FRQCRD, data_l);
390 dsb_sev();
391
392 /* zb3 clk stop */
393 data_l = CPG_ZB3CKCR_ZB3ST_BIT | mmio_read_32(CPG_ZB3CKCR);
394 cpg_write_32(CPG_ZB3CKCR, data_l);
395 dsb_sev();
396
397 /* PLL3 enable */
398 data_l = CPG_PLLECR_PLL3E_BIT | mmio_read_32(CPG_PLLECR);
399 cpg_write_32(CPG_PLLECR, data_l);
400 dsb_sev();
401
402 wait_for_pll3_status_bit_turned_on();
403
404 /* PLL3 DIV resetting (Highest value:0) */
405 data_l = (0xFF80FF80U & mmio_read_32(CPG_FRQCRD));
406 cpg_write_32(CPG_FRQCRD, data_l);
407 dsb_sev();
408
409 /* DIV SET KICK */
410 data_l = CPG_FRQCRB_KICK_BIT | mmio_read_32(CPG_FRQCRB);
411 cpg_write_32(CPG_FRQCRB, data_l);
412 dsb_sev();
413
414 /* PLL3 multiplier set */
415 cpg_write_32(CPG_PLL3CR, data_mul);
416 dsb_sev();
417
418 wait_for_pll3_status_bit_turned_on();
419
420 /* PLL3 DIV resetting(Target value) */
421 data_l = (data_div << 16U) | data_div |
422 (mmio_read_32(CPG_FRQCRD) & 0xFF80FF80U);
423 cpg_write_32(CPG_FRQCRD, data_l);
424 dsb_sev();
425
426 /* DIV SET KICK */
427 data_l = CPG_FRQCRB_KICK_BIT | mmio_read_32(CPG_FRQCRB);
428 cpg_write_32(CPG_FRQCRB, data_l);
429 dsb_sev();
430
431 wait_for_pll3_status_bit_turned_on();
432
433 /* zb3 clk start */
434 data_l = (~CPG_ZB3CKCR_ZB3ST_BIT) & mmio_read_32(CPG_ZB3CKCR);
435 cpg_write_32(CPG_ZB3CKCR, data_l);
436 dsb_sev();
437 }
438 }
439
440 /* barrier */
441 static inline void dsb_sev(void)
442 {
443 __asm__ __volatile__("dsb sy");
444 }
445
446 /* DDR memory register access */
447 static void wait_dbcmd(void)
448 {
449 uint32_t data_l;
450 /* dummy read */
451 data_l = mmio_read_32(DBSC_DBCMD);
452 dsb_sev();
453 while (true) {
454 /* wait DBCMD 1=busy, 0=ready */
455 data_l = mmio_read_32(DBSC_DBWAIT);
456 dsb_sev();
457 if ((data_l & 0x00000001U) == 0x00U) {
458 break;
459 }
460 }
461 }
462
463 static void send_dbcmd(uint32_t cmd)
464 {
465 /* wait for the previous command to complete before issuing a new one */
466 wait_dbcmd();
467 mmio_write_32(DBSC_DBCMD, cmd);
468 dsb_sev();
469 }
470
471 static void dbwait_loop(uint32_t wait_loop)
472 {
473 uint32_t i;
474
475 for (i = 0U; i < wait_loop; i++) {
476 wait_dbcmd();
477 }
478 }
479
480 /* DDRPHY register access (raw) */
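/*
 * PHY registers are reached indirectly: the register index is written to
 * DBSC_DBPDRGA(ch) and the data is then accessed through DBSC_DBPDRGD(ch).
 * The repeated dummy accesses (loop_max iterations) act as settling time
 * for the slower PHY register interface; pll3_control() widens loop_max
 * to 8 while the PHY runs at the low boot frequency.
 */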
481 static uint32_t reg_ddrphy_read(uint32_t phyno, uint32_t regadd)
482 {
483 uint32_t val;
484 uint32_t loop;
485
486 val = 0U;
487 mmio_write_32(DBSC_DBPDRGA(phyno), regadd);
488 dsb_sev();
489
490 while (mmio_read_32(DBSC_DBPDRGA(phyno)) != regadd) {
491 dsb_sev();
492 }
493 dsb_sev();
494
495 for (loop = 0U; loop < loop_max; loop++) {
496 val = mmio_read_32(DBSC_DBPDRGD(phyno));
497 dsb_sev();
498 }
499
500 return val;
501 }
502
503 static void reg_ddrphy_write(uint32_t phyno, uint32_t regadd, uint32_t regdata)
504 {
505 uint32_t loop;
506
507 mmio_write_32(DBSC_DBPDRGA(phyno), regadd);
508 dsb_sev();
509 for (loop = 0U; loop < loop_max; loop++) {
510 mmio_read_32(DBSC_DBPDRGA(phyno));
511 dsb_sev();
512 }
513 mmio_write_32(DBSC_DBPDRGD(phyno), regdata);
514 dsb_sev();
515
516 for (loop = 0U; loop < loop_max; loop++) {
517 mmio_read_32(DBSC_DBPDRGD(phyno));
518 dsb_sev();
519 }
520 }
521
522 static void reg_ddrphy_write_a(uint32_t regadd, uint32_t regdata)
523 {
524 uint32_t ch;
525 uint32_t loop;
526
527 foreach_vch(ch) {
528 mmio_write_32(DBSC_DBPDRGA(ch), regadd);
529 dsb_sev();
530 }
531
532 foreach_vch(ch) {
533 mmio_write_32(DBSC_DBPDRGD(ch), regdata);
534 dsb_sev();
535 }
536
537 for (loop = 0U; loop < loop_max; loop++) {
538 mmio_read_32(DBSC_DBPDRGD(0));
539 dsb_sev();
540 }
541 }
542
543 static inline void ddrphy_regif_idle(void)
544 {
545 reg_ddrphy_read(0U, ddr_regdef_adr(_reg_PI_INT_STATUS));
546 dsb_sev();
547 }
548
549 /* DDRPHY register access (field modify) */
550 static inline uint32_t ddr_regdef(uint32_t _regdef)
551 {
552 return p_ddr_regdef_tbl[_regdef];
553 }
554
555 static inline uint32_t ddr_regdef_adr(uint32_t _regdef)
556 {
557 return DDR_REGDEF_ADR(p_ddr_regdef_tbl[_regdef]);
558 }
559
560 static inline uint32_t ddr_regdef_lsb(uint32_t _regdef)
561 {
562 return DDR_REGDEF_LSB(p_ddr_regdef_tbl[_regdef]);
563 }
564
565 static void ddr_setval_s(uint32_t ch, uint32_t slice, uint32_t _regdef,
566 uint32_t val)
567 {
568 uint32_t adr;
569 uint32_t lsb;
570 uint32_t len;
571 uint32_t msk;
572 uint32_t tmp;
573 uint32_t regdef;
574
575 regdef = ddr_regdef(_regdef);
576 adr = DDR_REGDEF_ADR(regdef) + 0x80U * slice;
577 len = DDR_REGDEF_LEN(regdef);
578 lsb = DDR_REGDEF_LSB(regdef);
579 if (len == 0x20U) {
580 msk = 0xffffffffU;
581 } else {
582 msk = ((1U << len) - 1U) << lsb;
583 }
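/* e.g. a 4-bit field at lsb 8 gives msk = 0x00000f00 (illustrative) */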
584
585 tmp = reg_ddrphy_read(ch, adr);
586 tmp = (tmp & (~msk)) | ((val << lsb) & msk);
587 reg_ddrphy_write(ch, adr, tmp);
588 }
589
590 static uint32_t ddr_getval_s(uint32_t ch, uint32_t slice, uint32_t _regdef)
591 {
592 uint32_t adr;
593 uint32_t lsb;
594 uint32_t len;
595 uint32_t msk;
596 uint32_t tmp;
597 uint32_t regdef;
598
599 regdef = ddr_regdef(_regdef);
600 adr = DDR_REGDEF_ADR(regdef) + 0x80U * slice;
601 len = DDR_REGDEF_LEN(regdef);
602 lsb = DDR_REGDEF_LSB(regdef);
603 if (len == 0x20U) {
604 msk = 0xffffffffU;
605 } else {
606 msk = ((1U << len) - 1U);
607 }
608
609 tmp = reg_ddrphy_read(ch, adr);
610 tmp = (tmp >> lsb) & msk;
611
612 return tmp;
613 }
614
615 static void ddr_setval(uint32_t ch, uint32_t regdef, uint32_t val)
616 {
617 ddr_setval_s(ch, 0U, regdef, val);
618 }
619
620 static void ddr_setval_ach_s(uint32_t slice, uint32_t regdef, uint32_t val)
621 {
622 uint32_t ch;
623
624 foreach_vch(ch) {
625 ddr_setval_s(ch, slice, regdef, val);
626 }
627 }
628
629 static void ddr_setval_ach(uint32_t regdef, uint32_t val)
630 {
631 ddr_setval_ach_s(0U, regdef, val);
632 }
633
634 static void ddr_setval_ach_as(uint32_t regdef, uint32_t val)
635 {
636 uint32_t slice;
637
638 for (slice = 0U; slice < SLICE_CNT; slice++) {
639 ddr_setval_ach_s(slice, regdef, val);
640 }
641 }
642
643 static uint32_t ddr_getval(uint32_t ch, uint32_t regdef)
644 {
645 return ddr_getval_s(ch, 0U, regdef);
646 }
647
648 static uint32_t ddr_getval_ach(uint32_t regdef, uint32_t *p)
649 {
650 uint32_t ch;
651
652 foreach_vch(ch) {
653 p[ch] = ddr_getval_s(ch, 0U, regdef);
654 }
655 return p[0U];
656 }
657
658 static uint32_t ddr_getval_ach_as(uint32_t regdef, uint32_t *p)
659 {
660 uint32_t ch, slice;
661 uint32_t *pp;
662
663 pp = p;
664 foreach_vch(ch) {
665 for (slice = 0U; slice < SLICE_CNT; slice++) {
666 *pp++ = ddr_getval_s(ch, slice, regdef);
667 }
668 }
669 return p[0U];
670 }
671
672 /* handling functions for setting ddrphy value table */
673 static void _tblcopy(uint32_t *to, const uint32_t *from, uint32_t size)
674 {
675 uint32_t i;
676
677 for (i = 0U; i < size; i++) {
678 to[i] = from[i];
679 }
680 }
681
682 static void ddrtbl_setval(uint32_t *tbl, uint32_t _regdef, uint32_t val)
683 {
684 uint32_t adr;
685 uint32_t lsb;
686 uint32_t len;
687 uint32_t msk;
688 uint32_t tmp;
689 uint32_t adrmsk;
690 uint32_t regdef;
691
692 regdef = ddr_regdef(_regdef);
693 adr = DDR_REGDEF_ADR(regdef);
694 len = DDR_REGDEF_LEN(regdef);
695 lsb = DDR_REGDEF_LSB(regdef);
696 if (len == 0x20U) {
697 msk = 0xffffffffU;
698 } else {
699 msk = ((1U << len) - 1U) << lsb;
700 }
701
702 if (adr < 0x400U) {
703 adrmsk = 0xffU;
704 } else {
705 adrmsk = 0x7fU;
706 }
707
708 tmp = tbl[adr & adrmsk];
709 tmp = (tmp & (~msk)) | ((val << lsb) & msk);
710 tbl[adr & adrmsk] = tmp;
711 }
712
713 static uint32_t ddrtbl_getval(uint32_t *tbl, uint32_t _regdef)
714 {
715 uint32_t adr;
716 uint32_t lsb;
717 uint32_t len;
718 uint32_t msk;
719 uint32_t tmp;
720 uint32_t adrmsk;
721 uint32_t regdef;
722
723 regdef = ddr_regdef(_regdef);
724 adr = DDR_REGDEF_ADR(regdef);
725 len = DDR_REGDEF_LEN(regdef);
726 lsb = DDR_REGDEF_LSB(regdef);
727 if (len == 0x20U) {
728 msk = 0xffffffffU;
729 } else {
730 msk = ((1U << len) - 1U);
731 }
732
733 if (adr < 0x400U) {
734 adrmsk = 0xffU;
735 } else {
736 adrmsk = 0x7fU;
737 }
738
739 tmp = tbl[adr & adrmsk];
740 tmp = (tmp >> lsb) & msk;
741
742 return tmp;
743 }
744
745 /* DDRPHY register access handling */
746 static uint32_t ddrphy_regif_chk(void)
747 {
748 uint32_t tmp_ach[DRAM_CH_CNT];
749 uint32_t ch;
750 uint32_t err;
751 uint32_t PI_VERSION_CODE;
752
753 if (prr_product == PRR_PRODUCT_M3) {
754 PI_VERSION_CODE = 0x2041U; /* G2M */
755 }
756
757 ddr_getval_ach(_reg_PI_VERSION, (uint32_t *)tmp_ach);
758 err = 0U;
759 foreach_vch(ch) {
760 if (tmp_ach[ch] != PI_VERSION_CODE) {
761 err = 1U;
762 }
763 }
764 return err;
765 }
766
767 /* functions and parameters for timing setting */
768 struct _jedec_spec1 {
769 uint16_t fx3;
770 uint8_t rlwodbi;
771 uint8_t rlwdbi;
772 uint8_t WL;
773 uint8_t nwr;
774 uint8_t nrtp;
775 uint8_t odtlon;
776 uint8_t MR1;
777 uint8_t MR2;
778 };
779
780 #define JS1_USABLEC_SPEC_LO 2U
781 #define JS1_USABLEC_SPEC_HI 5U
782 #define JS1_FREQ_TBL_NUM 8
783 #define JS1_MR1(f) (0x04U | ((f) << 4U))
784 #define JS1_MR2(f) (0x00U | ((f) << 3U) | (f))
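/*
 * Illustration: for the 1600 Mbps row (f = 2) the macros give
 * JS1_MR1(2) = 0x24 and JS1_MR2(2) = 0x12; the lower-speed rows in the
 * table below additionally OR 0x40 into the MR2 value.
 */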
785 static const struct _jedec_spec1 js1[JS1_FREQ_TBL_NUM] = {
786 /* 533.333Mbps */
787 { 800U, 6U, 6U, 4U, 6U, 8U, 0U, JS1_MR1(0U), JS1_MR2(0U) | 0x40U },
788 /* 1066.666Mbps */
789 { 1600U, 10U, 12U, 8U, 10U, 8U, 0U, JS1_MR1(1U), JS1_MR2(1U) | 0x40U },
790 /* 1600.000Mbps */
791 { 2400U, 14U, 16U, 12U, 16U, 8U, 6U, JS1_MR1(2U), JS1_MR2(2U) | 0x40U },
792 /* 2133.333Mbps */
793 { 3200U, 20U, 22U, 10U, 20U, 8U, 4U, JS1_MR1(3U), JS1_MR2(3U) },
794 /* 2666.666Mbps */
795 { 4000U, 24U, 28U, 12U, 24U, 10U, 4U, JS1_MR1(4U), JS1_MR2(4U) },
796 /* 3200.000Mbps */
797 { 4800U, 28U, 32U, 14U, 30U, 12U, 6U, JS1_MR1(5U), JS1_MR2(5U) },
798 /* 3733.333Mbps */
799 { 5600U, 32U, 36U, 16U, 34U, 14U, 6U, JS1_MR1(6U), JS1_MR2(6U) },
800 /* 4266.666Mbps */
801 { 6400U, 36U, 40U, 18U, 40U, 16U, 8U, JS1_MR1(7U), JS1_MR2(7U) }
802 };
803
804 struct _jedec_spec2 {
805 uint16_t ps;
806 uint16_t cyc;
807 };
808
809 #define js2_tsr 0
810 #define js2_txp 1
811 #define js2_trtp 2
812 #define js2_trcd 3
813 #define js2_trppb 4
814 #define js2_trpab 5
815 #define js2_tras 6
816 #define js2_twr 7
817 #define js2_twtr 8
818 #define js2_trrd 9
819 #define js2_tppd 10
820 #define js2_tfaw 11
821 #define js2_tdqsck 12
822 #define js2_tckehcmd 13
823 #define js2_tckelcmd 14
824 #define js2_tckelpd 15
825 #define js2_tmrr 16
826 #define js2_tmrw 17
827 #define js2_tmrd 18
828 #define js2_tzqcalns 19
829 #define js2_tzqlat 20
830 #define js2_tiedly 21
831 #define js2_tODTon_min 22
832 #define JS2_TBLCNT 23
833
834 #define js2_trcpb (JS2_TBLCNT)
835 #define js2_trcab (JS2_TBLCNT + 1)
836 #define js2_trfcab (JS2_TBLCNT + 2)
837 #define JS2_CNT (JS2_TBLCNT + 3)
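/*
 * The js2_* indexes above must match the row order of jedec_spec2[][]
 * below; the last three (tRCpb, tRCab, tRFCab) have no table row and are
 * derived at run time in _f_scale_js2() and dbsc_regset().
 */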
838
839 #ifndef JS2_DERATE
840 #define JS2_DERATE 0
841 #endif
842 static const struct _jedec_spec2 jedec_spec2[2][JS2_TBLCNT] = {
843 {
844 /* tSR */ { 15000, 3 },
845 /* tXP */ { 7500, 3 },
846 /* tRTP */ { 7500, 8 },
847 /* tRCD */ { 18000, 4 },
848 /* tRPpb */ { 18000, 3 },
849 /* tRPab */ { 21000, 3 },
850 /* tRAS */ { 42000, 3 },
851 /* tWR */ { 18000, 4 },
852 /* tWTR */ { 10000, 8 },
853 /* tRRD */ { 10000, 4 },
854 /* tPPD */ { 0, 0 },
855 /* tFAW */ { 40000, 0 },
856 /* tDQSCK */ { 3500, 0 },
857 /* tCKEHCMD */ { 7500, 3 },
858 /* tCKELCMD */ { 7500, 3 },
859 /* tCKELPD */ { 7500, 3 },
860 /* tMRR */ { 0, 8 },
861 /* tMRW */ { 10000, 10 },
862 /* tMRD */ { 14000, 10 },
863 /* tZQCALns */ { 1000 * 10, 0 },
864 /* tZQLAT */ { 30000, 10 },
865 /* tIEdly */ { 12500, 0 },
866 /* tODTon_min */{ 1500, 0 }
867 }, {
868 /* tSR */ { 15000, 3 },
869 /* tXP */ { 7500, 3 },
870 /* tRTP */ { 7500, 8 },
871 /* tRCD */ { 19875, 4 },
872 /* tRPpb */ { 19875, 3 },
873 /* tRPab */ { 22875, 3 },
874 /* tRAS */ { 43875, 3 },
875 /* tWR */ { 18000, 4 },
876 /* tWTR */ { 10000, 8 },
877 /* tRRD */ { 11875, 4 },
878 /* tPPD */ { 0, 0 },
879 /* tFAW */ { 40000, 0 },
880 /* tDQSCK */ { 3600, 0 },
881 /* tCKEHCMD */ { 7500, 3 },
882 /* tCKELCMD */ { 7500, 3 },
883 /* tCKELPD */ { 7500, 3 },
884 /* tMRR */ { 0, 8 },
885 /* tMRW */ { 10000, 10 },
886 /* tMRD */ { 14000, 10 },
887 /* tZQCALns */ { 1000 * 10, 0 },
888 /* tZQLAT */ { 30000, 10 },
889 /* tIEdly */ { 12500, 0 },
890 /* tODTon_min */{ 1500, 0 }
891 }
892 };
893
894 static const uint16_t jedec_spec2_trfc_ab[7] = {
895 /* 4Gb, 6Gb, 8Gb, 12Gb, 16Gb, 24Gb(non), 32Gb(non) */
896 130U, 180U, 180U, 280U, 280U, 560U, 560U
897 };
898
899 static uint32_t js1_ind;
900 static uint16_t js2[JS2_CNT];
901 static uint8_t RL;
902 static uint8_t WL;
903
904 static uint16_t _f_scale(uint32_t _ddr_mbps, uint32_t _ddr_mbpsdiv, uint32_t ps,
905 uint16_t cyc)
906 {
907 uint16_t ret = cyc;
908 uint32_t tmp;
909 uint32_t div;
910
911 tmp = (((uint32_t)(ps) + 9U) / 10U) * _ddr_mbps;
912 div = tmp / (200000U * _ddr_mbpsdiv);
913 if (tmp != (div * 200000U * _ddr_mbpsdiv)) {
914 div = div + 1U;
915 }
916
917 if (div > cyc) {
918 ret = (uint16_t)div;
919 }
920
921 return ret;
922 }
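/*
 * Example (assuming ddr_mbps = 3200, ddr_mbpsdiv = 1): tRCD = 18000 ps
 * scales to ceil(1800 * 3200 / 200000) = 29 memory-clock cycles, which is
 * above the JEDEC minimum of 4 cycles, so _f_scale() returns 29.
 */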
923
924 static void _f_scale_js2(uint32_t _ddr_mbps, uint32_t _ddr_mbpsdiv,
925 uint16_t *_js2)
926 {
927 int i;
928
929 for (i = 0; i < JS2_TBLCNT; i++) {
930 _js2[i] = _f_scale(_ddr_mbps, _ddr_mbpsdiv,
931 jedec_spec2[JS2_DERATE][i].ps,
932 jedec_spec2[JS2_DERATE][i].cyc);
933 }
934
935 _js2[js2_trcpb] = _js2[js2_tras] + _js2[js2_trppb];
936 _js2[js2_trcab] = _js2[js2_tras] + _js2[js2_trpab];
937 }
938
939 /* scaler for DELAY value */
940 static int16_t _f_scale_adj(int16_t ps)
941 {
942 int32_t tmp;
943 /*
944 * tmp = (int32_t)512 * ps * ddr_mbps /2 / ddr_mbpsdiv / 1000 / 1000;
945 * = ps * ddr_mbps /2 / ddr_mbpsdiv *512 / 8 / 8 / 125 / 125
946 * = ps * ddr_mbps / ddr_mbpsdiv *4 / 125 / 125
947 */
948 tmp = (int32_t)4 * (int32_t)ps * (int32_t)ddr_mbps /
949 (int32_t)ddr_mbpsdiv;
950 tmp = (int32_t)tmp / (int32_t)15625;
951
952 return (int16_t)tmp;
953 }
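/*
 * Example (assuming ddr_mbps = 3200, ddr_mbpsdiv = 1): a board trace
 * adjustment of 100 ps maps to 4 * 100 * 3200 / 15625 = 81 delay steps,
 * i.e. 512 delay steps per memory-clock period at 1600 MHz.
 */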
954
955 static const uint32_t reg_pi_mr1_data_fx_csx[2U][CSAB_CNT] = {
956 {
957 _reg_PI_MR1_DATA_F0_0,
958 _reg_PI_MR1_DATA_F0_1,
959 _reg_PI_MR1_DATA_F0_2,
960 _reg_PI_MR1_DATA_F0_3},
961 {
962 _reg_PI_MR1_DATA_F1_0,
963 _reg_PI_MR1_DATA_F1_1,
964 _reg_PI_MR1_DATA_F1_2,
965 _reg_PI_MR1_DATA_F1_3}
966 };
967
968 static const uint32_t reg_pi_mr2_data_fx_csx[2U][CSAB_CNT] = {
969 {
970 _reg_PI_MR2_DATA_F0_0,
971 _reg_PI_MR2_DATA_F0_1,
972 _reg_PI_MR2_DATA_F0_2,
973 _reg_PI_MR2_DATA_F0_3},
974 {
975 _reg_PI_MR2_DATA_F1_0,
976 _reg_PI_MR2_DATA_F1_1,
977 _reg_PI_MR2_DATA_F1_2,
978 _reg_PI_MR2_DATA_F1_3}
979 };
980
981 static const uint32_t reg_pi_mr3_data_fx_csx[2U][CSAB_CNT] = {
982 {
983 _reg_PI_MR3_DATA_F0_0,
984 _reg_PI_MR3_DATA_F0_1,
985 _reg_PI_MR3_DATA_F0_2,
986 _reg_PI_MR3_DATA_F0_3},
987 {
988 _reg_PI_MR3_DATA_F1_0,
989 _reg_PI_MR3_DATA_F1_1,
990 _reg_PI_MR3_DATA_F1_2,
991 _reg_PI_MR3_DATA_F1_3}
992 };
993
994 static const uint32_t reg_pi_mr11_data_fx_csx[2U][CSAB_CNT] = {
995 {
996 _reg_PI_MR11_DATA_F0_0,
997 _reg_PI_MR11_DATA_F0_1,
998 _reg_PI_MR11_DATA_F0_2,
999 _reg_PI_MR11_DATA_F0_3},
1000 {
1001 _reg_PI_MR11_DATA_F1_0,
1002 _reg_PI_MR11_DATA_F1_1,
1003 _reg_PI_MR11_DATA_F1_2,
1004 _reg_PI_MR11_DATA_F1_3}
1005 };
1006
1007 static const uint32_t reg_pi_mr12_data_fx_csx[2U][CSAB_CNT] = {
1008 {
1009 _reg_PI_MR12_DATA_F0_0,
1010 _reg_PI_MR12_DATA_F0_1,
1011 _reg_PI_MR12_DATA_F0_2,
1012 _reg_PI_MR12_DATA_F0_3},
1013 {
1014 _reg_PI_MR12_DATA_F1_0,
1015 _reg_PI_MR12_DATA_F1_1,
1016 _reg_PI_MR12_DATA_F1_2,
1017 _reg_PI_MR12_DATA_F1_3}
1018 };
1019
1020 static const uint32_t reg_pi_mr14_data_fx_csx[2U][CSAB_CNT] = {
1021 {
1022 _reg_PI_MR14_DATA_F0_0,
1023 _reg_PI_MR14_DATA_F0_1,
1024 _reg_PI_MR14_DATA_F0_2,
1025 _reg_PI_MR14_DATA_F0_3},
1026 {
1027 _reg_PI_MR14_DATA_F1_0,
1028 _reg_PI_MR14_DATA_F1_1,
1029 _reg_PI_MR14_DATA_F1_2,
1030 _reg_PI_MR14_DATA_F1_3}
1031 };
1032
1033 /*
1034 * regif pll w/a ( REGIF G2M WA )
1035 */
1036 static void regif_pll_wa(void)
1037 {
1038 uint32_t ch;
1039 uint32_t reg_ofs;
1040
1041 /* PLL setting for PHY : G2M */
1042 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_PLL_WAIT),
1043 (0x5064U <<
1044 ddr_regdef_lsb(_reg_PHY_PLL_WAIT)));
1045
1046 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_PLL_CTRL),
1047 (ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
1048 _reg_PHY_PLL_CTRL_TOP) << 16) |
1049 ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
1050 _reg_PHY_PLL_CTRL));
1051 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_PLL_CTRL_CA),
1052 ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
1053 _reg_PHY_PLL_CTRL_CA));
1054
1055 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_LP4_BOOT_PLL_CTRL),
1056 (ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
1057 _reg_PHY_LP4_BOOT_PLL_CTRL_CA) << 16) |
1058 ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
1059 _reg_PHY_LP4_BOOT_PLL_CTRL));
1060 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_LP4_BOOT_TOP_PLL_CTRL),
1061 ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
1062 _reg_PHY_LP4_BOOT_TOP_PLL_CTRL));
1063
1064 reg_ofs = ddr_regdef_adr(_reg_PHY_LPDDR3_CS) - DDR_PHY_ADR_G_REGSET_OFS;
1065 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_LPDDR3_CS),
1066 _cnf_DDR_PHY_ADR_G_REGSET[reg_ofs]);
1067
1068 /* protect register interface */
1069 ddrphy_regif_idle();
1070 pll3_control(0U);
1071
1072 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_DLL_RST_EN),
1073 (0x01U << ddr_regdef_lsb(_reg_PHY_DLL_RST_EN)));
1074 ddrphy_regif_idle();
1075
1076 /*
1077 * init start
1078 * dbdficnt0:
1079 * dfi_dram_clk_disable=1
1080 * dfi_frequency = 0
1081 * freq_ratio = 01 (2:1)
1082 * init_start =0
1083 */
1084 foreach_vch(ch) {
1085 mmio_write_32(DBSC_DBDFICNT(ch), 0x00000F10U);
1086 }
1087 dsb_sev();
1088
1089 /*
1090 * dbdficnt0:
1091 * dfi_dram_clk_disable=1
1092 * dfi_frequency = 0
1093 * freq_ratio = 01 (2:1)
1094 * init_start =1
1095 */
1096 foreach_vch(ch) {
1097 mmio_write_32(DBSC_DBDFICNT(ch), 0x00000F11U);
1098 }
1099 dsb_sev();
1100
1101 foreach_ech(ch) {
1102 if ((board_cnf->phyvalid & BIT(ch)) != 0U) {
1103 while ((mmio_read_32(DBSC_PLL_LOCK(ch)) & 0x1fU) != 0x1fU) {
1104 }
1105 }
1106 }
1107 dsb_sev();
1108 }
1109
1110 /* load table data into DDR registers */
1111 static void ddrtbl_load(void)
1112 {
1113 uint32_t i;
1114 uint32_t slice;
1115 uint32_t csab;
1116 uint32_t adr;
1117 uint32_t data_l;
1118 uint32_t tmp[3];
1119 uint16_t dataS;
1120
1121 /*
1122 * TIMING REGISTERS
1123 * search jedec_spec1 index
1124 */
1125 for (i = JS1_USABLEC_SPEC_LO; i < (uint32_t)JS1_FREQ_TBL_NUM - 1U; i++) {
1126 if ((js1[i].fx3 * 2U * ddr_mbpsdiv >= ddr_mbps * 3U) != 0U) {
1127 break;
1128 }
1129 }
1130 if (i > JS1_USABLEC_SPEC_HI) {
1131 js1_ind = JS1_USABLEC_SPEC_HI;
1132 } else {
1133 js1_ind = i;
1134 }
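/*
 * e.g. ddr_mbps = 3200, ddr_mbpsdiv = 1 stops the search at the
 * 3200 Mbps row (fx3 = 4800), i.e. js1_ind = 5.
 */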
1135
1136 if (board_cnf->dbi_en != 0U) {
1137 RL = js1[js1_ind].rlwdbi;
1138 } else {
1139 RL = js1[js1_ind].rlwodbi;
1140 }
1141
1142 WL = js1[js1_ind].WL;
1143
1144 /* calculate jedec_spec2 */
1145 _f_scale_js2(ddr_mbps, ddr_mbpsdiv, js2);
1146
1147 /* PREPARE TBL */
1148 if (prr_product == PRR_PRODUCT_M3) {
1149 /* G2M */
1150 _tblcopy(_cnf_DDR_PHY_SLICE_REGSET,
1151 DDR_PHY_SLICE_REGSET_G2M, DDR_PHY_SLICE_REGSET_NUM_G2M);
1152 _tblcopy(_cnf_DDR_PHY_ADR_V_REGSET,
1153 DDR_PHY_ADR_V_REGSET_G2M, DDR_PHY_ADR_V_REGSET_NUM_G2M);
1154 _tblcopy(_cnf_DDR_PHY_ADR_I_REGSET,
1155 DDR_PHY_ADR_I_REGSET_G2M, DDR_PHY_ADR_I_REGSET_NUM_G2M);
1156 _tblcopy(_cnf_DDR_PHY_ADR_G_REGSET,
1157 DDR_PHY_ADR_G_REGSET_G2M, DDR_PHY_ADR_G_REGSET_NUM_G2M);
1158 _tblcopy(_cnf_DDR_PI_REGSET,
1159 DDR_PI_REGSET_G2M, DDR_PI_REGSET_NUM_G2M);
1160
1161 DDR_PHY_SLICE_REGSET_OFS = DDR_PHY_SLICE_REGSET_OFS_G2M;
1162 DDR_PHY_ADR_V_REGSET_OFS = DDR_PHY_ADR_V_REGSET_OFS_G2M;
1163 DDR_PHY_ADR_I_REGSET_OFS = DDR_PHY_ADR_I_REGSET_OFS_G2M;
1164 DDR_PHY_ADR_G_REGSET_OFS = DDR_PHY_ADR_G_REGSET_OFS_G2M;
1165 DDR_PI_REGSET_OFS = DDR_PI_REGSET_OFS_G2M;
1166 DDR_PHY_SLICE_REGSET_SIZE = DDR_PHY_SLICE_REGSET_SIZE_G2M;
1167 DDR_PHY_ADR_V_REGSET_SIZE = DDR_PHY_ADR_V_REGSET_SIZE_G2M;
1168 DDR_PHY_ADR_I_REGSET_SIZE = DDR_PHY_ADR_I_REGSET_SIZE_G2M;
1169 DDR_PHY_ADR_G_REGSET_SIZE = DDR_PHY_ADR_G_REGSET_SIZE_G2M;
1170 DDR_PI_REGSET_SIZE = DDR_PI_REGSET_SIZE_G2M;
1171 DDR_PHY_SLICE_REGSET_NUM = DDR_PHY_SLICE_REGSET_NUM_G2M;
1172 DDR_PHY_ADR_V_REGSET_NUM = DDR_PHY_ADR_V_REGSET_NUM_G2M;
1173 DDR_PHY_ADR_I_REGSET_NUM = DDR_PHY_ADR_I_REGSET_NUM_G2M;
1174 DDR_PHY_ADR_G_REGSET_NUM = DDR_PHY_ADR_G_REGSET_NUM_G2M;
1175 DDR_PI_REGSET_NUM = DDR_PI_REGSET_NUM_G2M;
1176
1177 DDR_PHY_ADR_I_NUM = 2U;
1178 }
1179
1180 /* on fly gate adjust */
1181 if ((prr_product == PRR_PRODUCT_M3) && (prr_cut == PRR_PRODUCT_10)) {
1182 ddrtbl_setval(_cnf_DDR_PHY_SLICE_REGSET,
1183 _reg_ON_FLY_GATE_ADJUST_EN, 0x00);
1184 }
1185
1186 /* Adjust PI parameters */
1187 #ifdef _def_LPDDR4_ODT
1188 for (i = 0U; i < 2U; i++) {
1189 for (csab = 0U; csab < CSAB_CNT; csab++) {
1190 ddrtbl_setval(_cnf_DDR_PI_REGSET,
1191 reg_pi_mr11_data_fx_csx[i][csab],
1192 _def_LPDDR4_ODT);
1193 }
1194 }
1195 #endif /* _def_LPDDR4_ODT */
1196
1197 #ifdef _def_LPDDR4_VREFCA
1198 for (i = 0U; i < 2U; i++) {
1199 for (csab = 0U; csab < CSAB_CNT; csab++) {
1200 ddrtbl_setval(_cnf_DDR_PI_REGSET,
1201 reg_pi_mr12_data_fx_csx[i][csab],
1202 _def_LPDDR4_VREFCA);
1203 }
1204 }
1205 #endif /* _def_LPDDR4_VREFCA */
1206
1207 if ((js2[js2_tiedly]) >= 0x0eU) {
1208 dataS = 0x0eU;
1209 } else {
1210 dataS = js2[js2_tiedly];
1211 }
1212
1213 ddrtbl_setval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_RDDATA_EN_DLY, dataS);
1214 ddrtbl_setval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_RDDATA_EN_TSEL_DLY,
1215 (dataS - 2U));
1216 ddrtbl_setval(_cnf_DDR_PI_REGSET, _reg_PI_RDLAT_ADJ_F1, RL - dataS);
1217
1218 if (ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_WRITE_PATH_LAT_ADD) != 0U) {
1219 data_l = WL - 1U;
1220 } else {
1221 data_l = WL;
1222 }
1223 ddrtbl_setval(_cnf_DDR_PI_REGSET, _reg_PI_WRLAT_ADJ_F1, data_l - 2U);
1224 ddrtbl_setval(_cnf_DDR_PI_REGSET, _reg_PI_WRLAT_F1, data_l);
1225
1226 if (board_cnf->dbi_en != 0U) {
1227 ddrtbl_setval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_DBI_MODE,
1228 0x01U);
1229 ddrtbl_setval(_cnf_DDR_PHY_SLICE_REGSET,
1230 _reg_PHY_WDQLVL_DATADM_MASK, 0x000U);
1231 } else {
1232 ddrtbl_setval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_DBI_MODE,
1233 0x00U);
1234 ddrtbl_setval(_cnf_DDR_PHY_SLICE_REGSET,
1235 _reg_PHY_WDQLVL_DATADM_MASK, 0x100U);
1236 }
1237
1238 tmp[0] = js1[js1_ind].MR1;
1239 tmp[1] = js1[js1_ind].MR2;
1240 data_l = ddrtbl_getval(_cnf_DDR_PI_REGSET, _reg_PI_MR3_DATA_F1_0);
1241 if (board_cnf->dbi_en != 0U) {
1242 tmp[2] = data_l | 0xc0U;
1243 } else {
1244 tmp[2] = data_l & (~0xc0U);
1245 }
1246
1247 for (i = 0U; i < 2U; i++) {
1248 for (csab = 0U; csab < CSAB_CNT; csab++) {
1249 ddrtbl_setval(_cnf_DDR_PI_REGSET,
1250 reg_pi_mr1_data_fx_csx[i][csab], tmp[0]);
1251 ddrtbl_setval(_cnf_DDR_PI_REGSET,
1252 reg_pi_mr2_data_fx_csx[i][csab], tmp[1]);
1253 ddrtbl_setval(_cnf_DDR_PI_REGSET,
1254 reg_pi_mr3_data_fx_csx[i][csab], tmp[2]);
1255 }
1256 }
1257
1258 /* DDRPHY INT START */
1259 regif_pll_wa();
1260 dbwait_loop(5U);
1261
1262 /* FREQ_SEL_MULTICAST & PER_CS_TRAINING_MULTICAST SET (for safety) */
1263 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_FREQ_SEL_MULTICAST_EN),
1264 BIT(ddr_regdef_lsb(_reg_PHY_FREQ_SEL_MULTICAST_EN)));
1265 ddr_setval_ach_as(_reg_PHY_PER_CS_TRAINING_MULTICAST_EN, 0x01U);
1266
1267 /* SET DATA SLICE TABLE */
1268 for (slice = 0U; slice < SLICE_CNT; slice++) {
1269 adr =
1270 DDR_PHY_SLICE_REGSET_OFS +
1271 DDR_PHY_SLICE_REGSET_SIZE * slice;
1272 for (i = 0U; i < DDR_PHY_SLICE_REGSET_NUM; i++) {
1273 reg_ddrphy_write_a(adr + i,
1274 _cnf_DDR_PHY_SLICE_REGSET[i]);
1275 }
1276 }
1277
1278 /* SET ADR SLICE TABLE */
1279 adr = DDR_PHY_ADR_V_REGSET_OFS;
1280 for (i = 0U; i < DDR_PHY_ADR_V_REGSET_NUM; i++) {
1281 reg_ddrphy_write_a(adr + i, _cnf_DDR_PHY_ADR_V_REGSET[i]);
1282 }
1283
1284 if ((prr_product == PRR_PRODUCT_M3) &&
1285 ((0x00ffffffU & (uint32_t)((board_cnf->ch[0].ca_swap) >> 40U))
1286 != 0x00U)) {
1287 adr = DDR_PHY_ADR_I_REGSET_OFS + DDR_PHY_ADR_I_REGSET_SIZE;
1288 for (i = 0U; i < DDR_PHY_ADR_V_REGSET_NUM; i++) {
1289 reg_ddrphy_write_a(adr + i,
1290 _cnf_DDR_PHY_ADR_V_REGSET[i]);
1291 }
1292 ddrtbl_setval(_cnf_DDR_PHY_ADR_G_REGSET,
1293 _reg_PHY_ADR_DISABLE, 0x02);
1294 DDR_PHY_ADR_I_NUM -= 1U;
1295 ddr_phycaslice = 1U;
1296
1297 #ifndef _def_LPDDR4_ODT
1298 for (i = 0U; i < 2U; i++) {
1299 for (csab = 0U; csab < CSAB_CNT; csab++) {
1300 ddrtbl_setval(_cnf_DDR_PI_REGSET,
1301 reg_pi_mr11_data_fx_csx[i][csab],
1302 0x66);
1303 }
1304 }
1305 #endif/* _def_LPDDR4_ODT */
1306 } else {
1307 ddr_phycaslice = 0U;
1308 }
1309
1310 if (DDR_PHY_ADR_I_NUM > 0U) {
1311 for (slice = 0U; slice < DDR_PHY_ADR_I_NUM; slice++) {
1312 adr =
1313 DDR_PHY_ADR_I_REGSET_OFS +
1314 DDR_PHY_ADR_I_REGSET_SIZE * slice;
1315 for (i = 0U; i < DDR_PHY_ADR_I_REGSET_NUM; i++) {
1316 reg_ddrphy_write_a(adr + i,
1317 _cnf_DDR_PHY_ADR_I_REGSET
1318 [i]);
1319 }
1320 }
1321 }
1322
1323 /* SET ADRCTRL SLICE TABLE */
1324 adr = DDR_PHY_ADR_G_REGSET_OFS;
1325 for (i = 0U; i < DDR_PHY_ADR_G_REGSET_NUM; i++) {
1326 reg_ddrphy_write_a(adr + i, _cnf_DDR_PHY_ADR_G_REGSET[i]);
1327 }
1328
1329 /* SET PI REGISTERS */
1330 adr = DDR_PI_REGSET_OFS;
1331 for (i = 0U; i < DDR_PI_REGSET_NUM; i++) {
1332 reg_ddrphy_write_a(adr + i, _cnf_DDR_PI_REGSET[i]);
1333 }
1334 }
1335
1336 /* CONFIGURE DDR REGISTERS */
1337 static void ddr_config_sub(void)
1338 {
1339 const uint32_t _par_CALVL_DEVICE_MAP = 1U;
1340 uint8_t high_byte[SLICE_CNT];
1341 uint32_t ch, slice;
1342 uint32_t data_l;
1343 uint32_t tmp;
1344 uint32_t i;
1345
1346 foreach_vch(ch) {
1347 /* BOARD SETTINGS (DQ,DM,VREF_DRIVING) */
1348 for (slice = 0U; slice < SLICE_CNT; slice++) {
1349 high_byte[slice] =
1350 (board_cnf->ch[ch].dqs_swap >> (4U * slice)) % 2U;
1351 ddr_setval_s(ch, slice, _reg_PHY_DQ_DM_SWIZZLE0,
1352 board_cnf->ch[ch].dq_swap[slice]);
1353 ddr_setval_s(ch, slice, _reg_PHY_DQ_DM_SWIZZLE1,
1354 board_cnf->ch[ch].dm_swap[slice]);
1355 if (high_byte[slice] != 0U) {
1356 /* HIGHER 16 BYTE */
1357 ddr_setval_s(ch, slice,
1358 _reg_PHY_CALVL_VREF_DRIVING_SLICE,
1359 0x00);
1360 } else {
1361 /* LOWER 16 BYTE */
1362 ddr_setval_s(ch, slice,
1363 _reg_PHY_CALVL_VREF_DRIVING_SLICE,
1364 0x01);
1365 }
1366 }
1367
1368 /* BOARD SETTINGS (CA,ADDR_SEL) */
1369 data_l = (0x00ffffffU & (uint32_t)(board_cnf->ch[ch].ca_swap)) |
1370 0x00888888U;
1371
1372 /* --- ADR_CALVL_SWIZZLE --- */
1373 if (prr_product == PRR_PRODUCT_M3) {
1374 ddr_setval(ch, _reg_PHY_ADR_CALVL_SWIZZLE0_0, data_l);
1375 ddr_setval(ch, _reg_PHY_ADR_CALVL_SWIZZLE1_0,
1376 0x00000000);
1377 ddr_setval(ch, _reg_PHY_ADR_CALVL_SWIZZLE0_1, data_l);
1378 ddr_setval(ch, _reg_PHY_ADR_CALVL_SWIZZLE1_1,
1379 0x00000000);
1380 ddr_setval(ch, _reg_PHY_ADR_CALVL_DEVICE_MAP,
1381 _par_CALVL_DEVICE_MAP);
1382 } else {
1383 ddr_setval(ch, _reg_PHY_ADR_CALVL_SWIZZLE0, data_l);
1384 ddr_setval(ch, _reg_PHY_ADR_CALVL_SWIZZLE1, 0x00000000);
1385 ddr_setval(ch, _reg_PHY_CALVL_DEVICE_MAP,
1386 _par_CALVL_DEVICE_MAP);
1387 }
1388
1389 /* --- ADR_ADDR_SEL --- */
1390 data_l = 0U;
1391 tmp = board_cnf->ch[ch].ca_swap;
1392 for (i = 0U; i < 6U; i++) {
1393 data_l |= ((tmp & 0x0fU) << (i * 5U));
1394 tmp = tmp >> 4;
1395 }
1396 ddr_setval(ch, _reg_PHY_ADR_ADDR_SEL, data_l);
1397 if (ddr_phycaslice == 1U) {
1398 /* ----------- adr slice2 swap ----------- */
1399 tmp = (uint32_t)((board_cnf->ch[ch].ca_swap) >> 40);
1400 data_l = (tmp & 0x00ffffffU) | 0x00888888U;
1401
1402 /* --- ADR_CALVL_SWIZZLE --- */
1403 if (prr_product == PRR_PRODUCT_M3) {
1404 ddr_setval_s(ch, 2,
1405 _reg_PHY_ADR_CALVL_SWIZZLE0_0,
1406 data_l);
1407 ddr_setval_s(ch, 2,
1408 _reg_PHY_ADR_CALVL_SWIZZLE1_0,
1409 0x00000000);
1410 ddr_setval_s(ch, 2,
1411 _reg_PHY_ADR_CALVL_SWIZZLE0_1,
1412 data_l);
1413 ddr_setval_s(ch, 2,
1414 _reg_PHY_ADR_CALVL_SWIZZLE1_1,
1415 0x00000000);
1416 ddr_setval_s(ch, 2,
1417 _reg_PHY_ADR_CALVL_DEVICE_MAP,
1418 _par_CALVL_DEVICE_MAP);
1419 } else {
1420 ddr_setval_s(ch, 2,
1421 _reg_PHY_ADR_CALVL_SWIZZLE0,
1422 data_l);
1423 ddr_setval_s(ch, 2,
1424 _reg_PHY_ADR_CALVL_SWIZZLE1,
1425 0x00000000);
1426 ddr_setval_s(ch, 2,
1427 _reg_PHY_CALVL_DEVICE_MAP,
1428 _par_CALVL_DEVICE_MAP);
1429 }
1430
1431 /* --- ADR_ADDR_SEL --- */
1432 data_l = 0U;
1433 for (i = 0U; i < 6U; i++) {
1434 data_l |= ((tmp & 0x0fU) << (i * 5U));
1435 tmp = tmp >> 4U;
1436 }
1437
1438 ddr_setval_s(ch, 2, _reg_PHY_ADR_ADDR_SEL, data_l);
1439 }
1440
1441 /* BOARD SETTINGS (BYTE_ORDER_SEL) */
1442 if (prr_product == PRR_PRODUCT_M3) {
1443 /* --- DATA_BYTE_SWAP --- */
1444 data_l = 0U;
1445 tmp = board_cnf->ch[ch].dqs_swap;
1446 for (i = 0U; i < 4U; i++) {
1447 data_l |= ((tmp & 0x03U) << (i * 2U));
1448 tmp = tmp >> 4U;
1449 }
1450 } else {
1451 /* --- DATA_BYTE_SWAP --- */
1452 data_l = board_cnf->ch[ch].dqs_swap;
1453 ddr_setval(ch, _reg_PI_DATA_BYTE_SWAP_EN, 0x01);
1454 ddr_setval(ch, _reg_PI_DATA_BYTE_SWAP_SLICE0,
1455 (data_l) & 0x0fU);
1456 ddr_setval(ch, _reg_PI_DATA_BYTE_SWAP_SLICE1,
1457 (data_l >> 4U * 1U) & 0x0fU);
1458 ddr_setval(ch, _reg_PI_DATA_BYTE_SWAP_SLICE2,
1459 (data_l >> 4U * 2U) & 0x0fU);
1460 ddr_setval(ch, _reg_PI_DATA_BYTE_SWAP_SLICE3,
1461 (data_l >> 4U * 3U) & 0x0fU);
1462
1463 ddr_setval(ch, _reg_PHY_DATA_BYTE_ORDER_SEL_HIGH, 0x00U);
1464 }
1465 ddr_setval(ch, _reg_PHY_DATA_BYTE_ORDER_SEL, data_l);
1466 }
1467 }
1468
1469 static void ddr_config(void)
1470 {
1471 uint32_t num_cacs_dly = _reg_PHY_CLK_CACS_SLAVE_DELAY_X_NUM;
1472 uint32_t reg_ofs, dly;
1473 uint32_t ch, slice;
1474 uint32_t data_l;
1475 uint32_t tmp;
1476 uint32_t i;
1477 int8_t _adj;
1478 int16_t adj;
1479 uint32_t dq;
1480 union {
1481 uint32_t ui32[4];
1482 uint8_t ui8[16];
1483 } patt;
1484 uint16_t patm;
1485
1486 /* configure ddrphy registers */
1487 ddr_config_sub();
1488
1489 /* WDQ_USER_PATT */
1490 foreach_vch(ch) {
1491 for (slice = 0U; slice < SLICE_CNT; slice++) {
1492 patm = 0U;
1493 for (i = 0U; i < 16U; i++) {
1494 tmp = board_cnf->ch[ch].wdqlvl_patt[i];
1495 patt.ui8[i] = tmp & 0xffU;
1496 if ((tmp & 0x100U) != 0U) {
1497 patm |= (1U << (uint16_t)i);
1498 }
1499 }
1500 ddr_setval_s(ch, slice, _reg_PHY_USER_PATT0,
1501 patt.ui32[0]);
1502 ddr_setval_s(ch, slice, _reg_PHY_USER_PATT1,
1503 patt.ui32[1]);
1504 ddr_setval_s(ch, slice, _reg_PHY_USER_PATT2,
1505 patt.ui32[2]);
1506 ddr_setval_s(ch, slice, _reg_PHY_USER_PATT3,
1507 patt.ui32[3]);
1508 ddr_setval_s(ch, slice, _reg_PHY_USER_PATT4, patm);
1509 }
1510 }
1511
1512 /* CACS DLY */
1513 data_l = board_cnf->cacs_dly + (uint32_t)_f_scale_adj(board_cnf->cacs_dly_adj);
1514 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_FREQ_SEL_MULTICAST_EN), 0x00U);
1515 foreach_vch(ch) {
1516 for (i = 0U; i < num_cacs_dly - 4U; i++) {
1517 adj = _f_scale_adj(board_cnf->ch[ch].cacs_adj[i]);
1518 dly = _reg_PHY_CLK_CACS_SLAVE_DELAY_X[i];
1519 ddrtbl_setval(_cnf_DDR_PHY_ADR_V_REGSET, dly,
1520 data_l + (uint32_t)adj);
1521 reg_ofs = ddr_regdef_adr(dly) - DDR_PHY_ADR_V_REGSET_OFS;
1522 reg_ddrphy_write(ch, ddr_regdef_adr(dly),
1523 _cnf_DDR_PHY_ADR_V_REGSET[reg_ofs]);
1524 }
1525
1526 for (i = num_cacs_dly - 4U; i < num_cacs_dly; i++) {
1527 adj = _f_scale_adj(board_cnf->ch[ch].cacs_adj[i]);
1528 dly = _reg_PHY_CLK_CACS_SLAVE_DELAY_X[i];
1529 ddrtbl_setval(_cnf_DDR_PHY_ADR_G_REGSET, dly,
1530 data_l + (uint32_t)adj);
1531 reg_ofs = ddr_regdef_adr(dly) - DDR_PHY_ADR_G_REGSET_OFS;
1532 reg_ddrphy_write(ch, ddr_regdef_adr(dly),
1533 _cnf_DDR_PHY_ADR_G_REGSET[reg_ofs]);
1534 }
1535
1536 if (ddr_phycaslice == 1U) {
1537 for (i = 0U; i < 6U; i++) {
1538 adj = _f_scale_adj(board_cnf->ch[ch].cacs_adj[i + num_cacs_dly]);
1539 dly = _reg_PHY_CLK_CACS_SLAVE_DELAY_X[i];
1540 ddrtbl_setval(_cnf_DDR_PHY_ADR_V_REGSET, dly,
1541 data_l + (uint32_t)adj);
1542 reg_ofs = ddr_regdef_adr(dly) - DDR_PHY_ADR_V_REGSET_OFS;
1543 reg_ddrphy_write(ch, ddr_regdef_adr(dly) + 0x0100U,
1544 _cnf_DDR_PHY_ADR_V_REGSET[reg_ofs]);
1545 }
1546 }
1547 }
1548
1549 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_FREQ_SEL_MULTICAST_EN),
1550 BIT(ddr_regdef_lsb(_reg_PHY_FREQ_SEL_MULTICAST_EN)));
1551
1552 /* WDQDM DLY */
1553 data_l = board_cnf->dqdm_dly_w;
1554 foreach_vch(ch) {
1555 for (slice = 0U; slice < SLICE_CNT; slice++) {
1556 for (i = 0U; i <= 8U; i++) {
1557 dq = slice * 8U + (uint32_t)i;
1558 if (i == 8U) {
1559 _adj = board_cnf->ch[ch].dm_adj_w[slice];
1560 } else {
1561 _adj = board_cnf->ch[ch].dq_adj_w[dq];
1562 }
1563 adj = _f_scale_adj(_adj);
1564 ddr_setval_s(ch, slice,
1565 _reg_PHY_CLK_WRX_SLAVE_DELAY[i],
1566 data_l + (uint32_t)adj);
1567 }
1568 }
1569 }
1570
1571 /* RDQDM DLY */
1572 data_l = board_cnf->dqdm_dly_r;
1573 foreach_vch(ch) {
1574 for (slice = 0U; slice < SLICE_CNT; slice++) {
1575 for (i = 0U; i <= 8U; i++) {
1576 dq = slice * 8U + (uint32_t)i;
1577 if (i == 8U) {
1578 _adj = board_cnf->ch[ch].dm_adj_r[slice];
1579 } else {
1580 _adj = board_cnf->ch[ch].dq_adj_r[dq];
1581 }
1582 adj = _f_scale_adj(_adj);
1583 dly = _reg_PHY_RDDQS_X_FALL_SLAVE_DELAY[i];
1584 ddr_setval_s(ch, slice, dly, data_l + (uint32_t)adj);
1585 dly = _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY[i];
1586 ddr_setval_s(ch, slice, dly, data_l + (uint32_t)adj);
1587 }
1588 }
1589 }
1590 }
1591
1592 /* DBSC register setting functions */
1593 static void dbsc_regset_pre(void)
1594 {
1595 uint32_t ch, csab;
1596 uint32_t data_l;
1597
1598 /* PRIMARY SETTINGS */
1599 /* LPDDR4, BL=16, DFI interface */
1600 mmio_write_32(DBSC_DBKIND, 0x0000000aU);
1601 mmio_write_32(DBSC_DBBL, 0x00000002U);
1602 mmio_write_32(DBSC_DBPHYCONF0, 0x00000001U);
1603
1604 /* FREQRATIO=2 */
1605 mmio_write_32(DBSC_DBSYSCONF1, 0x00000002U);
1606
1607 /*
1608 * DRAM SIZE REGISTER:
1609 * set all ranks as density=0(4Gb) for PHY initialization
1610 */
1611 foreach_vch(ch) {
1612 for (csab = 0U; csab < 4U; csab++) {
1613 mmio_write_32(DBSC_DBMEMCONF(ch, csab),
1614 DBMEMCONF_REGD(0U));
1615 }
1616 }
1617
1618 if (prr_product == PRR_PRODUCT_M3) {
1619 data_l = 0xe4e4e4e4U;
1620 foreach_ech(ch) {
1621 if ((ddr_phyvalid & (1U << ch)) != 0U) {
1622 data_l = (data_l & (~(0x000000FFU << (ch * 8U))))
1623 | (((board_cnf->ch[ch].dqs_swap & 0x0003U)
1624 | ((board_cnf->ch[ch].dqs_swap & 0x0030U) >> 2U)
1625 | ((board_cnf->ch[ch].dqs_swap & 0x0300U) >> 4U)
1626 | ((board_cnf->ch[ch].dqs_swap & 0x3000U) >> 6U))
1627 << (ch * 8U));
1628 }
1629 }
1630 mmio_write_32(DBSC_DBBSWAP, data_l);
1631 }
1632 }
1633
1634 static void dbsc_regset(void)
1635 {
1636 int32_t i;
1637 uint32_t ch;
1638 uint32_t data_l;
1639 uint32_t data_l2;
1640 uint32_t wdql;
1641 uint32_t dqenltncy;
1642 uint32_t dql;
1643 uint32_t dqienltncy;
1644 uint32_t wrcslat;
1645 uint32_t wrcsgap;
1646 uint32_t rdcslat;
1647 uint32_t rdcsgap;
1648 uint32_t scfctst0_act_act;
1649 uint32_t scfctst0_rda_act;
1650 uint32_t scfctst0_wra_act;
1651 uint32_t scfctst0_pre_act;
1652 uint32_t scfctst1_rd_wr;
1653 uint32_t scfctst1_wr_rd;
1654 uint32_t scfctst1_act_rd_wr;
1655 uint32_t scfctst1_asyncofs;
1656 uint32_t dbschhrw1_sctrfcab;
1657
1658 /* RFC */
1659 js2[js2_trfcab] =
1660 _f_scale(ddr_mbps, ddr_mbpsdiv,
1661 jedec_spec2_trfc_ab[max_density] * 1000U, 0U);
1662 /* DBTR0.CL : RL */
1663 mmio_write_32(DBSC_DBTR(0), RL);
1664
1665 /* DBTR1.CWL : WL */
1666 mmio_write_32(DBSC_DBTR(1), WL);
1667
1668 /* DBTR2.AL : 0 */
1669 mmio_write_32(DBSC_DBTR(2), 0U);
1670
1671 /* DBTR3.TRCD: tRCD */
1672 mmio_write_32(DBSC_DBTR(3), js2[js2_trcd]);
1673
1674 /* DBTR4.TRPA,TRP: tRPab,tRPpb */
1675 mmio_write_32(DBSC_DBTR(4), (js2[js2_trpab] << 16) | js2[js2_trppb]);
1676
1677 /* DBTR5.TRC : use tRCpb */
1678 mmio_write_32(DBSC_DBTR(5), js2[js2_trcpb]);
1679
1680 /* DBTR6.TRAS : tRAS */
1681 mmio_write_32(DBSC_DBTR(6), js2[js2_tras]);
1682
1683 /* DBTR7.TRRD : tRRD */
1684 mmio_write_32(DBSC_DBTR(7), (js2[js2_trrd] << 16) | js2[js2_trrd]);
1685
1686 /* DBTR8.TFAW : tFAW */
1687 mmio_write_32(DBSC_DBTR(8), js2[js2_tfaw]);
1688
1689 /* DBTR9.TRDPR : tRTP */
1690 mmio_write_32(DBSC_DBTR(9), js2[js2_trtp]);
1691
1692 /* DBTR10.TWR : nWR */
1693 mmio_write_32(DBSC_DBTR(10), js1[js1_ind].nwr);
1694
1695 /*
1696 * DBTR11.TRDWR : RL + BL / 2 + Rounddown(tRPST) + PHY_ODTLoff -
1697 * odtlon + tDQSCK - tODTon,min +
1698 * PCB delay (out+in) + tPHY_ODToff
1699 */
1700 mmio_write_32(DBSC_DBTR(11),
1701 RL + (16U / 2U) + 1U + 2U - js1[js1_ind].odtlon +
1702 js2[js2_tdqsck] - js2[js2_tODTon_min] +
1703 _f_scale(ddr_mbps, ddr_mbpsdiv, 1300, 0));
1704
1705 /* DBTR12.TWRRD : WL + 1 + BL/2 + tWTR */
1706 data_l = WL + 1U + (16U / 2U) + js2[js2_twtr];
1707 mmio_write_32(DBSC_DBTR(12), (data_l << 16) | data_l);
1708
1709 /* DBTR13.TRFCAB : tRFCab */
1710 mmio_write_32(DBSC_DBTR(13), js2[js2_trfcab]);
1711
1712 /* DBTR14.TCKEHDLL,tCKEH : tCKEHCMD,tCKEHCMD */
1713 mmio_write_32(DBSC_DBTR(14),
1714 (js2[js2_tckehcmd] << 16) | (js2[js2_tckehcmd]));
1715
1716 /* DBTR15.TCKESR,TCKEL : tSR,tCKELPD */
1717 mmio_write_32(DBSC_DBTR(15), (js2[js2_tsr] << 16) | (js2[js2_tckelpd]));
1718
1719 /* DBTR16 */
1720 /* WDQL : tphy_wrlat + tphy_wrdata */
1721 wdql = ddrtbl_getval(_cnf_DDR_PI_REGSET, _reg_PI_WRLAT_F1);
1722 /* DQENLTNCY : tphy_wrlat = WL-2 : PHY_WRITE_PATH_LAT_ADD == 0
1723 * tphy_wrlat = WL-3 : PHY_WRITE_PATH_LAT_ADD != 0
1724 */
1725 dqenltncy = ddrtbl_getval(_cnf_DDR_PI_REGSET, _reg_PI_WRLAT_ADJ_F1);
1726 /* DQL : tphy_rdlat + trdata_en */
1727 /* it is not important for dbsc */
1728 dql = RL + 16U;
1729 /* DQIENLTNCY : trdata_en */
1730 dqienltncy = ddrtbl_getval(_cnf_DDR_PI_REGSET, _reg_PI_RDLAT_ADJ_F1) - 1U;
1731 mmio_write_32(DBSC_DBTR(16),
1732 (dqienltncy << 24) | (dql << 16) | (dqenltncy << 8) | wdql);
1733
1734 /* DBTR24 */
1735 /* WRCSLAT = WRLAT -5 */
1736 wrcslat = wdql - 5U;
1737 /* WRCSGAP = 5 */
1738 wrcsgap = 5U;
1739 /* RDCSLAT = RDLAT_ADJ +2 */
1740 rdcslat = dqienltncy;
1741 if (prr_product != PRR_PRODUCT_M3) {
1742 rdcslat += 2U;
1743 }
1744 /* RDCSGAP = 6 */
1745 rdcsgap = 6U;
1746 if (prr_product == PRR_PRODUCT_M3) {
1747 rdcsgap = 4U;
1748 }
1749 mmio_write_32(DBSC_DBTR(24),
1750 (rdcsgap << 24) | (rdcslat << 16) | (wrcsgap << 8) | wrcslat);
1751
1752 /* DBTR17.TMODRD,TMOD,TRDMR: tMRR,tMRD,(0) */
1753 mmio_write_32(DBSC_DBTR(17),
1754 (js2[js2_tmrr] << 24) | (js2[js2_tmrd] << 16));
1755
1756 /* DBTR18.RODTL, RODTA, WODTL, WODTA : do not use in LPDDR4 */
1757 mmio_write_32(DBSC_DBTR(18), 0);
1758
1759 /* DBTR19.TZQCL, TZQCS : do not use in LPDDR4 */
1760 mmio_write_32(DBSC_DBTR(19), 0);
1761
1762 /* DBTR20.TXSDLL, TXS : tRFCab+tCKEHCMD */
1763 data_l = js2[js2_trfcab] + js2[js2_tckehcmd];
1764 mmio_write_32(DBSC_DBTR(20), (data_l << 16) | data_l);
1765
1766 /* DBTR21.TCCD */
1767 /* DBTR23.TCCD */
1768 if (ddr_tccd == 8U) {
1769 data_l = 8U;
1770 mmio_write_32(DBSC_DBTR(21), (data_l << 16) | data_l);
1771 mmio_write_32(DBSC_DBTR(23), 0x00000002);
1772 } else if (ddr_tccd <= 11U) {
1773 data_l = 11U;
1774 mmio_write_32(DBSC_DBTR(21), (data_l << 16) | data_l);
1775 mmio_write_32(DBSC_DBTR(23), 0x00000000);
1776 } else {
1777 data_l = ddr_tccd;
1778 mmio_write_32(DBSC_DBTR(21), (data_l << 16) | data_l);
1779 mmio_write_32(DBSC_DBTR(23), 0x00000000);
1780 }
1781
1782 /* DBTR22.ZQLAT : */
1783 data_l = js2[js2_tzqcalns] * 100U; /* 1000 * 1000 ps */
1784 data_l = (data_l << 16U) | (js2[js2_tzqlat] + 24U + 20U);
1785 mmio_write_32(DBSC_DBTR(22), data_l);
1786
1787 /* DBTR25 : do not use in LPDDR4 */
1788 mmio_write_32(DBSC_DBTR(25), 0U);
1789
1790 /*
1791 * DBRNK :
1792 * DBSC_DBRNK2 rkrr
1793 * DBSC_DBRNK3 rkrw
1794 * DBSC_DBRNK4 rkwr
1795 * DBSC_DBRNK5 rkww
1796 */
1797 #define _par_DBRNK_VAL (0x7007U)
1798
1799 for (i = 0; i < 4; i++) {
1800 data_l = (_par_DBRNK_VAL >> ((uint32_t)i * 4U)) & 0x0fU;
1801 data_l2 = 0U;
1802 foreach_vch(ch) {
1803 data_l2 = data_l2 | (data_l << (4U * ch));
1804 }
1805 mmio_write_32(DBSC_DBRNK(2 + i), data_l2);
1806 }
1807 mmio_write_32(DBSC_DBADJ0, 0x00000000U);
1808
1809 /* timing registers for scheduler */
1810 /* SCFCTST0 */
1811 /* SCFCTST0 ACT-ACT */
1812 scfctst0_act_act = js2[js2_trcpb] * 800UL * ddr_mbpsdiv / ddr_mbps;
1813 /* SCFCTST0 RDA-ACT */
1814 scfctst0_rda_act = ((16U / 2U) + js2[js2_trtp] - 8U +
1815 js2[js2_trppb]) * 800UL * ddr_mbpsdiv / ddr_mbps;
1816 /* SCFCTST0 WRA-ACT */
1817 scfctst0_wra_act = (WL + 1U + (16U / 2U) +
1818 js1[js1_ind].nwr) * 800UL * ddr_mbpsdiv / ddr_mbps;
1819 /* SCFCTST0 PRE-ACT */
1820 scfctst0_pre_act = js2[js2_trppb];
1821 mmio_write_32(DBSC_SCFCTST0,
1822 (scfctst0_act_act << 24) | (scfctst0_rda_act << 16) |
1823 (scfctst0_wra_act << 8) | scfctst0_pre_act);
1824
1825 /* SCFCTST1 */
1826 /* SCFCTST1 RD-WR */
1827 scfctst1_rd_wr = (mmio_read_32(DBSC_DBTR(11)) & 0xffU) * 800UL * ddr_mbpsdiv /
1828 ddr_mbps;
1829 /* SCFCTST1 WR-RD */
1830 scfctst1_wr_rd = (mmio_read_32(DBSC_DBTR(12)) & 0xff) * 800UL * ddr_mbpsdiv /
1831 ddr_mbps;
1832 /* SCFCTST1 ACT-RD/WR */
1833 scfctst1_act_rd_wr = js2[js2_trcd] * 800UL * ddr_mbpsdiv / ddr_mbps;
1834 /* SCFCTST1 ASYNCOFS */
1835 scfctst1_asyncofs = 12U;
1836 mmio_write_32(DBSC_SCFCTST1,
1837 (scfctst1_rd_wr << 24) | (scfctst1_wr_rd << 16) |
1838 (scfctst1_act_rd_wr << 8) | scfctst1_asyncofs);
1839
1840 /* DBSCHRW1 */
1841 /* DBSCHRW1 SCTRFCAB */
1842 dbschhrw1_sctrfcab = js2[js2_trfcab] * 800UL * ddr_mbpsdiv / ddr_mbps;
1843 data_l = (((mmio_read_32(DBSC_DBTR(16)) & 0x00FF0000U) >> 16) +
1844 (mmio_read_32(DBSC_DBTR(22)) & 0x0000FFFFU) +
1845 (0x28U * 2U)) * 400U * 2U * ddr_mbpsdiv / ddr_mbps + 7U;
1846 if (dbschhrw1_sctrfcab < data_l) {
1847 dbschhrw1_sctrfcab = data_l;
1848 }
1849
1850 if ((prr_product == PRR_PRODUCT_M3) && (prr_cut < PRR_PRODUCT_30)) {
1851 mmio_write_32(DBSC_DBSCHRW1, dbschhrw1_sctrfcab +
1852 ((mmio_read_32(DBSC_DBTR(22)) & 0x0000FFFFU) *
1853 400U * 2U * ddr_mbpsdiv + (ddr_mbps - 1U)) / ddr_mbps - 3U);
1854 } else {
1855 mmio_write_32(DBSC_DBSCHRW1, dbschhrw1_sctrfcab +
1856 ((mmio_read_32(DBSC_DBTR(22)) & 0x0000FFFFU) *
1857 400U * 2U * ddr_mbpsdiv + (ddr_mbps - 1U)) / ddr_mbps);
1858 }
1859
1860 /* QOS and CAM */
1861 #ifdef DDR_QOS_INIT_SETTING /* used only when the separate QoS init is not performed */
1862 /* wbkwait(0004), wbkmdhi(4,2),wbkmdlo(1,8) */
1863 mmio_write_32(DBSC_DBCAM0CNF1, 0x00043218U);
1864 /* 0(fillunit),8(dirtymax),4(dirtymin) */
1865 mmio_write_32(DBSC_DBCAM0CNF2, 0x000000F4U);
1866 /* stop_tolerance */
1867 mmio_write_32(DBSC_DBSCHRW0, 0x22421111U);
1868 /* rd-wr/wr-rd toggle priority */
1869 mmio_write_32(DBSC_SCFCTST2, 0x012F1123U);
1870 mmio_write_32(DBSC_DBSCHSZ0, 0x00000001U);
1871 mmio_write_32(DBSC_DBSCHCNT0, 0x000F0037U);
1872
1873 /* QoS Settings */
1874 mmio_write_32(DBSC_DBSCHQOS00, 0x00000F00U);
1875 mmio_write_32(DBSC_DBSCHQOS01, 0x00000B00U);
1876 mmio_write_32(DBSC_DBSCHQOS02, 0x00000000U);
1877 mmio_write_32(DBSC_DBSCHQOS03, 0x00000000U);
1878 mmio_write_32(DBSC_DBSCHQOS40, 0x00000300U);
1879 mmio_write_32(DBSC_DBSCHQOS41, 0x000002F0U);
1880 mmio_write_32(DBSC_DBSCHQOS42, 0x00000200U);
1881 mmio_write_32(DBSC_DBSCHQOS43, 0x00000100U);
1882 mmio_write_32(DBSC_DBSCHQOS90, 0x00000100U);
1883 mmio_write_32(DBSC_DBSCHQOS91, 0x000000F0U);
1884 mmio_write_32(DBSC_DBSCHQOS92, 0x000000A0U);
1885 mmio_write_32(DBSC_DBSCHQOS93, 0x00000040U);
1886 mmio_write_32(DBSC_DBSCHQOS120, 0x00000040U);
1887 mmio_write_32(DBSC_DBSCHQOS121, 0x00000030U);
1888 mmio_write_32(DBSC_DBSCHQOS122, 0x00000020U);
1889 mmio_write_32(DBSC_DBSCHQOS123, 0x00000010U);
1890 mmio_write_32(DBSC_DBSCHQOS130, 0x00000100U);
1891 mmio_write_32(DBSC_DBSCHQOS131, 0x000000F0U);
1892 mmio_write_32(DBSC_DBSCHQOS132, 0x000000A0U);
1893 mmio_write_32(DBSC_DBSCHQOS133, 0x00000040U);
1894 mmio_write_32(DBSC_DBSCHQOS140, 0x000000C0U);
1895 mmio_write_32(DBSC_DBSCHQOS141, 0x000000B0U);
1896 mmio_write_32(DBSC_DBSCHQOS142, 0x00000080U);
1897 mmio_write_32(DBSC_DBSCHQOS143, 0x00000040U);
1898 mmio_write_32(DBSC_DBSCHQOS150, 0x00000040U);
1899 mmio_write_32(DBSC_DBSCHQOS151, 0x00000030U);
1900 mmio_write_32(DBSC_DBSCHQOS152, 0x00000020U);
1901 mmio_write_32(DBSC_DBSCHQOS153, 0x00000010U);
1902
1903 mmio_write_32(QOSCTRL_RAEN, 0x00000001U);
1904 #endif /* DDR_QOS_INIT_SETTING */
1905
1906 /* resrdis */
1907 mmio_write_32(DBSC_DBBCAMDIS, 0x00000001U);
1908 }
1909
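/*
 * Post-training DBSC setup: scan the trained RDDQS latency over all
 * channels/slices to extend DBTR24, program the per-rank density into
 * DBMEMCONF, enable DBI (when configured) and auto refresh, set up the
 * periodic ZQ calibration (and, with RCAR_REWT_TRAINING on G2M Ver.1.1+,
 * periodic write-DQ training), then enable DRAM access via DBACEN.
 */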
1910 static void dbsc_regset_post(void)
1911 {
1912 uint32_t slice, rdlat_max, rdlat_min;
1913 uint32_t ch, cs;
1914 uint32_t data_l;
1915 uint32_t srx;
1916
1917 rdlat_max = 0U;
1918 rdlat_min = 0xffffU;
1919 foreach_vch(ch) {
1920 for (cs = 0U; cs < CS_CNT; cs++) {
1921 if ((ch_have_this_cs[cs] & (1U << ch)) != 0U) {
1922 for (slice = 0U; slice < SLICE_CNT; slice++) {
1923 ddr_setval_s(ch, slice,
1924 _reg_PHY_PER_CS_TRAINING_INDEX,
1925 cs);
1926 data_l = ddr_getval_s(ch, slice,
1927 _reg_PHY_RDDQS_LATENCY_ADJUST);
1928 if (data_l > rdlat_max) {
1929 rdlat_max = data_l;
1930 }
1931 if (data_l < rdlat_min) {
1932 rdlat_min = data_l;
1933 }
1934 }
1935 }
1936 }
1937 }
1938
1939 mmio_write_32(DBSC_DBTR(24),
1940 ((rdlat_max + 2U) << 24) +
1941 ((rdlat_max + 2U) << 16) +
1942 mmio_read_32(DBSC_DBTR(24)));
1943
1944 /* set ddr density information */
1945 foreach_ech(ch) {
1946 for (cs = 0U; cs < CS_CNT; cs++) {
1947 if (ddr_density[ch][cs] == 0xffU) {
1948 mmio_write_32(DBSC_DBMEMCONF(ch, cs), 0x00U);
1949 } else {
1950 mmio_write_32(DBSC_DBMEMCONF(ch, cs),
1951 DBMEMCONF_REGD(ddr_density[ch]
1952 [cs]));
1953 }
1954 }
1955 mmio_write_32(DBSC_DBMEMCONF(ch, 2), 0x00000000U);
1956 mmio_write_32(DBSC_DBMEMCONF(ch, 3), 0x00000000U);
1957 }
1958
1959 mmio_write_32(DBSC_DBBUS0CNF1, 0x00000010U);
1960
1961 /* set DBI */
1962 if (board_cnf->dbi_en != 0U) {
1963 mmio_write_32(DBSC_DBDBICNT, 0x00000003U);
1964 }
1965
1966 /* set REFCYCLE */
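	/*
	 * Example, assuming get_refperiod() returns the refresh interval in
	 * nanoseconds: at 3200 Mbps (ddr_mbpsdiv = 1) a 3900 ns period gives
	 * 3900 * 3200 / 2000 = 6240 DDR-clock cycles for DBRFCNF1.REFINT.
	 */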
1967 data_l = (get_refperiod()) * ddr_mbps / 2000U / ddr_mbpsdiv;
1968 mmio_write_32(DBSC_DBRFCNF1, 0x00080000U | (data_l & 0x0000ffffU));
1969 mmio_write_32(DBSC_DBRFCNF2, 0x00010000U | DBSC_REFINTS);
1970
1971 #if RCAR_REWT_TRAINING != 0
1972 	/* Periodic WriteDQ Training setting */
1973 if ((prr_product == PRR_PRODUCT_M3) &&
1974 (prr_cut == PRR_PRODUCT_10)) {
1975 		/* not supported on G2M Ver.1.0 */
1976 } else {
1977 /* G2M Ver.1.1 or later */
1978 mmio_write_32(DBSC_DBDFIPMSTRCNF, 0x00000000U);
1979
1980 ddr_setval_ach_as(_reg_PHY_WDQLVL_PATT, 0x04U);
1981 ddr_setval_ach_as(_reg_PHY_WDQLVL_QTR_DLY_STEP, 0x0FU);
1982 ddr_setval_ach_as(_reg_PHY_WDQLVL_DLY_STEP, 0x50U);
1983 ddr_setval_ach_as(_reg_PHY_WDQLVL_DQDM_SLV_DLY_START, 0x0300U);
1984
1985 ddr_setval_ach(_reg_PI_WDQLVL_CS_MAP,
1986 ddrtbl_getval(_cnf_DDR_PI_REGSET,
1987 _reg_PI_WDQLVL_CS_MAP));
1988 ddr_setval_ach(_reg_PI_LONG_COUNT_MASK, 0x1fU);
1989 ddr_setval_ach(_reg_PI_WDQLVL_VREF_EN, 0x00U);
1990 ddr_setval_ach(_reg_PI_WDQLVL_ROTATE, 0x01U);
1991 ddr_setval_ach(_reg_PI_TREF_F0, 0x0000U);
1992 ddr_setval_ach(_reg_PI_TREF_F1, 0x0000U);
1993 ddr_setval_ach(_reg_PI_TREF_F2, 0x0000U);
1994
1995 if (prr_product == PRR_PRODUCT_M3) {
1996 ddr_setval_ach(_reg_PI_WDQLVL_EN, 0x02U);
1997 } else {
1998 ddr_setval_ach(_reg_PI_WDQLVL_EN_F1, 0x02U);
1999 }
2000 ddr_setval_ach(_reg_PI_WDQLVL_PERIODIC, 0x01U);
2001
2002 /* DFI_PHYMSTR_ACK , WTmode setting */
2003 /* DFI_PHYMSTR_ACK: WTmode =b'01 */
2004 mmio_write_32(DBSC_DBDFIPMSTRCNF, 0x00000011U);
2005 }
2006 #endif /* RCAR_REWT_TRAINING */
2007 /* periodic dram zqcal enable */
2008 mmio_write_32(DBSC_DBCALCNF, 0x01000010U);
2009
2010 /* periodic phy ctrl update enable */
2011 if ((prr_product == PRR_PRODUCT_M3) &&
2012 (prr_cut < PRR_PRODUCT_30)) {
2013 		/* nothing to do: not supported on G2M Ver.1.x */
2014 } else {
2015 mmio_write_32(DBSC_DBDFICUPDCNF, 0x28240001U);
2016 }
2017
2018 #ifdef DDR_BACKUPMODE
2019 /* SRX */
2020 srx = 0x0A840001U; /* for All channels */
2021 if (ddr_backup == DRAM_BOOT_STATUS_WARM) {
2022 #ifdef DDR_BACKUPMODE_HALF /* half-channel mode (ch0/ch1 only) */
2023 NOTICE("BL2: [DEBUG_MESS] DDR_BACKUPMODE_HALF\n");
2024 srx = 0x0A040001U;
2025 #endif /* DDR_BACKUPMODE_HALF */
2026 send_dbcmd(srx);
2027 }
2028 #endif /* DDR_BACKUPMODE */
2029
2030 /* set Auto Refresh */
2031 mmio_write_32(DBSC_DBRFEN, 0x00000001U);
2032
2033 #if RCAR_REWT_TRAINING != 0
2034 	/* Periodic WriteDQ Training */
2035 if ((prr_product == PRR_PRODUCT_M3) &&
2036 (prr_cut == PRR_PRODUCT_10)) {
2037 		/* nothing to do: not supported on G2M Ver.1.0 */
2038 } else {
2039 /* G2M Ver.1.1 or later */
2040 ddr_setval_ach(_reg_PI_WDQLVL_INTERVAL, 0x0100U);
2041 }
2042 #endif /* RCAR_REWT_TRAINING */
2043
2044 /* dram access enable */
2045 mmio_write_32(DBSC_DBACEN, 0x00000001U);
2046
2047 MSG_LF(__func__ "(done)");
2048 }
2049
2050 /* DFI_INIT_START */
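/*
 * Release the PHY DLL reset per channel, then poll DBDFISTAT until every
 * valid channel reports dfi_init_complete, re-kicking the RX calibration
 * every 256 polls; returns 0xff on timeout, otherwise programs DBDFICNT
 * for normal 2:1 frequency-ratio operation.
 */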
2051 static uint32_t dfi_init_start(void)
2052 {
2053 uint32_t ch;
2054 uint32_t phytrainingok;
2055 uint32_t retry;
2056 uint32_t data_l;
2057 uint32_t ret = 0U;
2058 const uint32_t RETRY_MAX = 0x10000U;
2059
2060 ddr_setval_ach_as(_reg_PHY_DLL_RST_EN, 0x02U);
2061 dsb_sev();
2062 ddrphy_regif_idle();
2063
2064 /* dll_rst negate */
2065 foreach_vch(ch) {
2066 mmio_write_32(DBSC_DBPDCNT3(ch), 0x0000CF01U);
2067 }
2068 dsb_sev();
2069
2070 /* wait init_complete */
2071 phytrainingok = 0U;
2072 retry = 0U;
2073 while (retry++ < RETRY_MAX) {
2074 foreach_vch(ch) {
2075 data_l = mmio_read_32(DBSC_DBDFISTAT(ch));
2076 if (data_l & 0x00000001U) {
2077 phytrainingok |= (1U << ch);
2078 }
2079 }
2080 dsb_sev();
2081 if (phytrainingok == ddr_phyvalid) {
2082 break;
2083 }
2084 if (retry % 256U == 0U) {
2085 ddr_setval_ach_as(_reg_SC_PHY_RX_CAL_START, 0x01U);
2086 }
2087 }
2088
2089 /* all ch ok? */
2090 if ((phytrainingok & ddr_phyvalid) != ddr_phyvalid) {
2091 ret = 0xffU;
2092 goto done;
2093 }
2094
2095 /*
2096 * dbdficnt0:
2097 * dfi_dram_clk_disable=0
2098 * dfi_frequency = 0
2099 * freq_ratio = 01 (2:1)
2100 * init_start =0
2101 */
2102 foreach_vch(ch) {
2103 mmio_write_32(DBSC_DBDFICNT(ch), 0x00000010U);
2104 }
2105 dsb_sev();
2106 done:
2107 return ret;
2108 }
2109
2110 /* drivability setting : CMOS MODE ON/OFF */
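/*
 * Toggle bit 14 of the address/clock/CS pad drive registers on every
 * valid channel: mode != 0 selects the LPDDR4 drive mode, mode == 0 the
 * CMOS mode used while the mode registers are programmed.
 */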
2111 static void change_lpddr4_en(uint32_t mode)
2112 {
2113 uint32_t ch;
2114 uint32_t i;
2115 uint32_t data_l;
2116 const uint32_t _reg_PHY_PAD_DRIVE_X[3] = {
2117 _reg_PHY_PAD_ADDR_DRIVE,
2118 _reg_PHY_PAD_CLK_DRIVE,
2119 _reg_PHY_PAD_CS_DRIVE
2120 };
2121
2122 foreach_vch(ch) {
2123 for (i = 0U; i < 3U; i++) {
2124 data_l = ddr_getval(ch, _reg_PHY_PAD_DRIVE_X[i]);
2125 if (mode != 0U) {
2126 data_l |= (1U << 14);
2127 } else {
2128 data_l &= ~(1U << 14);
2129 }
2130 ddr_setval(ch, _reg_PHY_PAD_DRIVE_X[i], data_l);
2131 }
2132 }
2133 }
2134
2135 /* drivability setting */
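/*
 * Select the pad termination code: if the LIFEC chip ID matches an entry
 * in termcode_by_sample[], that table value overrides the register-table
 * default; otherwise (on G2M Ver.1.1 or later) the pad calibration is run
 * and the termination registers are adjusted from its result.  Finally the
 * temperature-compensation state is seeded via
 * ddr_padcal_tcompensate_getinit().
 */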
2136 static uint32_t set_term_code(void)
2137 {
2138 uint32_t i;
2139 uint32_t ch, index;
2140 uint32_t data_l;
2141 uint32_t chip_id[2];
2142 uint32_t term_code;
2143 uint32_t override;
2144
2145 term_code = ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
2146 _reg_PHY_PAD_DATA_TERM);
2147 override = 0U;
2148 for (i = 0U; i < 2U; i++) {
2149 chip_id[i] = mmio_read_32(LIFEC_CHIPID(i));
2150 }
2151
2152 index = 0U;
2153 while (true) {
2154 if (termcode_by_sample[index][0] == 0xffffffff) {
2155 break;
2156 }
2157 if ((termcode_by_sample[index][0] == chip_id[0]) &&
2158 (termcode_by_sample[index][1] == chip_id[1])) {
2159 term_code = termcode_by_sample[index][2];
2160 override = 1;
2161 break;
2162 }
2163 index++;
2164 }
2165
2166 if (override != 0U) {
2167 for (index = 0U; index < _reg_PHY_PAD_TERM_X_NUM; index++) {
2168 data_l =
2169 ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
2170 _reg_PHY_PAD_TERM_X[index]);
2171 data_l = (data_l & 0xfffe0000U) | term_code;
2172 ddr_setval_ach(_reg_PHY_PAD_TERM_X[index], data_l);
2173 }
2174 } else if ((prr_product == PRR_PRODUCT_M3) &&
2175 (prr_cut == PRR_PRODUCT_10)) {
2176 		/* nothing to do for G2M Ver.1.0 */
2177 } else {
2178 ddr_setval_ach(_reg_PHY_PAD_TERM_X[0],
2179 (ddrtbl_getval(_cnf_DDR_PHY_ADR_G_REGSET,
2180 _reg_PHY_PAD_TERM_X[0]) & 0xFFFE0000U));
2181 ddr_setval_ach(_reg_PHY_CAL_CLEAR_0, 0x01U);
2182 ddr_setval_ach(_reg_PHY_CAL_START_0, 0x01U);
2183 foreach_vch(ch) {
2184 do {
2185 data_l =
2186 ddr_getval(ch, _reg_PHY_CAL_RESULT2_OBS_0);
2187 } while (!(data_l & 0x00800000U));
2188 }
2189
2190 /* G2M Ver.1.1 or later */
2191 foreach_vch(ch) {
2192 for (index = 0U; index < _reg_PHY_PAD_TERM_X_NUM;
2193 index++) {
2194 data_l = ddr_getval(ch, _reg_PHY_PAD_TERM_X[index]);
2195 ddr_setval(ch, _reg_PHY_PAD_TERM_X[index],
2196 (data_l & 0xFFFE0FFFU) | 0x00015000U);
2197 }
2198 }
2199 }
2200
2201 ddr_padcal_tcompensate_getinit(override);
2202
2203 return 0U;
2204 }
2205
2206 /* DDR mode register setting */
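/*
 * Program the LPDDR4 mode registers for both frequency set points: MR13
 * selects the target set, then MR1/MR2/MR3/MR11/MR12/MR14/MR22 are written
 * from the PI register table and ZQ calibration is started and latched for
 * that set.
 */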
2207 static void ddr_register_set(void)
2208 {
2209 int32_t fspwp;
2210 uint32_t tmp;
2211
2212 for (fspwp = 1; fspwp >= 0; fspwp--) {
2213 /*MR13, fspwp */
2214 send_dbcmd(0x0e840d08U | ((2U - fspwp) << 6));
2215
2216 tmp = ddrtbl_getval(_cnf_DDR_PI_REGSET,
2217 reg_pi_mr1_data_fx_csx[fspwp][0]);
2218 send_dbcmd(0x0e840100U | tmp);
2219
2220 tmp = ddrtbl_getval(_cnf_DDR_PI_REGSET,
2221 reg_pi_mr2_data_fx_csx[fspwp][0]);
2222 send_dbcmd(0x0e840200U | tmp);
2223
2224 tmp = ddrtbl_getval(_cnf_DDR_PI_REGSET,
2225 reg_pi_mr3_data_fx_csx[fspwp][0]);
2226 send_dbcmd(0x0e840300U | tmp);
2227
2228 tmp = ddrtbl_getval(_cnf_DDR_PI_REGSET,
2229 reg_pi_mr11_data_fx_csx[fspwp][0]);
2230 send_dbcmd(0x0e840b00U | tmp);
2231
2232 tmp = ddrtbl_getval(_cnf_DDR_PI_REGSET,
2233 reg_pi_mr12_data_fx_csx[fspwp][0]);
2234 send_dbcmd(0x0e840c00U | tmp);
2235
2236 tmp = ddrtbl_getval(_cnf_DDR_PI_REGSET,
2237 reg_pi_mr14_data_fx_csx[fspwp][0]);
2238 send_dbcmd(0x0e840e00U | tmp);
2239 /* MR22 */
2240 send_dbcmd(0x0e841616U);
2241
2242 /* ZQCAL start */
2243 send_dbcmd(0x0d84004FU);
2244
2245 /* ZQLAT */
2246 send_dbcmd(0x0d840051U);
2247 }
2248
2249 /* MR13, fspwp */
2250 send_dbcmd(0x0e840d08U);
2251 }
2252
2253 /* Training handshake functions */
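/*
 * DFI frequency-change handshake helpers: wait_freqchgreq() polls DBPDSTAT
 * bit 0 on every valid channel until the request asserts or deasserts,
 * set_freqchgack() drives the acknowledge through DBPDCNT2, and
 * set_dfifrequency() updates the dfi_frequency field of DBDFICNT.
 */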
2254 static inline uint32_t wait_freqchgreq(uint32_t assert)
2255 {
2256 uint32_t data_l;
2257 uint32_t count;
2258 uint32_t ch;
2259
2260 count = 100000U;
2261
2262 if (assert != 0U) {
2263 do {
2264 data_l = 1U;
2265 foreach_vch(ch) {
2266 data_l &= mmio_read_32(DBSC_DBPDSTAT(ch));
2267 }
2268 count = count - 1U;
2269 } while (((data_l & 0x01U) != 0x01U) && (count != 0U));
2270 } else {
2271 do {
2272 data_l = 0U;
2273 foreach_vch(ch) {
2274 data_l |= mmio_read_32(DBSC_DBPDSTAT(ch));
2275 }
2276 count = count - 1U;
2277 } while (((data_l & 0x01U) != 0x00U) && (count != 0U));
2278 }
2279
2280 return (count == 0U);
2281 }
2282
2283 static inline void set_freqchgack(uint32_t assert)
2284 {
2285 uint32_t ch;
2286 uint32_t data_l;
2287
2288 if (assert != 0U) {
2289 data_l = 0x0CF20000U;
2290 } else {
2291 data_l = 0x00000000U;
2292 }
2293
2294 foreach_vch(ch) {
2295 mmio_write_32(DBSC_DBPDCNT2(ch), data_l);
2296 }
2297 }
2298
2299 static inline void set_dfifrequency(uint32_t freq)
2300 {
2301 uint32_t ch;
2302
2303 foreach_vch(ch) {
2304 mmio_clrsetbits_32(DBSC_DBDFICNT(ch), 0x1fU << 24, freq << 24);
2305 }
2306 dsb_sev();
2307 }
2308
2309 static uint32_t pll3_freq(uint32_t on)
2310 {
2311 uint32_t timeout;
2312
2313 timeout = wait_freqchgreq(1U);
2314
2315 if (timeout != 0U) {
2316 return 1U;
2317 }
2318
2319 pll3_control(on);
2320 set_dfifrequency(on);
2321
2322 set_freqchgack(1U);
2323 timeout = wait_freqchgreq(0U);
2324 set_freqchgack(0U);
2325
2326 if (timeout != 0U) {
2327 FATAL_MSG("BL2: Time out[2]\n");
2328 return 1U;
2329 }
2330
2331 return 0U;
2332 }
2333
2334 /* update dly */
2335 static void update_dly(void)
2336 {
2337 ddr_setval_ach(_reg_SC_PHY_MANUAL_UPDATE, 0x01U);
2338 ddr_setval_ach(_reg_PHY_ADRCTL_MANUAL_UPDATE, 0x01U);
2339 }
2340
2341 /* training by pi */
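/*
 * Start the PI training engine and service its frequency-change requests:
 * each request toggles PLL3 between the low and high setpoint through
 * pll3_freq(), while PI_INT_STATUS bit 0 is polled per channel until every
 * valid channel reports completion or the retry budget runs out.
 */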
2342 static uint32_t pi_training_go(void)
2343 {
2344 uint32_t flag;
2345 uint32_t data_l;
2346 uint32_t retry;
2347 const uint32_t RETRY_MAX = 4096U * 16U;
2348 uint32_t ch;
2349 uint32_t mst_ch;
2350 uint32_t cur_frq;
2351 uint32_t complete;
2352 uint32_t frqchg_req;
2353
2354 /* pi_start */
2355 ddr_setval_ach(_reg_PI_START, 0x01U);
2356 foreach_vch(ch) {
2357 ddr_getval(ch, _reg_PI_INT_STATUS);
2358 }
2359
2360 /* set dfi_phymstr_ack = 1 */
2361 mmio_write_32(DBSC_DBDFIPMSTRCNF, 0x00000001U);
2362 dsb_sev();
2363
2364 /* wait pi_int_status[0] */
2365 mst_ch = 0U;
2366 flag = 0U;
2367 complete = 0U;
2368 cur_frq = 0U;
2369 for (retry = 0U; retry < RETRY_MAX; retry++) {
2370 frqchg_req = mmio_read_32(DBSC_DBPDSTAT(mst_ch)) & 0x01;
2371
2372 if (frqchg_req != 0U) {
2373 if (cur_frq != 0U) {
2374 /* Low frequency */
2375 flag = pll3_freq(0U);
2376 cur_frq = 0U;
2377 } else {
2378 /* High frequency */
2379 flag = pll3_freq(1U);
2380 cur_frq = 1U;
2381 }
2382 if (flag != 0U) {
2383 break;
2384 }
2385 } else {
2386 if (cur_frq != 0U) {
2387 foreach_vch(ch) {
2388 if ((complete & (1U << ch)) != 0U) {
2389 continue;
2390 }
2391 data_l = ddr_getval(ch, _reg_PI_INT_STATUS);
2392 if ((data_l & 0x01U) != 0U) {
2393 complete |= (1U << ch);
2394 }
2395 }
2396 if (complete == ddr_phyvalid) {
2397 break;
2398 }
2399 }
2400 }
2401 }
2402 foreach_vch(ch) {
2403 /* dummy read */
2404 data_l = ddr_getval_s(ch, 0U, _reg_PHY_CAL_RESULT2_OBS_0);
2405 data_l = ddr_getval(ch, _reg_PI_INT_STATUS);
2406 ddr_setval(ch, _reg_PI_INT_ACK, data_l);
2407 }
2408 if (ddrphy_regif_chk() != 0U) {
2409 complete = 0xfdU;
2410 }
2411 return complete;
2412 }
2413
2414 /* Initialize DDR */
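/*
 * Cold-boot DDR bring-up sequence: PHY register load and configuration,
 * DBSC setup, DFI start, termination and RX offset calibration, mode
 * register writes, PI training, CA/read/write delay adjustment and the
 * final DBSC post-setup.
 */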
2415 static uint32_t init_ddr(void)
2416 {
2417 uint32_t i;
2418 uint32_t data_l;
2419 uint32_t phytrainingok;
2420 uint32_t ch, slice;
2421 uint32_t index;
2422 uint32_t err;
2423 int16_t adj;
2424
2425 MSG_LF(__func__ ":0\n");
2426
2427 /* unlock phy */
2428 /* Unlock DDRPHY register(AGAIN) */
2429 foreach_vch(ch) {
2430 mmio_write_32(DBSC_DBPDLK(ch), 0x0000A55AU);
2431 }
2432 dsb_sev();
2433
2434 reg_ddrphy_write_a(0x00001010U, 0x00000001U);
2435 /* DBSC register pre-setting */
2436 dbsc_regset_pre();
2437
2438 /* load ddrphy registers */
2439 ddrtbl_load();
2440
2441 /* configure ddrphy registers */
2442 ddr_config();
2443
2444 /* dfi_reset assert */
2445 foreach_vch(ch) {
2446 mmio_write_32(DBSC_DBPDCNT0(ch), 0x01U);
2447 }
2448 dsb_sev();
2449
2450 /* dbsc register set */
2451 dbsc_regset();
2452 MSG_LF(__func__ ":1\n");
2453
2454 /* dfi_reset negate */
2455 foreach_vch(ch) {
2456 mmio_write_32(DBSC_DBPDCNT0(ch), 0x00U);
2457 }
2458 dsb_sev();
2459
2460 /* dfi_init_start (start ddrphy) */
2461 err = dfi_init_start();
2462 if (err != 0U) {
2463 return INITDRAM_ERR_I;
2464 }
2465 MSG_LF(__func__ ":2\n");
2466
2467 /* ddr backupmode end */
2468 #ifdef DDR_BACKUPMODE
2469 if (ddr_backup != 0U) {
2470 NOTICE("BL2: [WARM_BOOT]\n");
2471 } else {
2472 NOTICE("BL2: [COLD_BOOT]\n");
2473 }
2474 #endif
2475 MSG_LF(__func__ ":3\n");
2476
2477 /* override term code after dfi_init_complete */
2478 err = set_term_code();
2479 if (err != 0U) {
2480 return INITDRAM_ERR_I;
2481 }
2482 MSG_LF(__func__ ":4\n");
2483
2484 /* rx offset calibration */
2485 if (prr_cut > PRR_PRODUCT_11) {
2486 err = rx_offset_cal_hw();
2487 } else {
2488 err = rx_offset_cal();
2489 }
2490 if (err != 0U) {
2491 return INITDRAM_ERR_O;
2492 }
2493 MSG_LF(__func__ ":5\n");
2494
2495 	/* Dummy PDE (power-down entry) */
2496 send_dbcmd(0x08840000U);
2497
2498 	/* PDX (power-down exit) */
2499 send_dbcmd(0x08840001U);
2500
2501 /* check register i/f is alive */
2502 err = ddrphy_regif_chk();
2503 if (err != 0U) {
2504 return INITDRAM_ERR_O;
2505 }
2506 MSG_LF(__func__ ":6\n");
2507
2508 /* phy initialize end */
2509
2510 /* setup DDR mode registers */
2511 /* CMOS MODE */
2512 change_lpddr4_en(0);
2513
2514 /* MRS */
2515 ddr_register_set();
2516
2517 /* Thermal sensor setting */
2518 /* THCTR Bit6: PONM=0 , Bit0: THSST=1 */
2519 data_l = (mmio_read_32(THS1_THCTR) & 0xFFFFFFBFU) | 0x00000001U;
2520 mmio_write_32(THS1_THCTR, data_l);
2521
2522 /* LPDDR4 MODE */
2523 change_lpddr4_en(1);
2524
2525 MSG_LF(__func__ ":7\n");
2526
2527 /* mask CS_MAP if RANKx is not found */
2528 foreach_vch(ch) {
2529 data_l = ddr_getval(ch, _reg_PI_CS_MAP);
2530 if ((ch_have_this_cs[1] & (1U << ch)) == 0U) {
2531 data_l = data_l & 0x05U;
2532 }
2533 ddr_setval(ch, _reg_PI_CS_MAP, data_l);
2534 }
2535
2536 /* exec pi_training */
2537 reg_ddrphy_write_a(ddr_regdef_adr(_reg_PHY_FREQ_SEL_MULTICAST_EN),
2538 BIT(ddr_regdef_lsb(_reg_PHY_FREQ_SEL_MULTICAST_EN)));
2539 ddr_setval_ach_as(_reg_PHY_PER_CS_TRAINING_MULTICAST_EN, 0x00U);
2540
2541 foreach_vch(ch) {
2542 for (slice = 0U; slice < SLICE_CNT; slice++) {
2543 ddr_setval_s(ch, slice,
2544 _reg_PHY_PER_CS_TRAINING_EN,
2545 ((ch_have_this_cs[1]) >> ch) & 0x01U);
2546 }
2547 }
2548
2549 phytrainingok = pi_training_go();
2550
2551 if (ddr_phyvalid != (phytrainingok & ddr_phyvalid)) {
2552 return INITDRAM_ERR_T | phytrainingok;
2553 }
2554
2555 MSG_LF(__func__ ":8\n");
2556
2557 /* CACS DLY ADJUST */
2558 data_l = board_cnf->cacs_dly + (uint32_t)_f_scale_adj(board_cnf->cacs_dly_adj);
2559 foreach_vch(ch) {
2560 for (i = 0U; i < _reg_PHY_CLK_CACS_SLAVE_DELAY_X_NUM; i++) {
2561 adj = _f_scale_adj(board_cnf->ch[ch].cacs_adj[i]);
2562 ddr_setval(ch, _reg_PHY_CLK_CACS_SLAVE_DELAY_X[i],
2563 data_l + (uint32_t)adj);
2564 }
2565
2566 if (ddr_phycaslice == 1U) {
2567 for (i = 0U; i < 6U; i++) {
2568 index = i + _reg_PHY_CLK_CACS_SLAVE_DELAY_X_NUM;
2569 adj = _f_scale_adj(board_cnf->ch[ch].cacs_adj[index]);
2570 ddr_setval_s(ch, 2U,
2571 _reg_PHY_CLK_CACS_SLAVE_DELAY_X[i],
2572 data_l + (uint32_t)adj);
2573 }
2574 }
2575 }
2576
2577 update_dly();
2578 MSG_LF(__func__ ":9\n");
2579
2580 /* Adjust write path latency */
2581 if (ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_WRITE_PATH_LAT_ADD) != 0U) {
2582 adjust_wpath_latency();
2583 }
2584
2585 /* RDQLVL Training */
2586 if (ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_IE_MODE) == 0U) {
2587 ddr_setval_ach_as(_reg_PHY_IE_MODE, 0x01U);
2588 }
2589
2590 err = rdqdm_man();
2591
2592 if (ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET, _reg_PHY_IE_MODE) == 0U) {
2593 ddr_setval_ach_as(_reg_PHY_IE_MODE, 0x00U);
2594 }
2595
2596 if (err != 0U) {
2597 return INITDRAM_ERR_T;
2598 }
2599 update_dly();
2600 MSG_LF(__func__ ":10\n");
2601
2602 /* WDQLVL Training */
2603 err = wdqdm_man();
2604 if (err != 0U) {
2605 return INITDRAM_ERR_T;
2606 }
2607 update_dly();
2608 MSG_LF(__func__ ":11\n");
2609
2610 dbsc_regset_post();
2611 MSG_LF(__func__ ":12\n");
2612
2613 return phytrainingok;
2614 }
2615
2616 /* SW LEVELING COMMON */
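/*
 * Common software-leveling kick: select the CS and assert the request
 * register on every channel that has that CS, then poll SWLVL_OP_DONE with
 * a bounded retry count; SWLVL_EXIT is toggled before and after to clear
 * the done flag.
 */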
2617 static uint32_t swlvl1(uint32_t ddr_csn, uint32_t reg_cs, uint32_t reg_kick)
2618 {
2619 const uint32_t RETRY_MAX = 0x1000U;
2620 uint32_t ch, data_l;
2621 uint32_t waiting;
2622 uint32_t retry;
2623 uint32_t err = 0U;
2624
2625 /* set EXIT -> OP_DONE is cleared */
2626 ddr_setval_ach(_reg_PI_SWLVL_EXIT, 0x01);
2627
2628 /* kick */
2629 foreach_vch(ch) {
2630 if ((ch_have_this_cs[ddr_csn % 2U] & (1U << ch)) != 0U) {
2631 ddr_setval(ch, reg_cs, ddr_csn);
2632 ddr_setval(ch, reg_kick, 0x01U);
2633 }
2634 }
2635 foreach_vch(ch) {
2636 /*PREPARE ADDR REGISTER (for SWLVL_OP_DONE) */
2637 ddr_getval(ch, _reg_PI_SWLVL_OP_DONE);
2638 }
2639 waiting = ch_have_this_cs[ddr_csn % 2U];
2640 dsb_sev();
2641 retry = RETRY_MAX;
2642 do {
2643 foreach_vch(ch) {
2644 if ((waiting & (1U << ch)) == 0U) {
2645 continue;
2646 }
2647 data_l = ddr_getval(ch, _reg_PI_SWLVL_OP_DONE);
2648 if ((data_l & 0x01U) != 0U) {
2649 waiting &= ~(1U << ch);
2650 }
2651 }
2652 retry--;
2653 } while ((waiting != 0U) && (retry > 0U));
2654 if (retry == 0U) {
2655 err = 1U;
2656 }
2657
2658 dsb_sev();
2659 /* set EXIT -> OP_DONE is cleared */
2660 ddr_setval_ach(_reg_PI_SWLVL_EXIT, 0x01U);
2661 dsb_sev();
2662
2663 return err;
2664 }
2665
2666 /* WDQ TRAINING */
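/*
 * Write DQ/DM leveling: wdqdm_man() programs the leveling timing limits,
 * runs wdqdm_man1() with the VREF search enabled, averages the resulting
 * MR14 VREF values and repeats the leveling with the averaged setting;
 * unless DDR_FAST_INIT is defined, wdqdm_ana1() then applies the per-bit
 * board adjustments and records the leveling windows.
 */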
2667 #ifndef DDR_FAST_INIT
2668 static void wdqdm_clr1(uint32_t ch, uint32_t ddr_csn)
2669 {
2670 uint32_t cs, slice;
2671 uint32_t data_l;
2672 int32_t i, k;
2673
2674 /* clr of training results buffer */
2675 cs = ddr_csn % 2U;
2676 data_l = board_cnf->dqdm_dly_w;
2677 for (slice = 0U; slice < SLICE_CNT; slice++) {
2678 k = (board_cnf->ch[ch].dqs_swap >> (4 * slice)) & 0x0f;
2679 if (((k >= 2) && (ddr_csn < 2)) || ((k < 2) && (ddr_csn >= 2))) {
2680 continue;
2681 }
2682
2683 for (i = 0; i <= 8; i++) {
2684 if ((ch_have_this_cs[CS_CNT - 1 - cs] & (1U << ch)) != 0U) {
2685 wdqdm_dly[ch][cs][slice][i] =
2686 wdqdm_dly[ch][CS_CNT - 1 - cs][slice][i];
2687 } else {
2688 wdqdm_dly[ch][cs][slice][i] = data_l;
2689 }
2690 wdqdm_le[ch][cs][slice][i] = 0U;
2691 wdqdm_te[ch][cs][slice][i] = 0U;
2692 }
2693 wdqdm_st[ch][cs][slice] = 0U;
2694 wdqdm_win[ch][cs][slice] = 0U;
2695 }
2696 }
2697
2698 static uint32_t wdqdm_ana1(uint32_t ch, uint32_t ddr_csn)
2699 {
2700 const uint32_t _par_WDQLVL_RETRY_THRES = 0x7c0U;
2701 uint32_t cs, slice;
2702 uint32_t data_l;
2703 int32_t min_win;
2704 int32_t i, k;
2705 uint32_t err;
2706 int32_t win;
2707 int8_t _adj;
2708 int16_t adj;
2709 uint32_t dq;
2710
2711 /* analysis of training results */
2712 err = 0U;
2713 for (slice = 0U; slice < SLICE_CNT; slice += 1U) {
2714 k = (board_cnf->ch[ch].dqs_swap >> (4 * slice)) & 0x0f;
2715 if (((k >= 2) && (ddr_csn < 2)) || ((k < 2) && (ddr_csn >= 2))) {
2716 continue;
2717 }
2718
2719 cs = ddr_csn % 2U;
2720 ddr_setval_s(ch, slice, _reg_PHY_PER_CS_TRAINING_INDEX, cs);
2721 for (i = 0; i < 9; i++) {
2722 dq = slice * 8U + i;
2723 if (i == 8) {
2724 _adj = board_cnf->ch[ch].dm_adj_w[slice];
2725 } else {
2726 _adj = board_cnf->ch[ch].dq_adj_w[dq];
2727 }
2728 adj = _f_scale_adj(_adj);
2729
2730 data_l =
2731 ddr_getval_s(ch, slice,
2732 _reg_PHY_CLK_WRX_SLAVE_DELAY[i]) + adj;
2733 ddr_setval_s(ch, slice, _reg_PHY_CLK_WRX_SLAVE_DELAY[i],
2734 data_l);
2735 wdqdm_dly[ch][cs][slice][i] = data_l;
2736 }
2737 ddr_setval_s(ch, slice, _reg_PHY_PER_CS_TRAINING_EN, 0x00);
2738 data_l = ddr_getval_s(ch, slice, _reg_PHY_WDQLVL_STATUS_OBS);
2739 wdqdm_st[ch][cs][slice] = data_l;
2740 min_win = INT_LEAST32_MAX;
2741 for (i = 0; i <= 8; i++) {
2742 ddr_setval_s(ch, slice, _reg_PHY_WDQLVL_DQDM_OBS_SELECT,
2743 i);
2744
2745 data_l =
2746 ddr_getval_s(ch, slice,
2747 _reg_PHY_WDQLVL_DQDM_TE_DLY_OBS);
2748 wdqdm_te[ch][cs][slice][i] = data_l;
2749 data_l =
2750 ddr_getval_s(ch, slice,
2751 _reg_PHY_WDQLVL_DQDM_LE_DLY_OBS);
2752 wdqdm_le[ch][cs][slice][i] = data_l;
2753 win = (int32_t)wdqdm_te[ch][cs][slice][i] -
2754 wdqdm_le[ch][cs][slice][i];
2755 if (min_win > win) {
2756 min_win = win;
2757 }
2758 if (data_l >= _par_WDQLVL_RETRY_THRES) {
2759 err = 2;
2760 }
2761 }
2762 wdqdm_win[ch][cs][slice] = min_win;
2763 ddr_setval_s(ch, slice, _reg_PHY_PER_CS_TRAINING_EN,
2764 ((ch_have_this_cs[1]) >> ch) & 0x01);
2765 }
2766 return err;
2767 }
2768 #endif/* DDR_FAST_INIT */
2769
2770 static void wdqdm_cp(uint32_t ddr_csn, uint32_t restore)
2771 {
2772 uint32_t tgt_cs, src_cs;
2773 uint32_t ch, slice;
2774 uint32_t tmp_r;
2775 uint32_t i;
2776
2777 /* copy of training results */
2778 foreach_vch(ch) {
2779 for (tgt_cs = 0U; tgt_cs < CS_CNT; tgt_cs++) {
2780 for (slice = 0U; slice < SLICE_CNT; slice++) {
2781 ddr_setval_s(ch, slice,
2782 _reg_PHY_PER_CS_TRAINING_INDEX,
2783 tgt_cs);
2784 src_cs = ddr_csn % 2U;
2785 if ((ch_have_this_cs[1] & (1U << ch)) == 0U) {
2786 src_cs = 0U;
2787 }
2788 for (i = 0U; i <= 4U; i += 4U) {
2789 if (restore != 0U) {
2790 tmp_r = rdqdm_dly[ch][tgt_cs][slice][i];
2791 } else {
2792 tmp_r = rdqdm_dly[ch][src_cs][slice][i];
2793 }
2794
2795 ddr_setval_s(ch, slice,
2796 _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY[i],
2797 tmp_r);
2798 }
2799 }
2800 }
2801 }
2802 }
2803
2804 static uint32_t wdqdm_man1(void)
2805 {
2806 uint32_t mr14_csab0_bak[DRAM_CH_CNT];
2807 uint32_t ch, cs, ddr_csn;
2808 uint32_t data_l;
2809 uint32_t err = 0U;
2810 #ifndef DDR_FAST_INIT
2811 uint32_t err_flg = 0U;
2812 #endif/* DDR_FAST_INIT */
2813
2814 /* CLEAR PREV RESULT */
2815 for (cs = 0U; cs < CS_CNT; cs++) {
2816 ddr_setval_ach_as(_reg_PHY_PER_CS_TRAINING_INDEX, cs);
2817 ddr_setval_ach_as(_reg_PHY_WDQLVL_CLR_PREV_RESULTS, 0x01U);
2818 }
2819 ddrphy_regif_idle();
2820
2821 for (ddr_csn = 0U; ddr_csn < CSAB_CNT; ddr_csn++) {
2822 if (((prr_product == PRR_PRODUCT_M3) &&
2823 (prr_cut == PRR_PRODUCT_10))) {
2824 wdqdm_cp(ddr_csn, 0U);
2825 }
2826
2827 foreach_vch(ch) {
2828 data_l = ddr_getval(ch, reg_pi_mr14_data_fx_csx[1][ddr_csn]);
2829 ddr_setval(ch, reg_pi_mr14_data_fx_csx[1][0], data_l);
2830 }
2831
2832 /* KICK WDQLVL */
2833 err = swlvl1(ddr_csn, _reg_PI_WDQLVL_CS, _reg_PI_WDQLVL_REQ);
2834 if (err != 0U) {
2835 goto err_exit;
2836 }
2837
2838 if (ddr_csn == 0U) {
2839 foreach_vch(ch) {
2840 mr14_csab0_bak[ch] = ddr_getval(ch,
2841 reg_pi_mr14_data_fx_csx[1][0]);
2842 }
2843 } else {
2844 foreach_vch(ch) {
2845 ddr_setval(ch, reg_pi_mr14_data_fx_csx[1][0],
2846 mr14_csab0_bak[ch]);
2847 }
2848 }
2849 #ifndef DDR_FAST_INIT
2850 foreach_vch(ch) {
2851 if ((ch_have_this_cs[ddr_csn % 2U] & (1U << ch)) == 0U) {
2852 wdqdm_clr1(ch, ddr_csn);
2853 continue;
2854 }
2855 err = wdqdm_ana1(ch, ddr_csn);
2856 if (err != 0U) {
2857 err_flg |= (1U << (ddr_csn * 4U + ch));
2858 }
2859 ddrphy_regif_idle();
2860 }
2861 #endif/* DDR_FAST_INIT */
2862 }
2863 err_exit:
2864 #ifndef DDR_FAST_INIT
2865 err |= err_flg;
2866 #endif/* DDR_FAST_INIT */
2867
2868 return err;
2869 }
2870
2871 static uint32_t wdqdm_man(void)
2872 {
2873 uint32_t datal, ch, ddr_csn, mr14_bkup[4][4];
2874 const uint32_t retry_max = 0x10U;
2875 uint32_t err, retry_cnt;
2876
2877 datal = RL + js2[js2_tdqsck] + (16U / 2U) + 1U - WL + 2U + 2U + 19U;
2878 if ((mmio_read_32(DBSC_DBTR(11)) & 0xFF) > datal) {
2879 datal = mmio_read_32(DBSC_DBTR(11)) & 0xFF;
2880 }
2881 ddr_setval_ach(_reg_PI_TDFI_WDQLVL_RW, datal);
2882
2883 ddr_setval_ach(_reg_PI_TDFI_WDQLVL_WR,
2884 (mmio_read_32(DBSC_DBTR(12)) & 0xFF) + 10);
2885
2886 ddr_setval_ach(_reg_PI_TRFC_F0, mmio_read_32(DBSC_DBTR(13)) & 0x1FF);
2887 ddr_setval_ach(_reg_PI_TRFC_F1, mmio_read_32(DBSC_DBTR(13)) & 0x1FF);
2888
2889 retry_cnt = 0U;
2890 err = 0U;
2891 do {
2892 ddr_setval_ach(_reg_PI_WDQLVL_VREF_EN, 0x01);
2893 ddr_setval_ach(_reg_PI_WDQLVL_VREF_NORMAL_STEPSIZE, 0x01);
2894 ddr_setval_ach(_reg_PI_WDQLVL_VREF_DELTA, 0x0C);
2895 dsb_sev();
2896 err = wdqdm_man1();
2897 foreach_vch(ch) {
2898 for (ddr_csn = 0U; ddr_csn < CSAB_CNT; ddr_csn++) {
2899 mr14_bkup[ch][ddr_csn] =
2900 ddr_getval(ch, reg_pi_mr14_data_fx_csx
2901 [1][ddr_csn]);
2902 dsb_sev();
2903 }
2904 }
2905
2906 ddr_setval_ach(_reg_PI_WDQLVL_VREF_DELTA, 0x04);
2907
2908 pvtcode_update();
2909 err = wdqdm_man1();
2910 foreach_vch(ch) {
2911 for (ddr_csn = 0U; ddr_csn < CSAB_CNT; ddr_csn++) {
2912 mr14_bkup[ch][ddr_csn] =
2913 (mr14_bkup[ch][ddr_csn] +
2914 ddr_getval(ch, reg_pi_mr14_data_fx_csx
2915 [1][ddr_csn])) / 2U;
2916 ddr_setval(ch,
2917 reg_pi_mr14_data_fx_csx[1]
2918 [ddr_csn],
2919 mr14_bkup[ch][ddr_csn]);
2920 }
2921 }
2922
2923 ddr_setval_ach(_reg_PI_WDQLVL_VREF_NORMAL_STEPSIZE, 0x0U);
2924 ddr_setval_ach(_reg_PI_WDQLVL_VREF_DELTA, 0x0U);
2925 ddr_setval_ach(_reg_PI_WDQLVL_VREF_INITIAL_START_POINT, 0x0U);
2926 ddr_setval_ach(_reg_PI_WDQLVL_VREF_INITIAL_STOP_POINT, 0x0U);
2927 ddr_setval_ach(_reg_PI_WDQLVL_VREF_INITIAL_STEPSIZE, 0x0U);
2928
2929 pvtcode_update2();
2930 err = wdqdm_man1();
2931 ddr_setval_ach(_reg_PI_WDQLVL_VREF_EN, 0x0U);
2932
2933 } while ((err != 0U) && (++retry_cnt < retry_max));
2934
2935 if (prr_product == PRR_PRODUCT_M3 && prr_cut <= PRR_PRODUCT_10) {
2936 wdqdm_cp(0U, 1U);
2937 }
2938
2939 return (retry_cnt >= retry_max);
2940 }
2941
2942 /* RDQ TRAINING */
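/*
 * Read DQ/DM leveling: rdqdm_man() temporarily forces the DQ/DQS TSEL
 * enables, runs rdqdm_man1() for each CS pair and restores the table
 * values afterwards; unless DDR_FAST_INIT is defined, rdqdm_ana1() applies
 * the per-bit board adjustments and records the read windows.
 */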
2943 #ifndef DDR_FAST_INIT
2944 static void rdqdm_clr1(uint32_t ch, uint32_t ddr_csn)
2945 {
2946 uint32_t cs, slice;
2947 uint32_t data_l;
2948 int32_t i, k;
2949
2950 /* clr of training results buffer */
2951 cs = ddr_csn % 2U;
2952 data_l = board_cnf->dqdm_dly_r;
2953 for (slice = 0U; slice < SLICE_CNT; slice++) {
2954 k = (board_cnf->ch[ch].dqs_swap >> (4 * slice)) & 0x0f;
2955 if (((k >= 2) && (ddr_csn < 2)) || ((k < 2) && (ddr_csn >= 2))) {
2956 continue;
2957 }
2958
2959 for (i = 0; i <= 8; i++) {
2960 if ((ch_have_this_cs[CS_CNT - 1 - cs] & (1U << ch)) != 0U) {
2961 rdqdm_dly[ch][cs][slice][i] =
2962 rdqdm_dly[ch][CS_CNT - 1 - cs][slice][i];
2963 rdqdm_dly[ch][cs][slice + SLICE_CNT][i] =
2964 rdqdm_dly[ch][CS_CNT - 1 - cs][slice + SLICE_CNT][i];
2965 } else {
2966 rdqdm_dly[ch][cs][slice][i] = data_l;
2967 rdqdm_dly[ch][cs][slice + SLICE_CNT][i] = data_l;
2968 }
2969 rdqdm_le[ch][cs][slice][i] = 0U;
2970 rdqdm_le[ch][cs][slice + SLICE_CNT][i] = 0U;
2971 rdqdm_te[ch][cs][slice][i] = 0U;
2972 rdqdm_te[ch][cs][slice + SLICE_CNT][i] = 0U;
2973 rdqdm_nw[ch][cs][slice][i] = 0U;
2974 rdqdm_nw[ch][cs][slice + SLICE_CNT][i] = 0U;
2975 }
2976 rdqdm_st[ch][cs][slice] = 0U;
2977 rdqdm_win[ch][cs][slice] = 0U;
2978 }
2979 }
2980
2981 static uint32_t rdqdm_ana1(uint32_t ch, uint32_t ddr_csn)
2982 {
2983 uint32_t rdq_status_obs_select;
2984 uint32_t cs, slice;
2985 uint32_t data_l;
2986 uint32_t err;
2987 uint32_t dq;
2988 int32_t min_win;
2989 int8_t _adj;
2990 int16_t adj;
2992 int32_t win;
2993 int32_t i, k;
2994
2995 /* analysis of training results */
2996 err = 0U;
2997 for (slice = 0U; slice < SLICE_CNT; slice++) {
2998 k = (board_cnf->ch[ch].dqs_swap >> (4 * slice)) & 0x0f;
2999 if (((k >= 2) && (ddr_csn < 2)) || ((k < 2) && (ddr_csn >= 2))) {
3000 continue;
3001 }
3002
3003 cs = ddr_csn % 2U;
3004 ddr_setval_s(ch, slice, _reg_PHY_PER_CS_TRAINING_INDEX, cs);
3005 ddrphy_regif_idle();
3006
3007 ddr_getval_s(ch, slice, _reg_PHY_PER_CS_TRAINING_INDEX);
3008 ddrphy_regif_idle();
3009
3010 for (i = 0; i <= 8; i++) {
3011 dq = slice * 8 + i;
3012 if (i == 8) {
3013 _adj = board_cnf->ch[ch].dm_adj_r[slice];
3014 } else {
3015 _adj = board_cnf->ch[ch].dq_adj_r[dq];
3016 }
3017
3018 adj = _f_scale_adj(_adj);
3019
3020 data_l = ddr_getval_s(ch, slice,
3021 _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY[i]) + adj;
3022 ddr_setval_s(ch, slice,
3023 _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY[i],
3024 data_l);
3025 rdqdm_dly[ch][cs][slice][i] = data_l;
3026
3027 data_l = ddr_getval_s(ch, slice,
3028 _reg_PHY_RDDQS_X_FALL_SLAVE_DELAY[i]) + adj;
3029 ddr_setval_s(ch, slice,
3030 _reg_PHY_RDDQS_X_FALL_SLAVE_DELAY[i],
3031 data_l);
3032 rdqdm_dly[ch][cs][slice + SLICE_CNT][i] = data_l;
3033 }
3034 min_win = INT_LEAST32_MAX;
3035 for (i = 0; i <= 8; i++) {
3036 data_l =
3037 ddr_getval_s(ch, slice, _reg_PHY_RDLVL_STATUS_OBS);
3038 rdqdm_st[ch][cs][slice] = data_l;
3039 rdqdm_st[ch][cs][slice + SLICE_CNT] = data_l;
3040 /* k : rise/fall */
3041 for (k = 0; k < 2; k++) {
3042 if (i == 8) {
3043 rdq_status_obs_select = 16 + 8 * k;
3044 } else {
3045 rdq_status_obs_select = i + k * 8;
3046 }
3047 ddr_setval_s(ch, slice,
3048 _reg_PHY_RDLVL_RDDQS_DQ_OBS_SELECT,
3049 rdq_status_obs_select);
3050
3051 data_l =
3052 ddr_getval_s(ch, slice,
3053 _reg_PHY_RDLVL_RDDQS_DQ_LE_DLY_OBS);
3054 rdqdm_le[ch][cs][slice + SLICE_CNT * k][i] = data_l;
3055
3056 data_l =
3057 ddr_getval_s(ch, slice,
3058 _reg_PHY_RDLVL_RDDQS_DQ_TE_DLY_OBS);
3059 rdqdm_te[ch][cs][slice + SLICE_CNT * k][i] = data_l;
3060
3061 data_l =
3062 ddr_getval_s(ch, slice,
3063 _reg_PHY_RDLVL_RDDQS_DQ_NUM_WINDOWS_OBS);
3064 rdqdm_nw[ch][cs][slice + SLICE_CNT * k][i] = data_l;
3065
3066 win =
3067 (int32_t)rdqdm_te[ch][cs][slice +
3068 SLICE_CNT *
3069 k][i] -
3070 rdqdm_le[ch][cs][slice + SLICE_CNT * k][i];
3071 if (i != 8) {
3072 if (min_win > win) {
3073 min_win = win;
3074 }
3075 }
3076 }
3077 }
3078 rdqdm_win[ch][cs][slice] = min_win;
3079 if (min_win <= 0) {
3080 err = 2;
3081 }
3082 }
3083 return err;
3084 }
3085 #else /* DDR_FAST_INIT */
3086 static void rdqdm_man1_set(uint32_t ddr_csn, uint32_t ch, uint32_t slice)
3087 {
3088 uint32_t i, adj, data_l;
3089
3090 for (i = 0U; i <= 8U; i++) {
3091 if (i == 8U) {
3092 adj = _f_scale_adj(board_cnf->ch[ch].dm_adj_r[slice]);
3093 } else {
3094 adj = _f_scale_adj(board_cnf->ch[ch].dq_adj_r[slice * 8U + i]);
3095 }
3096 ddr_setval_s(ch, slice, _reg_PHY_PER_CS_TRAINING_INDEX, ddr_csn);
3097 data_l = ddr_getval_s(ch, slice, _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY[i]) + adj;
3098 ddr_setval_s(ch, slice, _reg_PHY_RDDQS_X_RISE_SLAVE_DELAY[i], data_l);
3099 rdqdm_dly[ch][ddr_csn][slice][i] = data_l;
3100 rdqdm_dly[ch][ddr_csn | 1U][slice][i] = data_l;
3101
3102 data_l = ddr_getval_s(ch, slice, _reg_PHY_RDDQS_X_FALL_SLAVE_DELAY[i]) + adj;
3103 ddr_setval_s(ch, slice, _reg_PHY_RDDQS_X_FALL_SLAVE_DELAY[i], data_l);
3104 rdqdm_dly[ch][ddr_csn][slice + SLICE_CNT][i] = data_l;
3105 rdqdm_dly[ch][ddr_csn | 1U][slice + SLICE_CNT][i] = data_l;
3106 }
3107 }
3108 #endif /* DDR_FAST_INIT */
3109
3110 static uint32_t rdqdm_man1(void)
3111 {
3112 uint32_t ch;
3113 uint32_t ddr_csn;
3114 uint32_t val;
3115 #ifdef DDR_FAST_INIT
3116 uint32_t slice;
3117 #endif/* DDR_FAST_INIT */
3118 uint32_t err;
3119
3120 /* manual execution of training */
3121 err = 0U;
3122
3123 for (ddr_csn = 0U; ddr_csn < CSAB_CNT; ddr_csn++) {
3124 /* KICK RDQLVL */
3125 err = swlvl1(ddr_csn, _reg_PI_RDLVL_CS, _reg_PI_RDLVL_REQ);
3126 if (err != 0U) {
3127 goto err_exit;
3128 }
3129 #ifndef DDR_FAST_INIT
3130 foreach_vch(ch) {
3131 if ((ch_have_this_cs[ddr_csn % 2] & (1U << ch)) == 0U) {
3132 rdqdm_clr1(ch, ddr_csn);
3133 ddrphy_regif_idle();
3134 continue;
3135 }
3136 err = rdqdm_ana1(ch, ddr_csn);
3137 ddrphy_regif_idle();
3138 if (err != 0U) {
3139 goto err_exit;
3140 }
3141 }
3142 #else/* DDR_FAST_INIT */
3143 foreach_vch(ch) {
3144 if ((ch_have_this_cs[ddr_csn] & (1U << ch)) != 0U) {
3145 for (slice = 0U; slice < SLICE_CNT; slice++) {
3146 val = ddr_getval_s(ch, slice, _reg_PHY_RDLVL_STATUS_OBS);
3147 if (val != 0x0D00FFFFU) {
3148 err = (1U << ch) | (0x10U << slice);
3149 goto err_exit;
3150 }
3151 }
3152 }
3153 if ((prr_product == PRR_PRODUCT_M3) &&
3154 (prr_cut <= PRR_PRODUCT_10)) {
3155 for (slice = 0U; slice < SLICE_CNT; slice++) {
3156 rdqdm_man1_set(ddr_csn, ch, slice);
3157 }
3158 }
3159 }
3160 ddrphy_regif_idle();
3161
3162 #endif/* DDR_FAST_INIT */
3163 }
3164
3165 err_exit:
3166 return err;
3167 }
3168
3169 static uint32_t rdqdm_man(void)
3170 {
3171 uint32_t err, retry_cnt;
3172 const uint32_t retry_max = 0x01U;
3173
3174 ddr_setval_ach_as(_reg_PHY_DQ_TSEL_ENABLE,
3175 0x00000004U | ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3176 _reg_PHY_DQ_TSEL_ENABLE));
3177 ddr_setval_ach_as(_reg_PHY_DQS_TSEL_ENABLE,
3178 0x00000004U | ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3179 _reg_PHY_DQS_TSEL_ENABLE));
3180 ddr_setval_ach_as(_reg_PHY_DQ_TSEL_SELECT,
3181 0xFF0FFFFFU & ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3182 _reg_PHY_DQ_TSEL_SELECT));
3183 ddr_setval_ach_as(_reg_PHY_DQS_TSEL_SELECT,
3184 0xFF0FFFFFU & ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3185 _reg_PHY_DQS_TSEL_SELECT));
3186
3187 retry_cnt = 0U;
3188 do {
3189 err = rdqdm_man1();
3190 ddrphy_regif_idle();
3191 } while ((err != 0U) && (++retry_cnt < retry_max));
3192 ddr_setval_ach_as(_reg_PHY_DQ_TSEL_ENABLE,
3193 ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3194 _reg_PHY_DQ_TSEL_ENABLE));
3195 ddr_setval_ach_as(_reg_PHY_DQS_TSEL_ENABLE,
3196 ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3197 _reg_PHY_DQS_TSEL_ENABLE));
3198 ddr_setval_ach_as(_reg_PHY_DQ_TSEL_SELECT,
3199 ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3200 _reg_PHY_DQ_TSEL_SELECT));
3201 ddr_setval_ach_as(_reg_PHY_DQS_TSEL_SELECT,
3202 ddrtbl_getval(_cnf_DDR_PHY_SLICE_REGSET,
3203 _reg_PHY_DQS_TSEL_SELECT));
3204
3205 return (retry_cnt >= retry_max);
3206 }
3207
3208 /* rx offset calibration */
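/*
 * Software RX offset calibration (used on G2M Ver.1.0/1.1): sweep the
 * offset code, record the comparator output for every DQ as a bitmap, then
 * use _find_change() to locate the low/high transitions and program the
 * midpoint.  rx_offset_cal_hw() instead polls the PHY's hardware
 * calibration until every slice reports a converged code.
 */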
3209 static int32_t _find_change(uint64_t val, uint32_t dir)
3210 {
3211 int32_t i;
3212 uint32_t startval;
3213 uint32_t curval;
3214 const int32_t VAL_END = 0x3fU;
3215
3216 if (dir == 0U) {
3217 startval = (val & 0x01U);
3218 for (i = 1; i <= VAL_END; i++) {
3219 curval = (val >> i) & 0x01U;
3220 if (curval != startval) {
3221 return i;
3222 }
3223 }
3224 return VAL_END;
3225 }
3226
3227 startval = (val >> dir) & 0x01U;
3228 for (i = (int32_t)dir - 1; i >= 0; i--) {
3229 curval = (val >> i) & 0x01U;
3230 if (curval != startval) {
3231 return i;
3232 }
3233 }
3234
3235 return 0;
3236 }
3237
3238 static uint32_t _rx_offset_cal_updn(uint32_t code)
3239 {
3240 const uint32_t CODE_MAX = 0x40U;
3241 uint32_t tmp;
3242
3243 if (code == 0U) {
3244 tmp = (1U << 6) | (CODE_MAX - 1U);
3245 } else {
3246 tmp = (code << 6) | (CODE_MAX - code);
3247 }
3248
3249 return tmp;
3250 }
3251
3252 static uint32_t rx_offset_cal(void)
3253 {
3254 uint32_t index;
3255 uint32_t code;
3256 const uint32_t CODE_MAX = 0x40U;
3257 const uint32_t CODE_STEP = 2U;
3258 uint32_t ch, slice;
3259 uint32_t tmp;
3260 uint32_t tmp_ach_as[DRAM_CH_CNT][SLICE_CNT];
3261 uint64_t val[DRAM_CH_CNT][SLICE_CNT][_reg_PHY_RX_CAL_X_NUM];
3262 uint64_t tmpval;
3263 int32_t lsb, msb;
3264
3265 ddr_setval_ach_as(_reg_PHY_RX_CAL_OVERRIDE, 0x01);
3266 foreach_vch(ch) {
3267 for (slice = 0U; slice < SLICE_CNT; slice++) {
3268 for (index = 0U; index < _reg_PHY_RX_CAL_X_NUM; index++) {
3269 val[ch][slice][index] = 0U;
3270 }
3271 }
3272 }
3273
3274 for (code = 0U; code < CODE_MAX / CODE_STEP; code++) {
3275 tmp = _rx_offset_cal_updn(code * CODE_STEP);
3276 for (index = 0U; index < _reg_PHY_RX_CAL_X_NUM; index++) {
3277 ddr_setval_ach_as(_reg_PHY_RX_CAL_X[index], tmp);
3278 }
3279 dsb_sev();
3280 ddr_getval_ach_as(_reg_PHY_RX_CAL_OBS, (uint32_t *)tmp_ach_as);
3281
3282 foreach_vch(ch) {
3283 for (slice = 0U; slice < SLICE_CNT; slice++) {
3284 tmp = tmp_ach_as[ch][slice];
3285 for (index = 0U; index < _reg_PHY_RX_CAL_X_NUM;
3286 index++) {
3287 if ((tmp & (1U << index)) != 0U) {
3288 val[ch][slice][index] |=
3289 (1ULL << code);
3290 } else {
3291 val[ch][slice][index] &=
3292 ~(1ULL << code);
3293 }
3294 }
3295 }
3296 }
3297 }
3298 foreach_vch(ch) {
3299 for (slice = 0U; slice < SLICE_CNT; slice++) {
3300 for (index = 0U; index < _reg_PHY_RX_CAL_X_NUM;
3301 index++) {
3302 tmpval = val[ch][slice][index];
3303 lsb = _find_change(tmpval, 0U);
3304 msb = _find_change(tmpval,
3305 (CODE_MAX / CODE_STEP) - 1U);
3306 tmp = (lsb + msb) >> 1U;
3307
3308 tmp = _rx_offset_cal_updn(tmp * CODE_STEP);
3309 ddr_setval_s(ch, slice,
3310 _reg_PHY_RX_CAL_X[index], tmp);
3311 }
3312 }
3313 }
3314 ddr_setval_ach_as(_reg_PHY_RX_CAL_OVERRIDE, 0x00);
3315
3316 return 0U;
3317 }
3318
3319 static uint32_t rx_offset_cal_hw(void)
3320 {
3321 uint32_t ch, slice;
3322 uint32_t retry;
3323 uint32_t complete;
3324 uint32_t tmp;
3325 uint32_t tmp_ach_as[DRAM_CH_CNT][SLICE_CNT];
3326
3327 ddr_setval_ach_as(_reg_PHY_RX_CAL_X[9], 0x00);
3328 ddr_setval_ach_as(_reg_PHY_RX_CAL_OVERRIDE, 0x00);
3329 ddr_setval_ach_as(_reg_PHY_RX_CAL_SAMPLE_WAIT, 0x0f);
3330
3331 retry = 0U;
3332 while (retry < 4096U) {
3333 if ((retry & 0xffU) == 0U) {
3334 ddr_setval_ach_as(_reg_SC_PHY_RX_CAL_START, 0x01);
3335 }
3336 foreach_vch(ch) {
3337 for (slice = 0U; slice < SLICE_CNT; slice++) {
3338 tmp_ach_as[ch][slice] =
3339 ddr_getval_s(ch, slice,
3340 _reg_PHY_RX_CAL_X[9]);
3341 }
3342 }
3343
3344 complete = 1U;
3345 foreach_vch(ch) {
3346 for (slice = 0U; slice < SLICE_CNT; slice++) {
3347 tmp = tmp_ach_as[ch][slice];
3348 tmp = (tmp & 0x3fU) + ((tmp >> 6) & 0x3fU);
3349 if (tmp != 0x40U) {
3350 complete = 0U;
3351 }
3352 }
3353 }
3354 if (complete != 0U) {
3355 break;
3356 }
3357
3358 retry++;
3359 }
3360
3361 return (complete == 0U);
3362 }
3363
3364 /* adjust wpath latency */
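/*
 * For every channel/slice/CS whose trained CLK_WRDQS slave delay exceeds
 * _par_EARLY_THRESHOLD_VAL, reduce that slice's WRITE_PATH_LAT_ADD by one.
 */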
3365 static void adjust_wpath_latency(void)
3366 {
3367 uint32_t ch, cs, slice;
3368 uint32_t dly;
3369 uint32_t wpath_add;
3370 const uint32_t _par_EARLY_THRESHOLD_VAL = 0x180U;
3371
3372 foreach_vch(ch) {
3373 for (slice = 0U; slice < SLICE_CNT; slice += 1U) {
3374 for (cs = 0U; cs < CS_CNT; cs++) {
3375 ddr_setval_s(ch, slice,
3376 _reg_PHY_PER_CS_TRAINING_INDEX,
3377 cs);
3378 ddr_getval_s(ch, slice,
3379 _reg_PHY_PER_CS_TRAINING_INDEX);
3380 dly =
3381 ddr_getval_s(ch, slice,
3382 _reg_PHY_CLK_WRDQS_SLAVE_DELAY);
3383 if (dly <= _par_EARLY_THRESHOLD_VAL) {
3384 continue;
3385 }
3386
3387 wpath_add =
3388 ddr_getval_s(ch, slice,
3389 _reg_PHY_WRITE_PATH_LAT_ADD);
3390 ddr_setval_s(ch, slice,
3391 _reg_PHY_WRITE_PATH_LAT_ADD,
3392 wpath_add - 1U);
3393 }
3394 }
3395 }
3396 }
3397
3398 /* DDR Initialize entry */
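/*
 * BL2 entry point for DRAM setup: enable the thermal sensor, detect the
 * LSI product/cut and board type, derive the board clock and DDR data
 * rate, compute tCCD from the bus clock mode, start PLL3 and run
 * init_ddr(); returns INITDRAM_OK only when every valid PHY trained.
 */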
3399 int32_t rzg_dram_init(void)
3400 {
3401 uint32_t ch, cs;
3402 uint32_t data_l;
3403 uint32_t bus_mbps, bus_mbpsdiv;
3404 uint32_t tmp_tccd;
3405 uint32_t failcount;
3406 uint32_t cnf_boardtype;
3407 int32_t ret = INITDRAM_NG;
3408
3409 /* Thermal sensor setting */
3410 data_l = mmio_read_32(CPG_MSTPSR5);
3411 if ((data_l & BIT(22)) != 0U) { /* case THS/TSC Standby */
3412 data_l &= ~BIT(22);
3413 cpg_write_32(CPG_SMSTPCR5, data_l);
3414 while ((mmio_read_32(CPG_MSTPSR5) & BIT(22)) != 0U) {
3415 /* wait bit=0 */
3416 }
3417 }
3418
3419 /* THCTR Bit6: PONM=0 , Bit0: THSST=0 */
3420 data_l = mmio_read_32(THS1_THCTR) & 0xFFFFFFBE;
3421 mmio_write_32(THS1_THCTR, data_l);
3422
3423 /* Judge product and cut */
3424 #ifdef RCAR_DDR_FIXED_LSI_TYPE
3425 #if (RCAR_LSI == RCAR_AUTO)
3426 prr_product = mmio_read_32(PRR) & PRR_PRODUCT_MASK;
3427 prr_cut = mmio_read_32(PRR) & PRR_CUT_MASK;
3428 #else /* RCAR_LSI */
3429 #ifndef RCAR_LSI_CUT
3430 prr_cut = mmio_read_32(PRR) & PRR_CUT_MASK;
3431 #endif /* RCAR_LSI_CUT */
3432 #endif /* RCAR_LSI */
3433 #else /* RCAR_DDR_FIXED_LSI_TYPE */
3434 prr_product = mmio_read_32(PRR) & PRR_PRODUCT_MASK;
3435 prr_cut = mmio_read_32(PRR) & PRR_CUT_MASK;
3436 #endif /* RCAR_DDR_FIXED_LSI_TYPE */
3437
3438 if (prr_product == PRR_PRODUCT_M3) {
3439 p_ddr_regdef_tbl =
3440 (const uint32_t *)&DDR_REGDEF_TBL[1][0];
3441 } else {
3442 FATAL_MSG("BL2: DDR:Unknown Product\n");
3443 goto done;
3444 }
3445
3446 if ((prr_product == PRR_PRODUCT_M3) && (prr_cut < PRR_PRODUCT_30)) {
3447 		/* nothing to do: not supported on G2M Ver.1.x */
3448 } else {
3449 mmio_write_32(DBSC_DBSYSCNT0, 0x00001234U);
3450 }
3451
3452 /* Judge board type */
3453 cnf_boardtype = boardcnf_get_brd_type(prr_product);
3454 if (cnf_boardtype >= (uint32_t)BOARDNUM) {
3455 FATAL_MSG("BL2: DDR:Unknown Board\n");
3456 goto done;
3457 }
3458 board_cnf = (const struct _boardcnf *)&boardcnfs[cnf_boardtype];
3459
3460 /* RCAR_DRAM_SPLIT_2CH (2U) */
3461 #if RCAR_DRAM_SPLIT == 2
3462 ddr_phyvalid = board_cnf->phyvalid;
3463 #else /* RCAR_DRAM_SPLIT_2CH */
3464 ddr_phyvalid = board_cnf->phyvalid;
3465 #endif /* RCAR_DRAM_SPLIT_2CH */
3466
3467 max_density = 0U;
3468
3469 for (cs = 0U; cs < CS_CNT; cs++) {
3470 ch_have_this_cs[cs] = 0U;
3471 }
3472
3473 foreach_ech(ch) {
3474 for (cs = 0U; cs < CS_CNT; cs++) {
3475 ddr_density[ch][cs] = 0xffU;
3476 }
3477 }
3478
3479 foreach_vch(ch) {
3480 for (cs = 0U; cs < CS_CNT; cs++) {
3481 data_l = board_cnf->ch[ch].ddr_density[cs];
3482 ddr_density[ch][cs] = data_l;
3483
3484 if (data_l == 0xffU) {
3485 continue;
3486 }
3487 if (data_l > max_density) {
3488 max_density = data_l;
3489 }
3490 ch_have_this_cs[cs] |= (1U << ch);
3491 }
3492 }
3493
3494 /* Judge board clock frequency (in MHz) */
3495 boardcnf_get_brd_clk(cnf_boardtype, &brd_clk, &brd_clkdiv);
3496 if ((brd_clk / brd_clkdiv) > 25U) {
3497 brd_clkdiva = 1U;
3498 } else {
3499 brd_clkdiva = 0U;
3500 }
3501
3502 /* Judge ddr operating frequency clock(in Mbps) */
3503 boardcnf_get_ddr_mbps(cnf_boardtype, &ddr_mbps, &ddr_mbpsdiv);
3504
3505 ddr0800_mul = CLK_DIV(800U, 2U, brd_clk, brd_clkdiv * (brd_clkdiva + 1U));
3506
3507 ddr_mul = CLK_DIV(ddr_mbps, ddr_mbpsdiv * 2U, brd_clk,
3508 brd_clkdiv * (brd_clkdiva + 1U));
3509
3510 /* Adjust tccd */
3511 data_l = (0x00006000 & mmio_read_32(RST_MODEMR)) >> 13;
3512 bus_mbps = 0U;
3513 bus_mbpsdiv = 0U;
3514 switch (data_l) {
3515 case 0:
3516 bus_mbps = brd_clk * 0x60U * 2U;
3517 bus_mbpsdiv = brd_clkdiv * 1U;
3518 break;
3519 case 1:
3520 bus_mbps = brd_clk * 0x50U * 2U;
3521 bus_mbpsdiv = brd_clkdiv * 1U;
3522 break;
3523 case 2:
3524 bus_mbps = brd_clk * 0x40U * 2U;
3525 bus_mbpsdiv = brd_clkdiv * 1U;
3526 break;
3527 case 3:
3528 bus_mbps = brd_clk * 0x60U * 2U;
3529 bus_mbpsdiv = brd_clkdiv * 2U;
3530 break;
3531 default:
3532 bus_mbps = brd_clk * 0x60U * 2U;
3533 bus_mbpsdiv = brd_clkdiv * 2U;
3534 		WARN("BL2: DDR using default values for adjusting tccd\n");
3535 break;
3536 }
3537 tmp_tccd = CLK_DIV(ddr_mbps * 8U, ddr_mbpsdiv, bus_mbps, bus_mbpsdiv);
3538 if (8U * ddr_mbps * bus_mbpsdiv != tmp_tccd * bus_mbps * ddr_mbpsdiv) {
3539 tmp_tccd = tmp_tccd + 1U;
3540 }
3541
3542 if (tmp_tccd < 8U) {
3543 ddr_tccd = 8U;
3544 } else {
3545 ddr_tccd = tmp_tccd;
3546 }
3547
3548 NOTICE("BL2: DDR%d(%s)\n", ddr_mbps / ddr_mbpsdiv, RCAR_DDR_VERSION);
3549
3550 MSG_LF("Start\n");
3551
3552 /* PLL Setting */
3553 pll3_control(1U);
3554
3555 /* initialize DDR */
3556 data_l = init_ddr();
3557 if (data_l == ddr_phyvalid) {
3558 failcount = 0U;
3559 } else {
3560 failcount = 1U;
3561 }
3562
3563 foreach_vch(ch) {
3564 mmio_write_32(DBSC_DBPDLK(ch), 0x00000000U);
3565 }
3566 if ((prr_product == PRR_PRODUCT_M3) && (prr_cut < PRR_PRODUCT_30)) {
3567 		/* nothing to do: not supported on G2M Ver.1.x */
3568 } else {
3569 mmio_write_32(DBSC_DBSYSCNT0, 0x00000000);
3570 }
3571
3572 if (failcount == 0U) {
3573 ret = INITDRAM_OK;
3574 }
3575
3576 done:
3577 return ret;
3578 }
3579
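/*
 * Recompute the pad PVT codes from the thermal-compensation value stored
 * in tcal.tcomp_cal and write them to the FDBK/DATA/DQS/ADDR/CS
 * termination registers; pvtcode_update2() restores the initial codes
 * captured at calibration time instead.
 */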
3580 static void pvtcode_update(void)
3581 {
3582 uint32_t ch;
3583 uint32_t data_l;
3584 uint32_t pvtp[4], pvtn[4], pvtp_init, pvtn_init;
3585 int32_t pvtp_tmp, pvtn_tmp;
3586
3587 foreach_vch(ch) {
3588 pvtn_init = (tcal.tcomp_cal[ch] & 0xFC0U) >> 6;
3589 pvtp_init = (tcal.tcomp_cal[ch] & 0x03FU) >> 0;
3590
3591 if (8912U * pvtp_init > 44230U) {
3592 pvtp_tmp = (5000U + 8912U * pvtp_init - 44230U) / 10000U;
3593 } else {
3594 pvtp_tmp =
3595 -((-(5000 + 8912 * pvtp_init - 44230)) / 10000);
3596 }
3597 pvtn_tmp = (5000U + 5776U * (uint32_t)pvtn_init + 30280U) / 10000U;
3598
3599 pvtn[ch] = (uint32_t)pvtn_tmp + pvtn_init;
3600 pvtp[ch] = (uint32_t)pvtp_tmp + pvtp_init;
3601
3602 if (pvtn[ch] > 63U) {
3603 pvtn[ch] = 63U;
3604 pvtp[ch] =
3605 (pvtp_tmp) * (63 - 6 * pvtn_tmp -
3606 pvtn_init) / (pvtn_tmp) +
3607 6 * pvtp_tmp + pvtp_init;
3608 }
3609
3610 data_l = pvtp[ch] | (pvtn[ch] << 6) | 0x00015000U;
3611 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_FDBK_TERM),
3612 data_l | 0x00020000U);
3613 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_DATA_TERM),
3614 data_l);
3615 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_DQS_TERM),
3616 data_l);
3617 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_ADDR_TERM),
3618 data_l);
3619 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_CS_TERM),
3620 data_l);
3621 }
3622 }
3623
3624 static void pvtcode_update2(void)
3625 {
3626 uint32_t ch;
3627
3628 foreach_vch(ch) {
3629 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_FDBK_TERM),
3630 tcal.init_cal[ch] | 0x00020000U);
3631 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_DATA_TERM),
3632 tcal.init_cal[ch]);
3633 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_DQS_TERM),
3634 tcal.init_cal[ch]);
3635 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_ADDR_TERM),
3636 tcal.init_cal[ch]);
3637 reg_ddrphy_write(ch, ddr_regdef_adr(_reg_PHY_PAD_CS_TERM),
3638 tcal.init_cal[ch]);
3639 }
3640 }
3641
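/*
 * Capture the initial pad calibration codes and, unless the termination
 * code was overridden, pre-compensate the PVT P/N codes with the current
 * die temperature read from THS1 before recording init_temp = 125.
 */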
3642 static void ddr_padcal_tcompensate_getinit(uint32_t override)
3643 {
3644 uint32_t ch;
3645 uint32_t data_l;
3646 uint32_t pvtp, pvtn;
3647
3648 tcal.init_temp = 0;
3649 for (ch = 0U; ch < 4U; ch++) {
3650 tcal.init_cal[ch] = 0U;
3651 tcal.tcomp_cal[ch] = 0U;
3652 }
3653
3654 foreach_vch(ch) {
3655 tcal.init_cal[ch] = ddr_getval(ch, _reg_PHY_PAD_TERM_X[1]);
3656 tcal.tcomp_cal[ch] = ddr_getval(ch, _reg_PHY_PAD_TERM_X[1]);
3657 }
3658
3659 if (override == 0U) {
3660 data_l = mmio_read_32(THS1_TEMP);
3661 if (data_l < 2800U) {
3662 tcal.init_temp =
3663 (143 * (int32_t)data_l - 359000) / 1000;
3664 } else {
3665 tcal.init_temp =
3666 (121 * (int32_t)data_l - 296300) / 1000;
3667 }
3668
3669 foreach_vch(ch) {
3670 pvtp = (tcal.init_cal[ch] >> 0) & 0x000003FU;
3671 pvtn = (tcal.init_cal[ch] >> 6) & 0x000003FU;
3672 if ((int32_t)pvtp >
3673 ((tcal.init_temp * 29 - 3625) / 1000)) {
3674 pvtp = (int32_t)pvtp +
3675 ((3625 - tcal.init_temp * 29) / 1000);
3676 } else {
3677 pvtp = 0U;
3678 }
3679
3680 if ((int32_t)pvtn >
3681 ((tcal.init_temp * 54 - 6750) / 1000)) {
3682 pvtn = (int32_t)pvtn +
3683 ((6750 - tcal.init_temp * 54) / 1000);
3684 } else {
3685 pvtn = 0U;
3686 }
3687
3688 tcal.init_cal[ch] = 0x00015000U | (pvtn << 6) | pvtp;
3689 }
3690 tcal.init_temp = 125;
3691 }
3692 }
3693
3694 #ifndef DDR_QOS_INIT_SETTING
3695 /* For QoS init */
3696 uint8_t rzg_get_boardcnf_phyvalid(void)
3697 {
3698 return ddr_phyvalid;
3699 }
3700 #endif /* DDR_QOS_INIT_SETTING */
3701