// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_hw_training.h"
#include "xor.h"
#include "xor_regs.h"

static void ddr3_flush_l1_line(u32 line);

extern u32 pbs_pattern[2][LEN_16BIT_PBS_PATTERN];
extern u32 pbs_pattern_32b[2][LEN_PBS_PATTERN];
#if defined(MV88F78X60)
extern u32 pbs_pattern_64b[2][LEN_PBS_PATTERN];
#endif
extern u32 pbs_dq_mapping[PUP_NUM_64BIT][DQ_NUM];

#if defined(MV88F78X60) || defined(MV88F672X)
/* PBS locked dq (per pup) */
u32 pbs_locked_dq[MAX_PUP_NUM][DQ_NUM] = { { 0 } };
u32 pbs_locked_dm[MAX_PUP_NUM] = { 0 };
u32 pbs_locked_value[MAX_PUP_NUM][DQ_NUM] = { { 0 } };

int per_bit_data[MAX_PUP_NUM][DQ_NUM];
#endif

static u32 sdram_data[LEN_KILLER_PATTERN] __aligned(32) = { 0 };

static struct crc_dma_desc dma_desc __aligned(32) = { 0 };

#define XOR_TIMEOUT 0x8000000

struct xor_channel_t {
	struct crc_dma_desc *desc;
	unsigned long desc_phys_addr;
};

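/*
 * Completion bits (bits [1:0] of the per-channel field) in the XOR
 * cause register, shifted to the field of the given channel.
 */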
#define XOR_CAUSE_DONE_MASK(chan) ((0x1 | 0x2) << (chan * 16))

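/*
 * Busy-wait (bounded by XOR_TIMEOUT iterations) for the XOR channel to
 * report completion and return to idle, then clear its cause bits.
 */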
void xor_waiton_eng(int chan)
{
	int timeout;

	timeout = 0;
	while (!(reg_read(XOR_CAUSE_REG(XOR_UNIT(chan))) &
		 XOR_CAUSE_DONE_MASK(XOR_CHAN(chan)))) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;

		timeout++;
	}

	timeout = 0;
	while (mv_xor_state_get(chan) != MV_IDLE) {
		if (timeout > XOR_TIMEOUT)
			goto timeout;

		timeout++;
	}

	/* Clear int */
	reg_write(XOR_CAUSE_REG(XOR_UNIT(chan)),
		  ~(XOR_CAUSE_DONE_MASK(XOR_CHAN(chan))));

timeout:
	return;
}

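/*
 * Word indices within the pattern that are skipped when the caller
 * requests a special compare.
 */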
static int special_compare_pattern(u32 uj)
{
	if ((uj == 30) || (uj == 31) || (uj == 61) || (uj == 62) ||
	    (uj == 93) || (uj == 94) || (uj == 126) || (uj == 127))
		return 1;

	return 0;
}

/*
 * Compare code extracted as it is used by multiple functions. This
 * reduces code size and makes it easier to maintain. Additionally,
 * the code is not indented as much and is therefore easier to read.
 */
static void compare_pattern_v1(u32 uj, u32 *pup, u32 *pattern,
			       u32 pup_groups, int debug_dqs)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;
	__maybe_unused u32 dq;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0xFF)) {
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
			var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);

			if (var1 != var2) {
				*pup |= (1 << (uk + (PUP_NUM_32BIT *
						     (uj % pup_groups))));

#ifdef MV_DEBUG_DQS
				if (!debug_dqs)
					continue;

				for (dq = 0; dq < DQ_NUM; dq++) {
					val = uk + (PUP_NUM_32BIT *
						    (uj % pup_groups));
					if (((var1 >> dq) & 0x1) !=
					    ((var2 >> dq) & 0x1))
						per_bit_data[val][dq] = 1;
					else
						per_bit_data[val][dq] = 0;
				}
#endif
			}
		}
	}
}

static void compare_pattern_v2(u32 uj, u32 *pup, u32 *pattern)
{
	u32 val;
	u32 uk;
	u32 var1;
	u32 var2;

	if (((sdram_data[uj]) != (pattern[uj])) && (*pup != 0x3)) {
		/* Found error */
		for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
			val = CMP_BYTE_SHIFT * uk;
			var1 = (sdram_data[uj] >> val) & CMP_BYTE_MASK;
			var2 = (pattern[uj] >> val) & CMP_BYTE_MASK;
			if (var1 != var2)
				*pup |= (1 << (uk % PUP_NUM_16BIT));
		}
	}
}

/*
 * Name:     ddr3_sdram_compare
 * Desc:     Execute compare per PUP
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           pattern_len      Length of pattern (in 32-bit words)
 *           sdram_offset     Offset address into the SDRAM
 *           write            Write to the SDRAM before reading back
 *           mask             Compare pattern with mask
 *           mask_pattern     Mask to compare pattern
 *           special_compare  Skip the special-compare word indices
 *
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
		       u32 *new_locked_pup, u32 *pattern,
		       u32 pattern_len, u32 sdram_offset, int write,
		       int mask, u32 *mask_pattern,
		       int special_compare)
{
	u32 uj;
	__maybe_unused u32 pup_groups;
	__maybe_unused u32 dq;

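	/* 64-bit wide interfaces are handled as two 32-bit PUP groups */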
#if !defined(MV88F67XX)
	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;
#endif

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

#if defined(MV88F78X60) || defined(MV88F672X)
		compare_pattern_v1(uj, new_locked_pup, pattern, pup_groups, 1);
#elif defined(MV88F67XX)
		compare_pattern_v2(uj, new_locked_pup, pattern);
#endif
	}

	return MV_OK;
}

#if defined(MV88F78X60) || defined(MV88F672X)
/*
 * Name:     ddr3_sdram_dm_compare
 * Desc:     Execute compare per PUP and test the DM signals
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           sdram_offset     Offset address into the SDRAM
 *
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_dm_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			  u32 *new_locked_pup, u32 *pattern,
			  u32 sdram_offset)
{
	u32 uj, uk, var1, var2, pup_groups;
	u32 val;
	u32 pup = 0;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_dram_sram_burst((u32)pattern, SDRAM_PBS_TX_OFFS,
			     LEN_PBS_PATTERN);
	ddr3_dram_sram_burst(SDRAM_PBS_TX_OFFS, (u32)sdram_data,
			     LEN_PBS_PATTERN);

	/* Validate the correctness of the results */
	for (uj = 0; uj < LEN_PBS_PATTERN; uj++)
		compare_pattern_v1(uj, &pup, pattern, pup_groups, 0);

	/* Test the DM Signals */
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x10) = 0x12345678;
	*(u32 *)(SDRAM_PBS_TX_OFFS + 0x14) = 0x12345678;

	sdram_data[0] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x10);
	sdram_data[1] = *(u32 *)(SDRAM_PBS_TX_OFFS + 0x14);

	for (uj = 0; uj < 2; uj++) {
		if (((sdram_data[uj]) != (pattern[uj])) &&
		    (*new_locked_pup != 0xFF)) {
			for (uk = 0; uk < PUP_NUM_32BIT; uk++) {
				val = CMP_BYTE_SHIFT * uk;
				var1 = ((sdram_data[uj] >> val) & CMP_BYTE_MASK);
				var2 = ((pattern[uj] >> val) & CMP_BYTE_MASK);
				if (var1 != var2) {
					*new_locked_pup |= (1 << (uk +
						(PUP_NUM_32BIT * (uj % pup_groups))));
					*new_locked_pup |= pup;
				}
			}
		}
	}

	return MV_OK;
}

/*
 * Name:     ddr3_sdram_pbs_compare
 * Desc:     Execute SRAM compare per PUP and DQ.
 * Args:     pup_locked           Bit array of locked pups
 *           is_tx                Indicates whether this is a Tx or Rx compare
 *           pbs_pattern_idx      Index of the PBS pattern
 *           pbs_curr_val         The current PBS value
 *           pbs_lock_val         The value to set for locked PBS
 *           skew_array           Global array to update with the compare results
 *           unlock_pup_dq_array  Bit array of the locked / unlocked pups per dq
 *           ecc                  Non-zero when checking the ECC PUP
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_pbs_compare(MV_DRAM_INFO *dram_info, u32 pup_locked,
			   int is_tx, u32 pbs_pattern_idx,
			   u32 pbs_curr_val, u32 pbs_lock_val,
			   u32 *skew_array, u8 *unlock_pup_dq_array,
			   u32 ecc)
{
	/* bit array failed dq per pup for current compare */
	u32 pbs_write_pup[DQ_NUM] = { 0 };
	u32 update_pup;	/* pup as HW convention */
	u32 max_pup;	/* maximal pup index */
	u32 pup_addr;
	u32 ui, dq, pup;
	int var1, var2;
	u32 sdram_offset, pup_groups, tmp_pup;
	u32 *pattern_ptr;
	u32 val;

	/* Choose pattern */
	switch (dram_info->ddr_width) {
#if defined(MV88F672X)
	case 16:
		pattern_ptr = (u32 *)&pbs_pattern[pbs_pattern_idx];
		break;
#endif
	case 32:
		pattern_ptr = (u32 *)&pbs_pattern_32b[pbs_pattern_idx];
		break;
#if defined(MV88F78X60)
	case 64:
		pattern_ptr = (u32 *)&pbs_pattern_64b[pbs_pattern_idx];
		break;
#endif
	default:
		return MV_FAIL;
	}

	max_pup = dram_info->num_of_std_pups;

	sdram_offset = SDRAM_PBS_I_OFFS + pbs_pattern_idx * SDRAM_PBS_NEXT_OFFS;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (is_tx == 1) {
		ddr3_dram_sram_burst((u32)pattern_ptr, sdram_offset,
				     LEN_PBS_PATTERN);
	}

	ddr3_dram_sram_read(sdram_offset, (u32)sdram_data, LEN_PBS_PATTERN);

	/* Compare read result to write */
	for (ui = 0; ui < LEN_PBS_PATTERN; ui++) {
		if ((sdram_data[ui]) != (pattern_ptr[ui])) {
			/* found error */
			/* error in low pup group */
			for (pup = 0; pup < PUP_NUM_32BIT; pup++) {
				val = CMP_BYTE_SHIFT * pup;
				var1 = ((sdram_data[ui] >> val) &
					CMP_BYTE_MASK);
				var2 = ((pattern_ptr[ui] >> val) &
					CMP_BYTE_MASK);

				if (var1 != var2) {
					if (dram_info->ddr_width > 16) {
						tmp_pup = (pup + PUP_NUM_32BIT *
							   (ui % pup_groups));
					} else {
						tmp_pup = (pup % PUP_NUM_16BIT);
					}

					update_pup = (1 << tmp_pup);
					if (ecc && (update_pup != 0x1))
						continue;

					/*
					 * Pup is failed - Go over all DQs and
					 * look for failures
					 */
					for (dq = 0; dq < DQ_NUM; dq++) {
						val = tmp_pup * (1 - ecc) +
							ecc * ECC_PUP;
						if (((var1 >> dq) & 0x1) !=
						    ((var2 >> dq) & 0x1)) {
							if (pbs_locked_dq[val][dq] == 1 &&
							    pbs_locked_value[val][dq] != pbs_curr_val)
								continue;

							/*
							 * Activate write to
							 * update PBS to
							 * pbs_lock_val
							 */
							pbs_write_pup[dq] |=
								update_pup;

							/*
							 * Update the
							 * unlock_pup_dq_array
							 */
							unlock_pup_dq_array[dq] &=
								~update_pup;

							/*
							 * Lock PBS value for
							 * failed bits in
							 * compare operation
							 */
							skew_array[tmp_pup * DQ_NUM + dq] =
								pbs_curr_val;
						}
					}
				}
			}
		}
	}

	pup_addr = (is_tx == 1) ? PUP_PBS_TX : PUP_PBS_RX;

	/* Set last failed bits PBS to min / max pbs value */
	for (dq = 0; dq < DQ_NUM; dq++) {
		for (pup = 0; pup < max_pup; pup++) {
			if (pbs_write_pup[dq] & (1 << pup)) {
				val = pup * (1 - ecc) + ecc * ECC_PUP;
				if (pbs_locked_dq[val][dq] == 1 &&
				    pbs_locked_value[val][dq] != pbs_curr_val)
					continue;

				/* Mark the dq as locked */
				pbs_locked_dq[val][dq] = 1;
				pbs_locked_value[val][dq] = pbs_curr_val;
				ddr3_write_pup_reg(pup_addr +
						   pbs_dq_mapping[val][dq],
						   CS0, val, 0, pbs_lock_val);
			}
		}
	}

	return MV_OK;
}
#endif

/*
 * Name:     ddr3_sdram_direct_compare
 * Desc:     Execute compare per PUP without DMA (no burst mode)
 * Args:     unlock_pup       Bit array of the unlocked pups
 *           new_locked_pup   Output bit array of the pups with failed compare
 *           pattern          Pattern to compare
 *           pattern_len      Length of pattern (in 32-bit words)
 *           sdram_offset     Offset address into the SDRAM
 *           write            Write to the SDRAM before reading back
 *           mask             Compare pattern with mask
 *           mask_pattern     Mask to compare pattern
 *
 * Notes:
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_sdram_direct_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			      u32 *new_locked_pup, u32 *pattern,
			      u32 pattern_len, u32 sdram_offset,
			      int write, int mask, u32 *mask_pattern)
{
	u32 uj, uk, pup_groups;
	u32 *sdram_addr;	/* used to read from SDRAM */

	sdram_addr = (u32 *)sdram_offset;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	/* Check if need to write before read */
	if (write == 1) {
		for (uk = 0; uk < pattern_len; uk++) {
			*sdram_addr = pattern[uk];
			sdram_addr++;
		}
	}

	sdram_addr = (u32 *)sdram_offset;

	for (uk = 0; uk < pattern_len; uk++) {
		sdram_data[uk] = *sdram_addr;
		sdram_addr++;
	}

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 0);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}

/*
 * Name:     ddr3_dram_sram_burst
 * Desc:     Copy between SDRAM and SRAM in 64-byte bursts
 * Args:     src    Source address
 *           dst    Destination address
 *           len    Length in 32-bit words
 * Notes:    Uses the XOR (DMA) mechanism
 * Returns:  MV_OK if success, other error code if fail.
 */
int ddr3_dram_sram_burst(u32 src, u32 dst, u32 len)
{
	u32 chan, byte_count, cs_num, byte;
	struct xor_channel_t channel;

	chan = 0;
	byte_count = len * 4;

	/* Wait for previous transfer completion */
	while (mv_xor_state_get(chan) != MV_IDLE)
		;

	/* Build the channel descriptor */
	channel.desc = &dma_desc;

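	/*
	 * The address-override register selects which DRAM chip-select the
	 * XOR engine accesses; the descriptor then carries only the offset
	 * within that chip-select.
	 */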
	/* Enable Address Override and set correct src and dst */
	if (src < SRAM_BASE) {
		/* src is DRAM CS, dst is SRAM */
		cs_num = (src / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 1) | (1 << 0)));
		channel.desc->src_addr0 = (src % (1 + SDRAM_CS_SIZE));
		channel.desc->dst_addr = dst;
	} else {
		/* src is SRAM, dst is DRAM CS */
		cs_num = (dst / (1 + SDRAM_CS_SIZE));
		reg_write(XOR_ADDR_OVRD_REG(0, 0),
			  ((cs_num << 25) | (1 << 24)));
		channel.desc->src_addr0 = src;
		channel.desc->dst_addr = (dst % (1 + SDRAM_CS_SIZE));
	}

	channel.desc->src_addr1 = 0;
	channel.desc->byte_cnt = byte_count;
	channel.desc->next_desc_ptr = 0;
	channel.desc->status = 1 << 31;
	channel.desc->desc_cmd = 0x0;
	channel.desc_phys_addr = (unsigned long)&dma_desc;

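	/*
	 * The descriptor lives in a cached buffer; flush the covering L1
	 * lines so the XOR engine fetches the data just written above.
	 */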
	ddr3_flush_l1_line((u32)&dma_desc);

	/* Issue the transfer */
	if (mv_xor_transfer(chan, MV_DMA, channel.desc_phys_addr) != MV_OK)
		return MV_FAIL;

	/* Wait for completion */
	xor_waiton_eng(chan);

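	/*
	 * Invalidate the destination range so subsequent CPU reads see the
	 * data written by the XOR engine rather than stale cache lines.
	 */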
	if (dst > SRAM_BASE) {
		for (byte = 0; byte < byte_count; byte += 0x20)
			cache_inv(dst + byte);
	}

	return MV_OK;
}

/*
 * Name:     ddr3_flush_l1_line
 * Desc:     Flush the two L1 cache lines starting at the given address,
 *           using the v6 or v7 flush primitive depending on the CPU
 *           architecture sampled at reset
 * Args:     line   Address of the first cache line to flush
 * Notes:
 * Returns:  None.
 */
static void ddr3_flush_l1_line(u32 line)
{
	u32 reg;

#if defined(MV88F672X)
	reg = 1;
#else
	reg = reg_read(REG_SAMPLE_RESET_LOW_ADDR) &
		(1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#ifdef MV88F67XX
	reg = ~reg & (1 << REG_SAMPLE_RESET_CPU_ARCH_OFFS);
#endif
#endif

	if (reg) {
		/* V7 Arch mode */
		flush_l1_v7(line);
		flush_l1_v7(line + CACHE_LINE_SIZE);
	} else {
		/* V6 Arch mode */
		flush_l1_v6(line);
		flush_l1_v6(line + CACHE_LINE_SIZE);
	}
}

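/*
 * Plain CPU copy of 'len' 32-bit words from src to dst; no XOR/DMA
 * engine is involved.
 */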
int ddr3_dram_sram_read(u32 src, u32 dst, u32 len)
{
	u32 ui;
	u32 *dst_ptr, *src_ptr;

	dst_ptr = (u32 *)dst;
	src_ptr = (u32 *)src;

	for (ui = 0; ui < len; ui++) {
		*dst_ptr = *src_ptr;
		dst_ptr++;
		src_ptr++;
	}

	return MV_OK;
}

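/*
 * Same flow as ddr3_sdram_compare(), but the compare variant is chosen
 * by the DDR bus width at run time rather than by compile-time defines.
 */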
int ddr3_sdram_dqs_compare(MV_DRAM_INFO *dram_info, u32 unlock_pup,
			   u32 *new_locked_pup, u32 *pattern,
			   u32 pattern_len, u32 sdram_offset, int write,
			   int mask, u32 *mask_pattern,
			   int special_compare)
{
	u32 uj, pup_groups;

	if (dram_info->num_of_std_pups == PUP_NUM_64BIT)
		pup_groups = 2;
	else
		pup_groups = 1;

	ddr3_reset_phy_read_fifo();

	/* Check if need to write to sdram before read */
	if (write == 1)
		ddr3_dram_sram_burst((u32)pattern, sdram_offset, pattern_len);

	ddr3_dram_sram_burst(sdram_offset, (u32)sdram_data, pattern_len);

	/* Compare read result to write */
	for (uj = 0; uj < pattern_len; uj++) {
		if (special_compare && special_compare_pattern(uj))
			continue;

		if (dram_info->ddr_width > 16) {
			compare_pattern_v1(uj, new_locked_pup, pattern,
					   pup_groups, 1);
		} else {
			compare_pattern_v2(uj, new_locked_pup, pattern);
		}
	}

	return MV_OK;
}

void ddr3_reset_phy_read_fifo(void)
{
	u32 reg;

	/* reset read FIFO */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);
	/* Start Auto Read Leveling procedure */
	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);

	/* 0x15B0 - Training Register */
	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));

	/* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	do {
		reg = reg_read(REG_DRAM_TRAINING_2_ADDR) &
			(1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS);
	} while (reg);	/* Wait for '0' */

	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Clear Auto Read Leveling procedure */
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);

	/* 0x15B0 - Training Register */
	reg_write(REG_DRAM_TRAINING_ADDR, reg);
}