// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright 2008, 2010-2016 Freescale Semiconductor, Inc.
 * Copyright 2017-2018 NXP Semiconductor
 */

#include <common.h>
#include <hwconfig.h>
#include <fsl_ddr_sdram.h>

#include <fsl_ddr.h>
#if defined(CONFIG_FSL_LSCH2) || defined(CONFIG_FSL_LSCH3) || \
	defined(CONFIG_ARM)
#include <asm/arch/clock.h>
#endif

/*
 * Use our own stack-based buffer before relocation to allow accessing longer
 * hwconfig strings that might be in the environment before we've relocated.
 * This is fairly fragile both in its use of the stack and in whether the
 * buffer is big enough; however, we will get a warning from env_get_f() for
 * the latter.
 */

/* Board-specific functions defined in each board's ddr.c */
extern void fsl_ddr_board_options(memctl_options_t *popts,
				  dimm_params_t *pdimm,
				  unsigned int ctrl_num);

struct dynamic_odt {
	unsigned int odt_rd_cfg;
	unsigned int odt_wr_cfg;
	unsigned int odt_rtt_norm;
	unsigned int odt_rtt_wr;
};
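
/*
 * The tables below are indexed by chip select (cs0..cs3). Their names encode
 * the DIMM population they cover: "single_"/"dual_" is the number of DIMM
 * slots per controller, and each following letter describes one slot --
 * S = single-rank, D = dual-rank, Q = quad-rank, 0 = empty slot. For example,
 * dual_DS is two slots with a dual-rank DIMM in slot 0 and a single-rank
 * DIMM in slot 1.
 */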

#ifdef CONFIG_SYS_FSL_DDR4
/*
 * Quad rank is not verified yet due to limited availability.
 * 20 OHM is replaced with 34 OHM since DDR4 doesn't have a 20 OHM option.
 */
static __maybe_unused const struct dynamic_odt single_Q[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR4_RTT_34_OHM,	/* unverified */
		DDR4_RTT_120_OHM
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_120_OHM
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,	/* tied high */
		DDR4_RTT_OFF,
		DDR4_RTT_120_OHM
	}
};

static __maybe_unused const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
};

static __maybe_unused const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_DS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0}
};
static __maybe_unused const struct dynamic_odt dual_SD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_120_OHM
	},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt dual_0D[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}

};

static __maybe_unused const struct dynamic_odt dual_0S[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_40_OHM,
		DDR4_RTT_OFF
	},
	{0, 0, 0, 0}

};

static __maybe_unused const struct dynamic_odt odt_unknown[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR4_RTT_120_OHM,
		DDR4_RTT_OFF
	}
};
#elif defined(CONFIG_SYS_FSL_DDR3)
static __maybe_unused const struct dynamic_odt single_Q[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,	/* tied high */
		DDR3_RTT_OFF,
		DDR3_RTT_120_OHM
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS_AND_OTHER_DIMM,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,	/* tied high */
		DDR3_RTT_OFF,
		DDR3_RTT_120_OHM
	}
};

static __maybe_unused const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR3_RTT_OFF,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
};

static __maybe_unused const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_30_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_30_OHM,
		DDR3_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_DS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_30_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0}
};
static __maybe_unused const struct dynamic_odt dual_SD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_20_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR3_RTT_20_OHM,
		DDR3_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_30_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_ALL,
		DDR3_RTT_30_OHM,
		DDR3_RTT_120_OHM
	},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR3_RTT_OFF,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt dual_0D[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_SAME_DIMM,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR3_RTT_OFF,
		DDR3_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}

};

static __maybe_unused const struct dynamic_odt dual_0S[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_40_OHM,
		DDR3_RTT_OFF
	},
	{0, 0, 0, 0}

};

static __maybe_unused const struct dynamic_odt odt_unknown[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR3_RTT_120_OHM,
		DDR3_RTT_OFF
	}
};
#else	/* CONFIG_SYS_FSL_DDR3 */
static __maybe_unused const struct dynamic_odt single_Q[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
};

static __maybe_unused const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_DS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt dual_SD[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_OTHER_DIMM,
		FSL_DDR_ODT_OTHER_DIMM,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0}
};

static __maybe_unused const struct dynamic_odt dual_0D[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_ALL,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};

static __maybe_unused const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{0, 0, 0, 0}

};

static __maybe_unused const struct dynamic_odt dual_0S[4] = {
	{0, 0, 0, 0},
	{0, 0, 0, 0},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_150_OHM,
		DDR2_RTT_OFF
	},
	{0, 0, 0, 0}

};

static __maybe_unused const struct dynamic_odt odt_unknown[4] = {
	{	/* cs0 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs1 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	},
	{	/* cs2 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_CS,
		DDR2_RTT_75_OHM,
		DDR2_RTT_OFF
	},
	{	/* cs3 */
		FSL_DDR_ODT_NEVER,
		FSL_DDR_ODT_NEVER,
		DDR2_RTT_OFF,
		DDR2_RTT_OFF
	}
};
#endif

/*
 * Automatically select bank interleaving mode based on DIMMs,
 * in this order: cs0_cs1_cs2_cs3, cs0_cs1, null.
 * This function only deals with one or two slots per controller.
 */
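/*
 * For example, with two slots per controller, two dual-rank DIMMs map to
 * FSL_DDR_CS0_CS1_CS2_CS3, while a single dual-rank DIMM in slot 0 maps to
 * FSL_DDR_CS0_CS1.
 */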
static inline unsigned int auto_bank_intlv(dimm_params_t *pdimm)
{
#if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
	if (pdimm[0].n_ranks == 4)
		return FSL_DDR_CS0_CS1_CS2_CS3;
	else if (pdimm[0].n_ranks == 2)
		return FSL_DDR_CS0_CS1;
#elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
#ifdef CONFIG_FSL_DDR_FIRST_SLOT_QUAD_CAPABLE
	if (pdimm[0].n_ranks == 4)
		return FSL_DDR_CS0_CS1_CS2_CS3;
#endif
	if (pdimm[0].n_ranks == 2) {
		if (pdimm[1].n_ranks == 2)
			return FSL_DDR_CS0_CS1_CS2_CS3;
		else
			return FSL_DDR_CS0_CS1;
	}
#endif
	return 0;
}

unsigned int populate_memctl_options(const common_timing_params_t *common_dimm,
				     memctl_options_t *popts,
				     dimm_params_t *pdimm,
				     unsigned int ctrl_num)
{
	unsigned int i;
	char buffer[HWCONFIG_BUFFER_SIZE];
	char *buf = NULL;
#if defined(CONFIG_SYS_FSL_DDR3) || \
	defined(CONFIG_SYS_FSL_DDR2) || \
	defined(CONFIG_SYS_FSL_DDR4)
	const struct dynamic_odt *pdodt = odt_unknown;
#endif
#if (CONFIG_FSL_SDRAM_TYPE != SDRAM_TYPE_DDR4)
	ulong ddr_freq;
#endif

	/*
	 * Extract hwconfig from the environment since we have not properly
	 * set up the environment yet, but need it for the DDR config
	 * parameters.
	 */
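	/*
	 * Several of the options below are taken from the "fsl_ddr" hwconfig
	 * key. An illustrative setting (see doc/README.fsl-ddr for the exact
	 * syntax and the full list of sub-options) would be:
	 *   hwconfig=fsl_ddr:ecc=on,ctlr_intlv=cacheline,bank_intlv=cs0_cs1
	 */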
	if (env_get_f("hwconfig", buffer, sizeof(buffer)) > 0)
		buf = buffer;

#if defined(CONFIG_SYS_FSL_DDR3) || \
	defined(CONFIG_SYS_FSL_DDR2) || \
	defined(CONFIG_SYS_FSL_DDR4)
	/* Chip select options. */
#if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
	switch (pdimm[0].n_ranks) {
	case 1:
		pdodt = single_S;
		break;
	case 2:
		pdodt = single_D;
		break;
	case 4:
		pdodt = single_Q;
		break;
	}
#elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
	switch (pdimm[0].n_ranks) {
#ifdef CONFIG_FSL_DDR_FIRST_SLOT_QUAD_CAPABLE
	case 4:
		pdodt = single_Q;
		if (pdimm[1].n_ranks)
			printf("Error: Quad- and Dual-rank DIMMs cannot be used together\n");
		break;
#endif
	case 2:
		switch (pdimm[1].n_ranks) {
		case 2:
			pdodt = dual_DD;
			break;
		case 1:
			pdodt = dual_DS;
			break;
		case 0:
			pdodt = dual_D0;
			break;
		}
		break;
	case 1:
		switch (pdimm[1].n_ranks) {
		case 2:
			pdodt = dual_SD;
			break;
		case 1:
			pdodt = dual_SS;
			break;
		case 0:
			pdodt = dual_S0;
			break;
		}
		break;
	case 0:
		switch (pdimm[1].n_ranks) {
		case 2:
			pdodt = dual_0D;
			break;
		case 1:
			pdodt = dual_0S;
			break;
		}
		break;
	}
#endif	/* CONFIG_DIMM_SLOTS_PER_CTLR */
#endif	/* CONFIG_SYS_FSL_DDR2, 3, 4 */

	/* Pick chip-select local options. */
	for (i = 0; i < CONFIG_CHIP_SELECTS_PER_CTRL; i++) {
#if defined(CONFIG_SYS_FSL_DDR3) || \
	defined(CONFIG_SYS_FSL_DDR2) || \
	defined(CONFIG_SYS_FSL_DDR4)
		popts->cs_local_opts[i].odt_rd_cfg = pdodt[i].odt_rd_cfg;
		popts->cs_local_opts[i].odt_wr_cfg = pdodt[i].odt_wr_cfg;
		popts->cs_local_opts[i].odt_rtt_norm = pdodt[i].odt_rtt_norm;
		popts->cs_local_opts[i].odt_rtt_wr = pdodt[i].odt_rtt_wr;
#else
		popts->cs_local_opts[i].odt_rd_cfg = FSL_DDR_ODT_NEVER;
		popts->cs_local_opts[i].odt_wr_cfg = FSL_DDR_ODT_CS;
#endif
		popts->cs_local_opts[i].auto_precharge = 0;
	}

	/* Pick interleaving mode. */

	/*
	 * 0 = no interleaving
	 * 1 = interleaving between 2 controllers
	 */
	popts->memctl_interleaving = 0;

	/*
	 * 0 = cacheline
	 * 1 = page
	 * 2 = (logical) bank
	 * 3 = superbank (only if CS interleaving is enabled)
	 */
	popts->memctl_interleaving_mode = 0;

	/*
	 * 0: cacheline: bit 30 of the 36-bit physical addr selects the memctl
	 * 1: page: bit to the left of the column bits selects the memctl
	 * 2: bank: bit to the left of the bank bits selects the memctl
	 * 3: superbank: bit to the left of the chip select selects the memctl
	 *
	 * NOTE: ba_intlv (rank interleaving) is independent of memory
	 * controller interleaving; it is only within a memory controller.
	 * Must use superbank interleaving if rank interleaving is used and
	 * memory controller interleaving is enabled.
	 */

	/*
	 * 0 = no
	 * 0x40 = CS0,CS1
	 * 0x20 = CS2,CS3
	 * 0x60 = CS0,CS1 + CS2,CS3
	 * 0x04 = CS0,CS1,CS2,CS3
	 */
	popts->ba_intlv_ctl = 0;

	/* Memory Organization Parameters */
	popts->registered_dimm_en = common_dimm->all_dimms_registered;

	/* Operational Mode Parameters */

	/* Pick ECC modes */
	popts->ecc_mode = 0;	/* 0 = disabled, 1 = enabled */
#ifdef CONFIG_DDR_ECC
	if (hwconfig_sub_f("fsl_ddr", "ecc", buf)) {
		if (hwconfig_subarg_cmp_f("fsl_ddr", "ecc", "on", buf))
			popts->ecc_mode = 1;
	} else
		popts->ecc_mode = 1;
#endif
	/* 1 = use memory controller to init data */
	popts->ecc_init_using_memctl = popts->ecc_mode ? 1 : 0;

	/*
	 * Choose DQS config
	 * 0 for DDR1
	 * 1 for DDR2
	 */
#if defined(CONFIG_SYS_FSL_DDR1)
	popts->dqs_config = 0;
#elif defined(CONFIG_SYS_FSL_DDR2) || defined(CONFIG_SYS_FSL_DDR3)
	popts->dqs_config = 1;
#endif

	/* Choose self-refresh during sleep. */
	popts->self_refresh_in_sleep = 1;

	/* Choose dynamic power management mode. */
	popts->dynamic_power = 0;

	/*
	 * Check the first DIMM for the primary SDRAM width, presuming all
	 * DIMMs are similar: 0 = 64-bit, 1 = 32-bit, 2 = 16-bit
	 */
#if defined(CONFIG_SYS_FSL_DDR1) || defined(CONFIG_SYS_FSL_DDR2)
	if (pdimm[0].n_ranks != 0) {
		if ((pdimm[0].data_width >= 64) && \
			(pdimm[0].data_width <= 72))
			popts->data_bus_width = 0;
		else if ((pdimm[0].data_width >= 32) && \
			(pdimm[0].data_width <= 40))
			popts->data_bus_width = 1;
		else {
			panic("Error: data width %u is invalid!\n",
				pdimm[0].data_width);
		}
	}
#else
	if (pdimm[0].n_ranks != 0) {
		if (pdimm[0].primary_sdram_width == 64)
			popts->data_bus_width = 0;
		else if (pdimm[0].primary_sdram_width == 32)
			popts->data_bus_width = 1;
		else if (pdimm[0].primary_sdram_width == 16)
			popts->data_bus_width = 2;
		else {
			panic("Error: primary sdram width %u is invalid!\n",
				pdimm[0].primary_sdram_width);
		}
	}
#endif

	popts->x4_en = (pdimm[0].device_width == 4) ? 1 : 0;

	/* Choose burst length. */
#if defined(CONFIG_SYS_FSL_DDR3) || defined(CONFIG_SYS_FSL_DDR4)
#if defined(CONFIG_E500MC)
	popts->otf_burst_chop_en = 0;	/* on-the-fly burst chop disable */
	popts->burst_length = DDR_BL8;	/* Fixed 8-beat burst len */
#else
	if ((popts->data_bus_width == 1) || (popts->data_bus_width == 2)) {
		/* 32-bit or 16-bit bus */
		popts->otf_burst_chop_en = 0;
		popts->burst_length = DDR_BL8;
	} else {
		popts->otf_burst_chop_en = 1;	/* on-the-fly burst chop */
		popts->burst_length = DDR_OTF;	/* on-the-fly BC4 and BL8 */
	}
#endif
#else
	popts->burst_length = DDR_BL4;	/* has to be 4 for DDR2 */
#endif

	/* Choose ddr controller address mirror mode */
#if defined(CONFIG_SYS_FSL_DDR3) || defined(CONFIG_SYS_FSL_DDR4)
	for (i = 0; i < CONFIG_DIMM_SLOTS_PER_CTLR; i++) {
		if (pdimm[i].n_ranks) {
			popts->mirrored_dimm = pdimm[i].mirrored_dimm;
			break;
		}
	}
#endif

	/* Global Timing Parameters. */
	debug("mclk_ps = %u ps\n", get_memory_clk_period_ps(ctrl_num));

	/* Pick a caslat override. */
	popts->cas_latency_override = 0;
	popts->cas_latency_override_value = 3;
	if (popts->cas_latency_override) {
		debug("using caslat override value = %u\n",
		      popts->cas_latency_override_value);
	}

	/* Decide whether to use the computed derated latency */
	popts->use_derated_caslat = 0;

	/* Choose an additive latency. */
	popts->additive_latency_override = 0;
	popts->additive_latency_override_value = 3;
	if (popts->additive_latency_override) {
		debug("using additive latency override value = %u\n",
		      popts->additive_latency_override_value);
	}

	/*
	 * 2T_EN setting
	 *
	 * Factors to consider for 2T_EN:
	 *	- number of DIMMs installed
	 *	- number of components, number of active ranks
	 *	- how much time you want to spend playing around
	 */
	popts->twot_en = 0;
	popts->threet_en = 0;

	/* for RDIMM and DDR4 UDIMM/discrete memory, address parity enable */
	if (popts->registered_dimm_en)
		popts->ap_en = 1; /* 0 = disable, 1 = enable */
	else
		popts->ap_en = 0; /* disabled for DDR4 UDIMM/discrete default */

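	/*
	 * Address/command parity can also be requested explicitly, e.g. with
	 * hwconfig=fsl_ddr:parity=on; it is only honored for RDIMMs and for
	 * DDR4.
	 */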
	if (hwconfig_sub_f("fsl_ddr", "parity", buf)) {
		if (hwconfig_subarg_cmp_f("fsl_ddr", "parity", "on", buf)) {
			if (popts->registered_dimm_en ||
			    (CONFIG_FSL_SDRAM_TYPE == SDRAM_TYPE_DDR4))
				popts->ap_en = 1;
		}
	}

	/*
	 * BSTTOPRE precharge interval
	 *
	 * Set this to 0 for global auto precharge.
	 * The value 0x100 has been used for DDR1, DDR2 and DDR3, and it is
	 * not wrong; any value should work, since the performance depends on
	 * the application and there is no single value that is best for all.
	 * One reasonable choice is 1/4 of the refresh interval.
	 */
	popts->bstopre = picos_to_mclk(ctrl_num, common_dimm->refresh_rate_ps)
			 >> 2;

	/*
	 * Window for four activates -- tFAW
	 *
	 * FIXME: UM: applies only to DDR2/DDR3 with eight logical banks only
	 * FIXME: varies depending upon number of column addresses or data
	 * FIXME: width, was considering looking at pdimm->primary_sdram_width
	 */
#if defined(CONFIG_SYS_FSL_DDR1)
	popts->tfaw_window_four_activates_ps = mclk_to_picos(ctrl_num, 1);

#elif defined(CONFIG_SYS_FSL_DDR2)
	/*
	 * x4/x8; some datasheets have 35000
	 * x16 wide columns only? Use 50000?
	 */
	popts->tfaw_window_four_activates_ps = 37500;

#else
	popts->tfaw_window_four_activates_ps = pdimm[0].tfaw_ps;
#endif
	popts->zq_en = 0;
	popts->wrlvl_en = 0;
#if defined(CONFIG_SYS_FSL_DDR3) || defined(CONFIG_SYS_FSL_DDR4)
	/*
	 * Because DDR3 DIMMs use a fly-by topology, we suggest enabling
	 * write leveling to meet tQDSS under different loadings.
	 */
	popts->wrlvl_en = 1;
	popts->zq_en = 1;
	popts->wrlvl_override = 0;
#endif

	/*
	 * Check the interleaving configuration from the environment.
	 * Please refer to doc/README.fsl-ddr for the details.
	 *
	 * If memory controller interleaving is enabled, then the data
	 * bus widths must be programmed identically for all memory
	 * controllers.
	 *
	 * Attempt to set all controllers to the same chip-select
	 * interleaving mode. A best effort is made to get the requested
	 * ranks interleaved together, such that the result is a subset of
	 * the requested configuration.
	 *
	 * If CONFIG_SYS_FSL_DDR_INTLV_256B is defined, mandatory 256-byte
	 * interleaving is enabled.
	 */
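	/*
	 * Recognized ctlr_intlv values below: null, cacheline, page, bank,
	 * superbank, and (depending on the number of controllers)
	 * 3way_1KB/3way_4KB/3way_8KB or 4way_1KB/4way_4KB/4way_8KB.
	 */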
#if (CONFIG_SYS_NUM_DDR_CTLRS > 1)
	if (!hwconfig_sub_f("fsl_ddr", "ctlr_intlv", buf))
#ifdef CONFIG_SYS_FSL_DDR_INTLV_256B
		;
#else
		goto done;
#endif
	if (pdimm[0].n_ranks == 0) {
		printf("There is no rank on CS0 for controller %d.\n", ctrl_num);
		popts->memctl_interleaving = 0;
		goto done;
	}
	popts->memctl_interleaving = 1;
#ifdef CONFIG_SYS_FSL_DDR_INTLV_256B
	popts->memctl_interleaving_mode = FSL_DDR_256B_INTERLEAVING;
	popts->memctl_interleaving = 1;
	debug("256 Byte interleaving\n");
#else
	/*
	 * Test null first. If CONFIG_HWCONFIG is not defined,
	 * hwconfig_subarg_cmp_f() returns non-zero.
	 */
	if (hwconfig_subarg_cmp_f("fsl_ddr", "ctlr_intlv",
				  "null", buf)) {
		popts->memctl_interleaving = 0;
		debug("memory controller interleaving disabled.\n");
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "cacheline", buf)) {
		popts->memctl_interleaving_mode =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : FSL_DDR_CACHE_LINE_INTERLEAVING;
		popts->memctl_interleaving =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : 1;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "page", buf)) {
		popts->memctl_interleaving_mode =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : FSL_DDR_PAGE_INTERLEAVING;
		popts->memctl_interleaving =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : 1;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "bank", buf)) {
		popts->memctl_interleaving_mode =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : FSL_DDR_BANK_INTERLEAVING;
		popts->memctl_interleaving =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : 1;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "superbank", buf)) {
		popts->memctl_interleaving_mode =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : FSL_DDR_SUPERBANK_INTERLEAVING;
		popts->memctl_interleaving =
			((CONFIG_SYS_NUM_DDR_CTLRS == 3) && ctrl_num == 2) ?
			0 : 1;
#if (CONFIG_SYS_NUM_DDR_CTLRS == 3)
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "3way_1KB", buf)) {
		popts->memctl_interleaving_mode =
			FSL_DDR_3WAY_1KB_INTERLEAVING;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "3way_4KB", buf)) {
		popts->memctl_interleaving_mode =
			FSL_DDR_3WAY_4KB_INTERLEAVING;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "3way_8KB", buf)) {
		popts->memctl_interleaving_mode =
			FSL_DDR_3WAY_8KB_INTERLEAVING;
#elif (CONFIG_SYS_NUM_DDR_CTLRS == 4)
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "4way_1KB", buf)) {
		popts->memctl_interleaving_mode =
			FSL_DDR_4WAY_1KB_INTERLEAVING;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "4way_4KB", buf)) {
		popts->memctl_interleaving_mode =
			FSL_DDR_4WAY_4KB_INTERLEAVING;
	} else if (hwconfig_subarg_cmp_f("fsl_ddr",
					 "ctlr_intlv",
					 "4way_8KB", buf)) {
		popts->memctl_interleaving_mode =
			FSL_DDR_4WAY_8KB_INTERLEAVING;
#endif
	} else {
		popts->memctl_interleaving = 0;
		printf("hwconfig has unrecognized parameter for ctlr_intlv.\n");
	}
#endif	/* CONFIG_SYS_FSL_DDR_INTLV_256B */
done:
#endif /* CONFIG_SYS_NUM_DDR_CTLRS > 1 */
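	/*
	 * Recognized bank_intlv values below: null, cs0_cs1, cs2_cs3,
	 * cs0_cs1_and_cs2_cs3, cs0_cs1_cs2_cs3 and auto.
	 */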
	if ((hwconfig_sub_f("fsl_ddr", "bank_intlv", buf)) &&
	    (CONFIG_CHIP_SELECTS_PER_CTRL > 1)) {
		/*
		 * Test null first. If CONFIG_HWCONFIG is not defined,
		 * hwconfig_subarg_cmp_f() returns non-zero.
		 */
		if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
					  "null", buf))
			debug("bank interleaving disabled.\n");
		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
					       "cs0_cs1", buf))
			popts->ba_intlv_ctl = FSL_DDR_CS0_CS1;
		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
					       "cs2_cs3", buf))
			popts->ba_intlv_ctl = FSL_DDR_CS2_CS3;
		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
					       "cs0_cs1_and_cs2_cs3", buf))
			popts->ba_intlv_ctl = FSL_DDR_CS0_CS1_AND_CS2_CS3;
		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
					       "cs0_cs1_cs2_cs3", buf))
			popts->ba_intlv_ctl = FSL_DDR_CS0_CS1_CS2_CS3;
		else if (hwconfig_subarg_cmp_f("fsl_ddr", "bank_intlv",
					       "auto", buf))
			popts->ba_intlv_ctl = auto_bank_intlv(pdimm);
		else
			printf("hwconfig has unrecognized parameter for bank_intlv.\n");
		switch (popts->ba_intlv_ctl & FSL_DDR_CS0_CS1_CS2_CS3) {
		case FSL_DDR_CS0_CS1_CS2_CS3:
#if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
			if (pdimm[0].n_ranks < 4) {
				popts->ba_intlv_ctl = 0;
				printf("Not enough bank(chip-select) for "
					"CS0+CS1+CS2+CS3 on controller %d, "
					"interleaving disabled!\n", ctrl_num);
			}
#elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
#ifdef CONFIG_FSL_DDR_FIRST_SLOT_QUAD_CAPABLE
			if (pdimm[0].n_ranks == 4)
				break;
#endif
			if ((pdimm[0].n_ranks < 2) && (pdimm[1].n_ranks < 2)) {
				popts->ba_intlv_ctl = 0;
				printf("Not enough bank(chip-select) for "
					"CS0+CS1+CS2+CS3 on controller %d, "
					"interleaving disabled!\n", ctrl_num);
			}
			if (pdimm[0].capacity != pdimm[1].capacity) {
				popts->ba_intlv_ctl = 0;
				printf("Not identical DIMM size for "
					"CS0+CS1+CS2+CS3 on controller %d, "
					"interleaving disabled!\n", ctrl_num);
			}
#endif
			break;
		case FSL_DDR_CS0_CS1:
			if (pdimm[0].n_ranks < 2) {
				popts->ba_intlv_ctl = 0;
				printf("Not enough bank(chip-select) for "
					"CS0+CS1 on controller %d, "
					"interleaving disabled!\n", ctrl_num);
			}
			break;
		case FSL_DDR_CS2_CS3:
#if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
			if (pdimm[0].n_ranks < 4) {
				popts->ba_intlv_ctl = 0;
				printf("Not enough bank(chip-select) for CS2+CS3 "
					"on controller %d, interleaving disabled!\n", ctrl_num);
			}
#elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
			if (pdimm[1].n_ranks < 2) {
				popts->ba_intlv_ctl = 0;
				printf("Not enough bank(chip-select) for CS2+CS3 "
					"on controller %d, interleaving disabled!\n", ctrl_num);
			}
#endif
			break;
		case FSL_DDR_CS0_CS1_AND_CS2_CS3:
#if (CONFIG_DIMM_SLOTS_PER_CTLR == 1)
			if (pdimm[0].n_ranks < 4) {
				popts->ba_intlv_ctl = 0;
				printf("Not enough bank(CS) for CS0+CS1 and "
					"CS2+CS3 on controller %d, "
					"interleaving disabled!\n", ctrl_num);
			}
#elif (CONFIG_DIMM_SLOTS_PER_CTLR == 2)
			if ((pdimm[0].n_ranks < 2) || (pdimm[1].n_ranks < 2)) {
				popts->ba_intlv_ctl = 0;
				printf("Not enough bank(CS) for CS0+CS1 and "
					"CS2+CS3 on controller %d, "
					"interleaving disabled!\n", ctrl_num);
			}
#endif
			break;
		default:
			popts->ba_intlv_ctl = 0;
			break;
		}
	}

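	/*
	 * Address hashing is an opt-in feature; e.g. hwconfig with
	 * fsl_ddr:addr_hash=true turns it on, while addr_hash=null leaves
	 * it off.
	 */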
	if (hwconfig_sub_f("fsl_ddr", "addr_hash", buf)) {
		if (hwconfig_subarg_cmp_f("fsl_ddr", "addr_hash", "null", buf))
			popts->addr_hash = 0;
		else if (hwconfig_subarg_cmp_f("fsl_ddr", "addr_hash",
					       "true", buf))
			popts->addr_hash = 1;
	}

	if (pdimm[0].n_ranks == 4)
		popts->quad_rank_present = 1;

	popts->package_3ds = pdimm->package_3ds;

#if (CONFIG_FSL_SDRAM_TYPE != SDRAM_TYPE_DDR4)
	ddr_freq = get_ddr_freq(ctrl_num) / 1000000;
	if (popts->registered_dimm_en) {
		popts->rcw_override = 1;
		popts->rcw_1 = 0x000a5a00;
		if (ddr_freq <= 800)
			popts->rcw_2 = 0x00000000;
		else if (ddr_freq <= 1066)
			popts->rcw_2 = 0x00100000;
		else if (ddr_freq <= 1333)
			popts->rcw_2 = 0x00200000;
		else
			popts->rcw_2 = 0x00300000;
	}
#endif

	fsl_ddr_board_options(popts, pdimm, ctrl_num);

	return 0;
}

void check_interleaving_options(fsl_ddr_info_t *pinfo)
{
	int i, j, k, check_n_ranks, intlv_invalid = 0;
	unsigned int check_intlv, check_n_row_addr, check_n_col_addr;
	unsigned long long check_rank_density;
	struct dimm_params_s *dimm;
	int first_ctrl = pinfo->first_ctrl;
	int last_ctrl = first_ctrl + pinfo->num_ctrls - 1;

	/*
	 * Check if all controllers are configured for memory controller
	 * interleaving. Identical DIMMs are recommended. At least the size,
	 * row and column addresses should be checked.
	 */
	j = 0;
	check_n_ranks = pinfo->dimm_params[first_ctrl][0].n_ranks;
	check_rank_density = pinfo->dimm_params[first_ctrl][0].rank_density;
	check_n_row_addr = pinfo->dimm_params[first_ctrl][0].n_row_addr;
	check_n_col_addr = pinfo->dimm_params[first_ctrl][0].n_col_addr;
	check_intlv = pinfo->memctl_opts[first_ctrl].memctl_interleaving_mode;
	for (i = first_ctrl; i <= last_ctrl; i++) {
		dimm = &pinfo->dimm_params[i][0];
		if (!pinfo->memctl_opts[i].memctl_interleaving) {
			continue;
		} else if (((check_rank_density != dimm->rank_density) ||
			    (check_n_ranks != dimm->n_ranks) ||
			    (check_n_row_addr != dimm->n_row_addr) ||
			    (check_n_col_addr != dimm->n_col_addr) ||
			    (check_intlv !=
			     pinfo->memctl_opts[i].memctl_interleaving_mode))) {
			intlv_invalid = 1;
			break;
		} else {
			j++;
		}

	}
	if (intlv_invalid) {
		for (i = first_ctrl; i <= last_ctrl; i++)
			pinfo->memctl_opts[i].memctl_interleaving = 0;
		printf("Not all DIMMs are identical. "
			"Memory controller interleaving disabled.\n");
	} else {
		switch (check_intlv) {
		case FSL_DDR_256B_INTERLEAVING:
		case FSL_DDR_CACHE_LINE_INTERLEAVING:
		case FSL_DDR_PAGE_INTERLEAVING:
		case FSL_DDR_BANK_INTERLEAVING:
		case FSL_DDR_SUPERBANK_INTERLEAVING:
#if (3 == CONFIG_SYS_NUM_DDR_CTLRS)
			k = 2;
#else
			k = CONFIG_SYS_NUM_DDR_CTLRS;
#endif
			break;
		case FSL_DDR_3WAY_1KB_INTERLEAVING:
		case FSL_DDR_3WAY_4KB_INTERLEAVING:
		case FSL_DDR_3WAY_8KB_INTERLEAVING:
		case FSL_DDR_4WAY_1KB_INTERLEAVING:
		case FSL_DDR_4WAY_4KB_INTERLEAVING:
		case FSL_DDR_4WAY_8KB_INTERLEAVING:
		default:
			k = CONFIG_SYS_NUM_DDR_CTLRS;
			break;
		}
		debug("%d of %d controllers are interleaving.\n", j, k);
		if (j && (j != k)) {
			for (i = first_ctrl; i <= last_ctrl; i++)
				pinfo->memctl_opts[i].memctl_interleaving = 0;
			if ((last_ctrl - first_ctrl) > 1)
				puts("Not all controllers have compatible interleaving mode. All disabled.\n");
		}
	}
	debug("Checking interleaving options completed\n");
}

int fsl_use_spd(void)
{
	int use_spd = 0;

#ifdef CONFIG_DDR_SPD
	char buffer[HWCONFIG_BUFFER_SIZE];
	char *buf = NULL;

	/*
	 * Extract hwconfig from the environment since we have not properly
	 * set up the environment yet, but need it for the DDR config
	 * parameters.
	 */
	if (env_get_f("hwconfig", buffer, sizeof(buffer)) > 0)
		buf = buffer;

	/* If hwconfig is not enabled, or "sdram" is not defined, use SPD. */
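	/*
	 * e.g. hwconfig=fsl_ddr:sdram=spd (the default) reads DIMM SPD, while
	 * sdram=fixed uses the board's fixed DDR settings instead.
	 */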
	if (hwconfig_sub_f("fsl_ddr", "sdram", buf)) {
		if (hwconfig_subarg_cmp_f("fsl_ddr", "sdram", "spd", buf))
			use_spd = 1;
		else if (hwconfig_subarg_cmp_f("fsl_ddr", "sdram",
					       "fixed", buf))
			use_spd = 0;
		else
			use_spd = 1;
	} else
		use_spd = 1;
#endif

	return use_spd;
}
