1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * (C) Copyright 2006-2007 Freescale Semiconductor, Inc.
4  *
5  * (C) Copyright 2006
6  * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
7  *
8  * Copyright (C) 2004-2006 Freescale Semiconductor, Inc.
9  * (C) Copyright 2003 Motorola Inc.
10  * Xianghua Xiao (X.Xiao@motorola.com)
11  */
12 
13 #include <common.h>
14 #include <asm/processor.h>
15 #include <asm/io.h>
16 #include <i2c.h>
17 #include <spd.h>
18 #include <asm/mmu.h>
19 #include <spd_sdram.h>
20 
21 DECLARE_GLOBAL_DATA_PTR;
22 
23 void board_add_ram_info(int use_default)
24 {
25 	volatile immap_t *immap = (immap_t *) CONFIG_SYS_IMMR;
26 	volatile ddr83xx_t *ddr = &immap->ddr;
27 	char buf[32];
28 
29 	printf(" (DDR%d", ((ddr->sdram_cfg & SDRAM_CFG_SDRAM_TYPE_MASK)
30 			   >> SDRAM_CFG_SDRAM_TYPE_SHIFT) - 1);
31 
32 #if defined(CONFIG_MPC8308) || defined(CONFIG_MPC831x)
33 	if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_16)
34 		puts(", 16-bit");
35 	else if ((ddr->sdram_cfg & SDRAM_CFG_DBW_MASK) == SDRAM_CFG_DBW_32)
36 		puts(", 32-bit");
37 	else
38 		puts(", unknown width");
39 #else
40 	if (ddr->sdram_cfg & SDRAM_CFG_32_BE)
41 		puts(", 32-bit");
42 	else
43 		puts(", 64-bit");
44 #endif
45 
46 	if (ddr->sdram_cfg & SDRAM_CFG_ECC_EN)
47 		puts(", ECC on");
48 	else
49 		puts(", ECC off");
50 
51 	printf(", %s MHz)", strmhz(buf, gd->mem_clk));
52 
53 #if defined(CONFIG_SYS_LB_SDRAM) && defined(CONFIG_SYS_LBC_SDRAM_SIZE)
54 	puts("\nSDRAM: ");
55 	print_size (CONFIG_SYS_LBC_SDRAM_SIZE * 1024 * 1024, " (local bus)");
56 #endif
57 }
58 
59 #ifdef CONFIG_SPD_EEPROM
60 #ifndef	CONFIG_SYS_READ_SPD
61 #define CONFIG_SYS_READ_SPD	i2c_read
62 #endif
63 #ifndef SPD_EEPROM_OFFSET
64 #define SPD_EEPROM_OFFSET	0
65 #endif
66 #ifndef SPD_EEPROM_ADDR_LEN
67 #define SPD_EEPROM_ADDR_LEN     1
68 #endif
69 
70 /*
71  * Convert picoseconds into clock cycles (rounding up if needed).
72  */
73 int
74 picos_to_clk(int picos)
75 {
76 	unsigned int mem_bus_clk;
77 	int clks;
78 
79 	mem_bus_clk = gd->mem_clk >> 1;
80 	clks = picos / (1000000000 / (mem_bus_clk / 1000));
81 	if (picos % (1000000000 / (mem_bus_clk / 1000)) != 0)
82 		clks++;
83 
84 	return clks;
85 }
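/*
 * Illustrative example (values assumed, not taken from any particular board):
 * with gd->mem_clk at 266 MHz (a DDR-266 module), mem_bus_clk is 133 MHz,
 * i.e. a period of about 7518 ps, so a 15625000 ps (15.625 us) refresh
 * interval rounds up to 2079 clocks.
 */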
86 
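/*
 * banksize() decodes SPD byte 31 (per-rank density): the byte is rotated
 * right by two bits so that each bit position corresponds to a multiple of
 * 16 MiB, then shifted up by 24 to give the size in bytes.  For example, an
 * assumed row_dens of 0x40 (256 MiB per rank) yields 0x10000000.
 */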
87 unsigned int banksize(unsigned char row_dens)
88 {
89 	return ((row_dens >> 2) | ((row_dens & 3) << 6)) << 24;
90 }
91 
92 int read_spd(uint addr)
93 {
94 	return ((int) addr);
95 }
96 
97 #undef SPD_DEBUG
98 #ifdef SPD_DEBUG
99 static void spd_debug(spd_eeprom_t *spd)
100 {
101 	printf ("\nDIMM type:       %-18.18s\n", spd->mpart);
102 	printf ("SPD size:        %d\n", spd->info_size);
103 	printf ("EEPROM size:     %d\n", 1 << spd->chip_size);
104 	printf ("Memory type:     %d\n", spd->mem_type);
105 	printf ("Row addr:        %d\n", spd->nrow_addr);
106 	printf ("Column addr:     %d\n", spd->ncol_addr);
107 	printf ("# of rows:       %d\n", spd->nrows);
108 	printf ("Row density:     %d\n", spd->row_dens);
109 	printf ("# of banks:      %d\n", spd->nbanks);
110 	printf ("Data width:      %d\n",
111 			256 * spd->dataw_msb + spd->dataw_lsb);
112 	printf ("Chip width:      %d\n", spd->primw);
113 	printf ("Refresh rate:    %02X\n", spd->refresh);
114 	printf ("CAS latencies:   %02X\n", spd->cas_lat);
115 	printf ("Write latencies: %02X\n", spd->write_lat);
116 	printf ("tRP:             %d\n", spd->trp);
117 	printf ("tRCD:            %d\n", spd->trcd);
118 	printf ("\n");
119 }
120 #endif /* SPD_DEBUG */
121 
122 long int spd_sdram()
123 {
124 	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
125 	volatile ddr83xx_t *ddr = &immap->ddr;
126 	volatile law83xx_t *ecm = &immap->sysconf.ddrlaw[0];
127 	spd_eeprom_t spd;
128 	unsigned int n_ranks;
129 	unsigned int odt_rd_cfg, odt_wr_cfg;
130 	unsigned char twr_clk, twtr_clk;
131 	unsigned int sdram_type;
132 	unsigned int memsize;
133 	unsigned int law_size;
134 	unsigned char caslat, caslat_ctrl;
135 	unsigned int trfc, trfc_clk, trfc_low;
136 	unsigned int trcd_clk, trtp_clk;
137 	unsigned char cke_min_clk;
138 	unsigned char add_lat, wr_lat;
139 	unsigned char wr_data_delay;
140 	unsigned char four_act;
141 	unsigned char cpo;
142 	unsigned char burstlen;
143 	unsigned char odt_cfg, mode_odt_enable;
144 	unsigned int max_bus_clk;
145 	unsigned int max_data_rate, effective_data_rate;
146 	unsigned int ddrc_clk;
147 	unsigned int refresh_clk;
148 	unsigned int sdram_cfg;
149 	unsigned int ddrc_ecc_enable;
150 	unsigned int pvr = get_pvr();
151 
152 	/*
153 	 * First disable the memory controller (could be enabled
154 	 * by the debugger)
155 	 */
156 	clrsetbits_be32(&ddr->sdram_cfg, SDRAM_CFG_MEM_EN, 0);
157 	sync();
158 	isync();
159 
160 	/* Read SPD parameters with I2C */
161 	CONFIG_SYS_READ_SPD(SPD_EEPROM_ADDRESS, SPD_EEPROM_OFFSET,
162 		SPD_EEPROM_ADDR_LEN, (uchar *) &spd, sizeof(spd));
163 #ifdef SPD_DEBUG
164 	spd_debug(&spd);
165 #endif
166 	/* Check the memory type */
167 	if (spd.mem_type != SPD_MEMTYPE_DDR && spd.mem_type != SPD_MEMTYPE_DDR2) {
168 		debug("DDR: Module mem type is %02X\n", spd.mem_type);
169 		return 0;
170 	}
171 
172 	/* Check the number of physical banks */
173 	if (spd.mem_type == SPD_MEMTYPE_DDR) {
174 		n_ranks = spd.nrows;
175 	} else {
176 		n_ranks = (spd.nrows & 0x7) + 1;
177 	}
178 
179 	if (n_ranks > 2) {
180 		printf("DDR: The number of physical banks is %02X\n", n_ranks);
181 		return 0;
182 	}
183 
184 	/* Check if the number of rows of the module is in the range of the DDRC */
185 	if (spd.nrow_addr < 12 || spd.nrow_addr > 15) {
186 		printf("DDR: Row number is out of range of DDRC, row=%02X\n",
187 							 spd.nrow_addr);
188 		return 0;
189 	}
190 
191 	/* Check if the number of columns of the module is in the range of the DDRC */
192 	if (spd.ncol_addr < 8 || spd.ncol_addr > 11) {
193 		printf("DDR: Col number is out of range of DDRC, col=%02X\n",
194 							 spd.ncol_addr);
195 		return 0;
196 	}
197 
198 #ifdef CONFIG_SYS_DDRCDR_VALUE
199 	/*
200 	 * Adjust DDR II IO voltage biasing.  It just makes it work.
201 	 */
202 	if(spd.mem_type == SPD_MEMTYPE_DDR2) {
203 		immap->sysconf.ddrcdr = CONFIG_SYS_DDRCDR_VALUE;
204 	}
205 	udelay(50000);
206 #endif
207 
208 	/*
209 	 * ODT configuration recommendation from DDR Controller Chapter.
210 	 */
211 	odt_rd_cfg = 0;			/* Never assert ODT */
212 	odt_wr_cfg = 0;			/* Never assert ODT */
213 	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
214 		odt_wr_cfg = 1;		/* Assert ODT on writes to CSn */
215 	}
216 
217 	/* Setup DDR chip select register */
218 #ifdef CONFIG_SYS_83XX_DDR_USES_CS0
219 	ddr->csbnds[0].csbnds = (banksize(spd.row_dens) >> 24) - 1;
220 	ddr->cs_config[0] = ( 1 << 31
221 			    | (odt_rd_cfg << 20)
222 			    | (odt_wr_cfg << 16)
223 			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
224 			    | ((spd.nrow_addr - 12) << 8)
225 			    | (spd.ncol_addr - 8) );
226 	debug("\n");
227 	debug("cs0_bnds = 0x%08x\n",ddr->csbnds[0].csbnds);
228 	debug("cs0_config = 0x%08x\n",ddr->cs_config[0]);
229 
230 	if (n_ranks == 2) {
231 		ddr->csbnds[1].csbnds = ( (banksize(spd.row_dens) >> 8)
232 				  | ((banksize(spd.row_dens) >> 23) - 1) );
233 		ddr->cs_config[1] = ( 1<<31
234 				    | (odt_rd_cfg << 20)
235 				    | (odt_wr_cfg << 16)
236 				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
237 				    | ((spd.nrow_addr - 12) << 8)
238 				    | (spd.ncol_addr - 8) );
239 		debug("cs1_bnds = 0x%08x\n",ddr->csbnds[1].csbnds);
240 		debug("cs1_config = 0x%08x\n",ddr->cs_config[1]);
241 	}
242 
243 #else
244 	ddr->csbnds[2].csbnds = (banksize(spd.row_dens) >> 24) - 1;
245 	ddr->cs_config[2] = ( 1 << 31
246 			    | (odt_rd_cfg << 20)
247 			    | (odt_wr_cfg << 16)
248 			    | ((spd.nbanks == 8 ? 1 : 0) << 14)
249 			    | ((spd.nrow_addr - 12) << 8)
250 			    | (spd.ncol_addr - 8) );
251 	debug("\n");
252 	debug("cs2_bnds = 0x%08x\n",ddr->csbnds[2].csbnds);
253 	debug("cs2_config = 0x%08x\n",ddr->cs_config[2]);
254 
255 	if (n_ranks == 2) {
256 		ddr->csbnds[3].csbnds = ( (banksize(spd.row_dens) >> 8)
257 				  | ((banksize(spd.row_dens) >> 23) - 1) );
258 		ddr->cs_config[3] = ( 1<<31
259 				    | (odt_rd_cfg << 20)
260 				    | (odt_wr_cfg << 16)
261 				    | ((spd.nbanks == 8 ? 1 : 0) << 14)
262 				    | ((spd.nrow_addr - 12) << 8)
263 				    | (spd.ncol_addr - 8) );
264 		debug("cs3_bnds = 0x%08x\n",ddr->csbnds[3].csbnds);
265 		debug("cs3_config = 0x%08x\n",ddr->cs_config[3]);
266 	}
267 #endif
268 
269 	/*
270 	 * Figure out memory size in Megabytes.
271 	 */
272 	memsize = n_ranks * banksize(spd.row_dens) / 0x100000;
273 
274 	/*
275 	 * First supported LAW size is 16M, at LAWAR_SIZE_16M == 23.
276 	 */
277 	law_size = 19 + __ilog2(memsize);
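	/*
	 * Worked example (illustrative): LAWAR_SIZE encodes a window of
	 * 2^(size+1) bytes.  For 256 MiB, memsize = 256 and __ilog2(256) = 8,
	 * so law_size = 27 and the window is 2^28 bytes.  The 16M minimum
	 * corresponds to law_size = 23, matching LAWAR_SIZE_16M above.
	 */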
278 
279 	/*
280 	 * Set up LAWBAR for all of DDR.
281 	 */
282 	ecm->bar = CONFIG_SYS_DDR_SDRAM_BASE & 0xfffff000;
283 	ecm->ar  = (LAWAR_EN | LAWAR_TRGT_IF_DDR | (LAWAR_SIZE & law_size));
284 	debug("DDR:bar=0x%08x\n", ecm->bar);
285 	debug("DDR:ar=0x%08x\n", ecm->ar);
286 
287 	/*
288 	 * Find the largest CAS by locating the highest 1 bit
289 	 * in the spd.cas_lat field.  Translate it to a DDR
290 	 * controller field value:
291 	 *
292 	 *	CAS Lat	DDR I	DDR II	Ctrl
293 	 *	Clocks	SPD Bit	SPD Bit	Value
294 	 *	-------	-------	-------	-----
295 	 *	1.0	0		0001
296 	 *	1.5	1		0010
297 	 *	2.0	2	2	0011
298 	 *	2.5	3		0100
299 	 *	3.0	4	3	0101
300 	 *	3.5	5		0110
301 	 *	4.0	6	4	0111
302 	 *	4.5			1000
303 	 *	5.0		5	1001
304 	 */
305 	caslat = __ilog2(spd.cas_lat);
306 	if ((spd.mem_type == SPD_MEMTYPE_DDR)
307 	    && (caslat > 6)) {
308 		printf("DDR I: Invalid SPD CAS Latency: 0x%x.\n", spd.cas_lat);
309 		return 0;
310 	} else if (spd.mem_type == SPD_MEMTYPE_DDR2
311 		   && (caslat < 2 || caslat > 5)) {
312 		printf("DDR II: Invalid SPD CAS Latency: 0x%x.\n",
313 		       spd.cas_lat);
314 		return 0;
315 	}
316 	debug("DDR: caslat SPD bit is %d\n", caslat);
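	/*
	 * Illustrative example (assumed SPD value): a DDR II cas_lat of 0x1c
	 * (CL 2, 3 and 4 supported) gives caslat = 4 here; the code below may
	 * still lower it depending on the actual controller clock.
	 */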
317 
318 	max_bus_clk = 1000 *10 / (((spd.clk_cycle & 0xF0) >> 4) * 10
319 			+ (spd.clk_cycle & 0x0f));
320 	max_data_rate = max_bus_clk * 2;
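	/*
	 * spd.clk_cycle (SPD byte 9) encodes tCKmin with the whole
	 * nanoseconds in the high nibble and, for the typical encodings,
	 * tenths of a nanosecond in the low nibble.  For example, an assumed
	 * value of 0x50 means 5.0 ns, giving a 200 MHz maximum bus clock and
	 * a 400 MT/s maximum data rate.
	 */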
321 
322 	debug("DDR:Module maximum data rate is: %d MHz\n", max_data_rate);
323 
324 	ddrc_clk = gd->mem_clk / 1000000;
325 	effective_data_rate = 0;
326 
327 	if (max_data_rate >= 460) { /* it is DDR2-800, 667, 533 */
328 		if (spd.cas_lat & 0x08)
329 			caslat = 3;
330 		else
331 			caslat = 4;
332 		if (ddrc_clk <= 460 && ddrc_clk > 350)
333 			effective_data_rate = 400;
334 		else if (ddrc_clk <=350 && ddrc_clk > 280)
335 			effective_data_rate = 333;
336 		else if (ddrc_clk <= 280 && ddrc_clk > 230)
337 			effective_data_rate = 266;
338 		else
339 			effective_data_rate = 200;
340 	} else if (max_data_rate >= 390 && max_data_rate < 460) { /* it is DDR 400 */
341 		if (ddrc_clk <= 460 && ddrc_clk > 350) {
342 			/* DDR controller clk at 350~460 */
343 			effective_data_rate = 400; /* 5ns */
344 			caslat = caslat;
345 		} else if (ddrc_clk <= 350 && ddrc_clk > 280) {
346 			/* DDR controller clk at 280~350 */
347 			effective_data_rate = 333; /* 6ns */
348 			if (spd.clk_cycle2 == 0x60)
349 				caslat = caslat - 1;
350 			else
351 				caslat = caslat;
352 		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
353 			/* DDR controller clk at 230~280 */
354 			effective_data_rate = 266; /* 7.5ns */
355 			if (spd.clk_cycle3 == 0x75)
356 				caslat = caslat - 2;
357 			else if (spd.clk_cycle2 == 0x75)
358 				caslat = caslat - 1;
359 			else
360 				caslat = caslat;
361 		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
362 			/* DDR controller clk at 90~230 */
363 			effective_data_rate = 200; /* 10ns */
364 			if (spd.clk_cycle3 == 0xa0)
365 				caslat = caslat - 2;
366 			else if (spd.clk_cycle2 == 0xa0)
367 				caslat = caslat - 1;
368 			else
369 				caslat = caslat;
370 		}
371 	} else if (max_data_rate >= 323) { /* it is DDR 333 */
372 		if (ddrc_clk <= 350 && ddrc_clk > 280) {
373 			/* DDR controller clk at 280~350 */
374 			effective_data_rate = 333; /* 6ns */
375 			caslat = caslat;
376 		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
377 			/* DDR controller clk at 230~280 */
378 			effective_data_rate = 266; /* 7.5ns */
379 			if (spd.clk_cycle2 == 0x75)
380 				caslat = caslat - 1;
381 			else
382 				caslat = caslat;
383 		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
384 			/* DDR controller clk at 90~230 */
385 			effective_data_rate = 200; /* 10ns */
386 			if (spd.clk_cycle3 == 0xa0)
387 				caslat = caslat - 2;
388 			else if (spd.clk_cycle2 == 0xa0)
389 				caslat = caslat - 1;
390 			else
391 				caslat = caslat;
392 		}
393 	} else if (max_data_rate >= 256) { /* it is DDR 266 */
394 		if (ddrc_clk <= 350 && ddrc_clk > 280) {
395 			/* DDR controller clk at 280~350 */
396 			printf("DDR: DDR controller freq is more than "
397 				"max data rate of the module\n");
398 			return 0;
399 		} else if (ddrc_clk <= 280 && ddrc_clk > 230) {
400 			/* DDR controller clk at 230~280 */
401 			effective_data_rate = 266; /* 7.5ns */
402 			caslat = caslat;
403 		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
404 			/* DDR controller clk at 90~230 */
405 			effective_data_rate = 200; /* 10ns */
406 			if (spd.clk_cycle2 == 0xa0)
407 				caslat = caslat - 1;
408 		}
409 	} else if (max_data_rate >= 190) { /* it is DDR 200 */
410 		if (ddrc_clk <= 350 && ddrc_clk > 230) {
411 			/* DDR controller clk at 230~350 */
412 			printf("DDR: DDR controller freq is more than "
413 				"max data rate of the module\n");
414 			return 0;
415 		} else if (ddrc_clk <= 230 && ddrc_clk > 90) {
416 			/* DDR controller clk at 90~230 */
417 			effective_data_rate = 200; /* 10ns */
418 			caslat = caslat;
419 		}
420 	}
421 
422 	debug("DDR:Effective data rate is: %dMHz\n", effective_data_rate);
423 	debug("DDR:The MSB 1 of CAS Latency is: %d\n", caslat);
424 
425 	/*
426 	 * Errata DDR6 workaround: assert input enable 2 cycles earlier.
427 	 * This includes MPC834x Rev1.0/1.1 and MPC8360 Rev1.1/1.2.
428 	 */
429 	if(PVR_MAJ(pvr) <= 1 && spd.mem_type == SPD_MEMTYPE_DDR){
430 		if (caslat == 2)
431 			ddr->debug_reg = 0x201c0000; /* CL=2 */
432 		else if (caslat == 3)
433 			ddr->debug_reg = 0x202c0000; /* CL=2.5 */
434 		else if (caslat == 4)
435 			ddr->debug_reg = 0x202c0000; /* CL=3.0 */
436 
437 		__asm__ __volatile__ ("sync");
438 
439 		debug("Errata DDR6 (debug_reg=0x%08x)\n", ddr->debug_reg);
440 	}
441 
442 	/*
443 	 * Convert caslat clocks to DDR controller value.
444 	 * Force caslat_ctrl to be DDR Controller field-sized.
445 	 */
446 	if (spd.mem_type == SPD_MEMTYPE_DDR) {
447 		caslat_ctrl = (caslat + 1) & 0x07;
448 	} else {
449 		caslat_ctrl =  (2 * caslat - 1) & 0x0f;
450 	}
451 
452 	debug("DDR: effective data rate is %d MHz\n", effective_data_rate);
453 	debug("DDR: caslat SPD bit is %d, controller field is 0x%x\n",
454 	      caslat, caslat_ctrl);
455 
456 	/*
457 	 * Timing Config 0.
458 	 * Avoid writing for DDR I.
459 	 */
460 	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
461 		unsigned char taxpd_clk = 8;		/* By the book. */
462 		unsigned char tmrd_clk = 2;		/* By the book. */
463 		unsigned char act_pd_exit = 2;		/* Empirical? */
464 		unsigned char pre_pd_exit = 6;		/* Empirical? */
465 
466 		ddr->timing_cfg_0 = (0
467 			| ((act_pd_exit & 0x7) << 20)	/* ACT_PD_EXIT */
468 			| ((pre_pd_exit & 0x7) << 16)	/* PRE_PD_EXIT */
469 			| ((taxpd_clk & 0xf) << 8)	/* ODT_PD_EXIT */
470 			| ((tmrd_clk & 0xf) << 0)	/* MRS_CYC */
471 			);
472 		debug("DDR: timing_cfg_0 = 0x%08x\n", ddr->timing_cfg_0);
473 	}
474 
475 	/*
476 	 * For DDR I, WRREC(Twr) and WRTORD(Twtr) are not in SPD,
477 	 * use conservative value.
478 	 * For DDR II, they are bytes 36 and 37, in quarter nanos.
479 	 */
480 
481 	if (spd.mem_type == SPD_MEMTYPE_DDR) {
482 		twr_clk = 3;	/* Clocks */
483 		twtr_clk = 1;	/* Clocks */
484 	} else {
485 		twr_clk = picos_to_clk(spd.twr * 250);
486 		twtr_clk = picos_to_clk(spd.twtr * 250);
487 		if (twtr_clk < 2)
488 			twtr_clk = 2;
489 	}
490 
491 	/*
492 	 * Calculate Trfc, in picos.
493 	 * DDR I:  Byte 42 straight up in ns.
494 	 * DDR II: Byte 40 and 42 swizzled some, in ns.
495 	 */
496 	if (spd.mem_type == SPD_MEMTYPE_DDR) {
497 		trfc = spd.trfc * 1000;		/* up to ps */
498 	} else {
499 		unsigned int byte40_table_ps[8] = {
500 			0,
501 			250,
502 			330,
503 			500,
504 			660,
505 			750,
506 			0,
507 			0
508 		};
509 
510 		trfc = (((spd.trctrfc_ext & 0x1) * 256) + spd.trfc) * 1000
511 			+ byte40_table_ps[(spd.trctrfc_ext >> 1) & 0x7];
512 	}
513 	trfc_clk = picos_to_clk(trfc);
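	/*
	 * Illustrative DDR II example (assumed SPD values): spd.trfc = 105 and
	 * trctrfc_ext = 0x02 select byte40_table_ps[1] = 250, so
	 * trfc = 105000 + 250 = 105250 ps (105.25 ns) before conversion to
	 * clocks.
	 */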
514 
515 	/*
516 	 * Trcd, Byte 29, from quarter nanos to ps and clocks.
517 	 */
518 	trcd_clk = picos_to_clk(spd.trcd * 250) & 0x7;
519 
520 	/*
521 	 * Convert trfc_clk to DDR controller fields.  DDR I should
522 	 * fit in the REFREC field (16-19) of TIMING_CFG_1, but the
523 	 * 83xx controller has an extended REFREC field of three bits.
524 	 * The controller automatically adds 8 clocks to this value,
525 	 * so preadjust it down 8 first before splitting it up.
526 	 */
527 	trfc_low = (trfc_clk - 8) & 0xf;
528 
529 	ddr->timing_cfg_1 =
530 	    (((picos_to_clk(spd.trp * 250) & 0x07) << 28 ) |	/* PRETOACT */
531 	     ((picos_to_clk(spd.tras * 1000) & 0x0f ) << 24 ) | /* ACTTOPRE */
532 	     (trcd_clk << 20 ) |				/* ACTTORW */
533 	     (caslat_ctrl << 16 ) |				/* CASLAT */
534 	     (trfc_low << 12 ) |				/* REFREC */
535 	     ((twr_clk & 0x07) << 8) |				/* WRREC */
536 	     ((picos_to_clk(spd.trrd * 250) & 0x07) << 4) |	/* ACTTOACT */
537 	     ((twtr_clk & 0x07) << 0)				/* WRTORD */
538 	    );
539 
540 	/*
541 	 * Additive Latency
542 	 * For DDR I, 0.
543 	 * For DDR II, with ODT enabled, use "a value" less than ACTTORW,
544 	 * which comes from Trcd, and also note that:
545 	 *	add_lat + caslat must be >= 4
546 	 */
547 	add_lat = 0;
548 	if (spd.mem_type == SPD_MEMTYPE_DDR2
549 	    && (odt_wr_cfg || odt_rd_cfg)
550 	    && (caslat < 4)) {
551 		add_lat = 4 - caslat;
552 		if ((add_lat + caslat) < 4) {
553 			add_lat = 0;
554 		}
555 	}
556 
557 	/*
558 	 * Write Data Delay
559 	 * Historically 0x2 == 4/8 clock delay.
560 	 * Empirically, 0x3 == 6/8 clock delay is suggested for DDR I 266.
561 	 */
562 	wr_data_delay = 2;
563 #ifdef CONFIG_SYS_DDR_WRITE_DATA_DELAY
564 	wr_data_delay = CONFIG_SYS_DDR_WRITE_DATA_DELAY;
565 #endif
566 
567 	/*
568 	 * Write Latency
569 	 * Read to Precharge
570 	 * Minimum CKE Pulse Width.
571 	 * Four Activate Window
572 	 */
573 	if (spd.mem_type == SPD_MEMTYPE_DDR) {
574 		/*
575 		 * This is a lie.  It should really be 1, but if it is
576 		 * set to 1, bits overlap into the old controller's
577 		 * otherwise unused ACSM field.  If we leave it 0, then
578 		 * the HW will magically treat it as 1 for DDR 1.  Oh Yea.
579 		 */
580 		wr_lat = 0;
581 
582 		trtp_clk = 2;		/* By the book. */
583 		cke_min_clk = 1;	/* By the book. */
584 		four_act = 1;		/* By the book. */
585 
586 	} else {
587 		wr_lat = caslat - 1;
588 
589 		/* Convert SPD value from quarter nanos to picos. */
590 		trtp_clk = picos_to_clk(spd.trtp * 250);
591 		if (trtp_clk < 2)
592 			trtp_clk = 2;
593 		trtp_clk += add_lat;
594 
595 		cke_min_clk = 3;	/* By the book. */
596 		four_act = picos_to_clk(37500);	/* By the book. 1k pages? */
597 	}
598 
599 	/*
600 	 * Empirically set ~MCAS-to-preamble override for DDR 2.
601 	 * Your mileage will vary.
602 	 */
603 	cpo = 0;
604 	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
605 #ifdef CONFIG_SYS_DDR_CPO
606 		cpo = CONFIG_SYS_DDR_CPO;
607 #else
608 		if (effective_data_rate == 266) {
609 			cpo = 0x4;		/* READ_LAT + 1/2 */
610 		} else if (effective_data_rate == 333) {
611 			cpo = 0x6;		/* READ_LAT + 1 */
612 		} else if (effective_data_rate == 400) {
613 			cpo = 0x7;		/* READ_LAT + 5/4 */
614 		} else {
615 			/* Automatic calibration */
616 			cpo = 0x1f;
617 		}
618 #endif
619 	}
620 
621 	ddr->timing_cfg_2 = (0
622 		| ((add_lat & 0x7) << 28)		/* ADD_LAT */
623 		| ((cpo & 0x1f) << 23)			/* CPO */
624 		| ((wr_lat & 0x7) << 19)		/* WR_LAT */
625 		| ((trtp_clk & 0x7) << 13)		/* RD_TO_PRE */
626 		| ((wr_data_delay & 0x7) << 10)		/* WR_DATA_DELAY */
627 		| ((cke_min_clk & 0x7) << 6)		/* CKE_PLS */
628 		| ((four_act & 0x1f) << 0)		/* FOUR_ACT */
629 		);
630 
631 	debug("DDR:timing_cfg_1=0x%08x\n", ddr->timing_cfg_1);
632 	debug("DDR:timing_cfg_2=0x%08x\n", ddr->timing_cfg_2);
633 
634 	/* Check DIMM data bus width */
635 	if (spd.dataw_lsb < 64) {
636 		if (spd.mem_type == SPD_MEMTYPE_DDR)
637 			burstlen = 0x03; /* 32 bit data bus, burst len is 8 */
638 		else
639 			burstlen = 0x02; /* 32 bit data bus, burst len is 4 */
640 		debug("\n   DDR DIMM: data bus width is 32 bit");
641 	} else {
642 		burstlen = 0x02; /* Others act as 64 bit bus, burst len is 4 */
643 		debug("\n   DDR DIMM: data bus width is 64 bit");
644 	}
645 
646 	/* Is this an ECC DDR chip? */
647 	if (spd.config == 0x02)
648 		debug(" with ECC\n");
649 	else
650 		debug(" without ECC\n");
651 
652 	/* Burst length is always 4 for 64 bit data bus, 8 for 32 bit data bus,
653 	   Burst type is sequential
654 	 */
655 	if (spd.mem_type == SPD_MEMTYPE_DDR) {
656 		switch (caslat) {
657 		case 1:
658 			ddr->sdram_mode = 0x50 | burstlen; /* CL=1.5 */
659 			break;
660 		case 2:
661 			ddr->sdram_mode = 0x20 | burstlen; /* CL=2.0 */
662 			break;
663 		case 3:
664 			ddr->sdram_mode = 0x60 | burstlen; /* CL=2.5 */
665 			break;
666 		case 4:
667 			ddr->sdram_mode = 0x30 | burstlen; /* CL=3.0 */
668 			break;
669 		default:
670 			printf("DDR:only CL 1.5, 2.0, 2.5, 3.0 is supported\n");
671 			return 0;
672 		}
673 	} else {
674 		mode_odt_enable = 0x0;                  /* Default disabled */
675 		if (odt_wr_cfg || odt_rd_cfg) {
676 			/*
677 			 * Bits 6 and 2 in Extended MRS(1)
678 			 * Bit 2 == 0x04 == 75 Ohm, with 2 DIMM modules.
679 			 * Bit 6 == 0x40 == 150 Ohm, with 1 DIMM module.
680 			 */
681 			mode_odt_enable = 0x40;         /* 150 Ohm */
682 		}
683 
684 		ddr->sdram_mode =
685 			(0
686 			 | (1 << (16 + 10))             /* DQS Differential disable */
687 #ifdef CONFIG_SYS_DDR_MODE_WEAK
688 			 | (1 << (16 + 1))		/* weak driver (~60%) */
689 #endif
690 			 | (add_lat << (16 + 3))        /* Additive Latency in EMRS1 */
691 			 | (mode_odt_enable << 16)      /* ODT Enable in EMRS1 */
692 			 | ((twr_clk - 1) << 9)         /* Write Recovery Autopre */
693 			 | (caslat << 4)                /* caslat */
694 			 | (burstlen << 0)              /* Burst length */
695 			);
696 	}
697 	debug("DDR:sdram_mode=0x%08x\n", ddr->sdram_mode);
698 
699 	/*
700 	 * Clear EMRS2 and EMRS3.
701 	 */
702 	ddr->sdram_mode2 = 0;
703 	debug("DDR: sdram_mode2 = 0x%08x\n", ddr->sdram_mode2);
704 
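	/*
	 * spd.refresh is SPD byte 12: the low bits select the refresh period
	 * (0x00 = 15.625 us, 0x01 = 3.9 us, 0x02 = 7.8 us, 0x03 = 31.3 us,
	 * 0x04 = 62.5 us, 0x05 = 125 us) and bit 7 indicates self-refresh
	 * support, which is why each case below also accepts the 0x8x form.
	 */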
705 	switch (spd.refresh) {
706 		case 0x00:
707 		case 0x80:
708 			refresh_clk = picos_to_clk(15625000);
709 			break;
710 		case 0x01:
711 		case 0x81:
712 			refresh_clk = picos_to_clk(3900000);
713 			break;
714 		case 0x02:
715 		case 0x82:
716 			refresh_clk = picos_to_clk(7800000);
717 			break;
718 		case 0x03:
719 		case 0x83:
720 			refresh_clk = picos_to_clk(31300000);
721 			break;
722 		case 0x04:
723 		case 0x84:
724 			refresh_clk = picos_to_clk(62500000);
725 			break;
726 		case 0x05:
727 		case 0x85:
728 			refresh_clk = picos_to_clk(125000000);
729 			break;
730 		default:
731 			refresh_clk = 0x512;
732 			break;
733 	}
734 
735 	/*
736 	 * Set BSTOPRE to 0x100 for page mode
737 	 * If auto-precharge is used, set BSTOPRE = 0
738 	 */
739 	ddr->sdram_interval = ((refresh_clk & 0x3fff) << 16) | 0x100;
740 	debug("DDR:sdram_interval=0x%08x\n", ddr->sdram_interval);
741 
742 	/*
743 	 * SDRAM Cfg 2
744 	 */
745 	odt_cfg = 0;
746 #ifndef CONFIG_NEVER_ASSERT_ODT_TO_CPU
747 	if (odt_rd_cfg | odt_wr_cfg) {
748 		odt_cfg = 0x2;		/* ODT to IOs during reads */
749 	}
750 #endif
751 	if (spd.mem_type == SPD_MEMTYPE_DDR2) {
752 		ddr->sdram_cfg2 = (0
753 			    | (0 << 26)	/* True DQS */
754 			    | (odt_cfg << 21)	/* ODT only read */
755 			    | (1 << 12)	/* 1 refresh at a time */
756 			    );
757 
758 		debug("DDR: sdram_cfg2  = 0x%08x\n", ddr->sdram_cfg2);
759 	}
760 
761 #ifdef CONFIG_SYS_DDR_SDRAM_CLK_CNTL	/* Optional platform specific value */
762 	ddr->sdram_clk_cntl = CONFIG_SYS_DDR_SDRAM_CLK_CNTL;
763 #endif
764 	debug("DDR:sdram_clk_cntl=0x%08x\n", ddr->sdram_clk_cntl);
765 
766 	asm("sync;isync");
767 
768 	udelay(600);
769 
770 	/*
771 	 * Figure out the settings for the sdram_cfg register. Build up
772 	 * the value in 'sdram_cfg' before writing since the write into
773 	 * the register will actually enable the memory controller, and all
774 	 * settings must be done before enabling.
775 	 *
776 	 * sdram_cfg[0]   = 1 (ddr sdram logic enable)
777 	 * sdram_cfg[1]   = 1 (self-refresh-enable)
778 	 * sdram_cfg[5:7] = (SDRAM type = DDR SDRAM)
779 	 *			010 DDR 1 SDRAM
780 	 *			011 DDR 2 SDRAM
781 	 * sdram_cfg[12] = 0 (32_BE =0 , 64 bit bus mode)
782 	 * sdram_cfg[13] = 0 (8_BE =0, 4-beat bursts)
783 	 */
784 	if (spd.mem_type == SPD_MEMTYPE_DDR)
785 		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR1;
786 	else
787 		sdram_type = SDRAM_CFG_SDRAM_TYPE_DDR2;
788 
789 	sdram_cfg = (0
790 		     | SDRAM_CFG_MEM_EN		/* DDR enable */
791 		     | SDRAM_CFG_SREN		/* Self refresh */
792 		     | sdram_type		/* SDRAM type */
793 		     );
794 
795 	/* sdram_cfg[3] = RD_EN - registered DIMM enable */
796 	if (spd.mod_attr & 0x02)
797 		sdram_cfg |= SDRAM_CFG_RD_EN;
798 
799 	/* The DIMM is 32bit width */
800 	if (spd.dataw_lsb < 64) {
801 		if (spd.mem_type == SPD_MEMTYPE_DDR)
802 			sdram_cfg |= SDRAM_CFG_32_BE | SDRAM_CFG_8_BE;
803 		if (spd.mem_type == SPD_MEMTYPE_DDR2)
804 			sdram_cfg |= SDRAM_CFG_32_BE;
805 	}
806 
807 	ddrc_ecc_enable = 0;
808 
809 #if defined(CONFIG_DDR_ECC)
810 	/* Enable ECC with sdram_cfg[2] */
811 	if (spd.config == 0x02) {
812 		sdram_cfg |= 0x20000000;
813 		ddrc_ecc_enable = 1;
814 		/* disable error detection */
815 		ddr->err_disable = ~ECC_ERROR_ENABLE;
816 		/* set single bit error threshold to maximum value,
817 		 * reset counter to zero */
818 		ddr->err_sbe = (255 << ECC_ERROR_MAN_SBET_SHIFT) |
819 				(0 << ECC_ERROR_MAN_SBEC_SHIFT);
820 	}
821 
822 	debug("DDR:err_disable=0x%08x\n", ddr->err_disable);
823 	debug("DDR:err_sbe=0x%08x\n", ddr->err_sbe);
824 #endif
825 	debug("   DDRC ECC mode: %s\n", ddrc_ecc_enable ? "ON":"OFF");
826 
827 #if defined(CONFIG_DDR_2T_TIMING)
828 	/*
829 	 * Enable 2T timing by setting sdram_cfg[16].
830 	 */
831 	sdram_cfg |= SDRAM_CFG_2T_EN;
832 #endif
833 	/* Enable controller, and GO! */
834 	ddr->sdram_cfg = sdram_cfg;
835 	asm("sync;isync");
836 	udelay(500);
837 
838 	debug("DDR:sdram_cfg=0x%08x\n", ddr->sdram_cfg);
839 	return memsize; /*in MBytes*/
840 }
841 #endif /* CONFIG_SPD_EEPROM */
842 
843 #if defined(CONFIG_DDR_ECC) && !defined(CONFIG_ECC_INIT_VIA_DDRCONTROLLER)
844 /*
845  * Use the timebase counter; get_timer() is not yet available
846  * at this point of initialization.
847  */
848 static __inline__ unsigned long get_tbms (void)
849 {
850 	unsigned long tbl;
851 	unsigned long tbu1, tbu2;
852 	unsigned long ms;
853 	unsigned long long tmp;
854 
855 	ulong tbclk = get_tbclk();
856 
857 	/* get the timebase ticks */
858 	do {
859 		asm volatile ("mftbu %0":"=r" (tbu1):);
860 		asm volatile ("mftb %0":"=r" (tbl):);
861 		asm volatile ("mftbu %0":"=r" (tbu2):);
862 	} while (tbu1 != tbu2);
863 
864 	/* convert ticks to ms */
865 	tmp = (unsigned long long)(tbu1);
866 	tmp = (tmp << 32);
867 	tmp += (unsigned long long)(tbl);
868 	ms = tmp/(tbclk/1000);
869 
870 	return ms;
871 }
872 
873 /*
874  * Initialize all of memory for ECC, then enable errors.
875  */
876 void ddr_enable_ecc(unsigned int dram_size)
877 {
878 	volatile immap_t *immap = (immap_t *)CONFIG_SYS_IMMR;
879 	volatile ddr83xx_t *ddr= &immap->ddr;
880 	unsigned long t_start, t_end;
881 	register u64 *p;
882 	register uint size;
883 	unsigned int pattern[2];
884 
885 	icache_enable();
886 	t_start = get_tbms();
887 	pattern[0] = 0xdeadbeef;
888 	pattern[1] = 0xdeadbeef;
889 
890 #if defined(CONFIG_DDR_ECC_INIT_VIA_DMA)
891 	dma_meminit(pattern[0], dram_size);
892 #else
893 	debug("ddr init: CPU FP write method\n");
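	/*
	 * The ECC check bits cover a full data-bus word, so every location
	 * must be written once with a full-width store before it is read,
	 * otherwise reads would report uninitialized check bits as errors.
	 * ppcDWstore() issues a single 64-bit store per location (the
	 * "CPU FP write method" noted above).
	 */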
894 	size = dram_size;
895 	for (p = 0; p < (u64*)(size); p++) {
896 		ppcDWstore((u32*)p, pattern);
897 	}
898 	__asm__ __volatile__ ("sync");
899 #endif
900 
901 	t_end = get_tbms();
902 	icache_disable();
903 
904 	debug("\nREADY!!\n");
905 	debug("ddr init duration: %ld ms\n", t_end - t_start);
906 
907 	/* Clear All ECC Errors */
908 	if ((ddr->err_detect & ECC_ERROR_DETECT_MME) == ECC_ERROR_DETECT_MME)
909 		ddr->err_detect |= ECC_ERROR_DETECT_MME;
910 	if ((ddr->err_detect & ECC_ERROR_DETECT_MBE) == ECC_ERROR_DETECT_MBE)
911 		ddr->err_detect |= ECC_ERROR_DETECT_MBE;
912 	if ((ddr->err_detect & ECC_ERROR_DETECT_SBE) == ECC_ERROR_DETECT_SBE)
913 		ddr->err_detect |= ECC_ERROR_DETECT_SBE;
914 	if ((ddr->err_detect & ECC_ERROR_DETECT_MSE) == ECC_ERROR_DETECT_MSE)
915 		ddr->err_detect |= ECC_ERROR_DETECT_MSE;
916 
917 	/* Disable ECC-Interrupts */
918 	ddr->err_int_en &= ECC_ERR_INT_DISABLE;
919 
920 	/* Enable errors for ECC */
921 	ddr->err_disable &= ECC_ERROR_ENABLE;
922 
923 	__asm__ __volatile__ ("sync");
924 	__asm__ __volatile__ ("isync");
925 }
926 #endif	/* CONFIG_DDR_ECC */
927