• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2018-2024, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
5  */
6 
7 #include <errno.h>
8 #include <stddef.h>
9 
10 #include <arch.h>
11 #include <arch_helpers.h>
12 #include <common/debug.h>
13 #include <drivers/clk.h>
14 #include <drivers/delay_timer.h>
15 #include <drivers/st/stm32mp1_ddr.h>
16 #include <drivers/st/stm32mp1_ddr_regs.h>
17 #include <drivers/st/stm32mp1_pwr.h>
18 #include <drivers/st/stm32mp1_ram.h>
19 #include <drivers/st/stm32mp_ddr.h>
20 #include <lib/mmio.h>
21 #include <plat/common/platform.h>
22 
23 #include <platform_def.h>
24 
/*
 * Register descriptor builders: pair the offset of register 'x' inside the
 * memory-mapped controller/PHY register layout with the offset of the
 * matching field in the device-tree parameter structure 'y'.
 */
#define DDRCTL_REG(x, y)					\
	{							\
		.offset = offsetof(struct stm32mp_ddrctl, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

#define DDRPHY_REG(x, y)					\
	{							\
		.offset = offsetof(struct stm32mp_ddrphy, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

/*
 * PARAMETERS: values retrieved from the device tree.
 *             Sizes and ordering must stay aligned with the DT binding:
 *             modification NOT ALLOWED !!!
 */
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */
#else
#define DDRCTL_REG_PERF_SIZE	11	/* st,ctl-perf */
#endif

#if STM32MP_DDR_32BIT_INTERFACE
#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#else
#define DDRPHY_REG_REG_SIZE	9	/* st,phy-reg */
#endif
#define	DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */
57 
#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
/*
 * Controller static configuration registers, in the exact order of the
 * "st,ctl-reg" device-tree property (see size/order warning above).
 */
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};
86 
#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
/*
 * Controller timing registers, in the exact order of the
 * "st,ctl-timing" device-tree property.
 */
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};
102 
#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
/*
 * Controller address-mapping registers, in the exact order of the
 * "st,ctl-map" device-tree property.
 */
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};
115 
#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
/*
 * Controller performance/QoS registers, in the exact order of the
 * "st,ctl-perf" device-tree property. The second AXI port entries are
 * only present when STM32MP_DDR_DUAL_AXI_PORT is enabled.
 */
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
#endif
};
138 
#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
/*
 * PHY static configuration registers, in the exact order of the
 * "st,phy-reg" device-tree property. Byte lanes 2/3 are only present
 * on a 32-bit interface.
 */
static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
#if STM32MP_DDR_32BIT_INTERFACE
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
#endif
};
155 
#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
/*
 * PHY timing and mode registers, in the exact order of the
 * "st,phy-timing" device-tree property.
 */
static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};
169 
/*
 * REGISTERS ARRAY: used to parse device tree and interactive mode.
 * One entry per register family, pointing to the descriptor table above,
 * its element count and the base address of the owning peripheral.
 */
static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = DDRCTL_REG_REG_SIZE,
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = DDRCTL_REG_TIMING_SIZE,
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = DDRCTL_REG_PERF_SIZE,
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = DDRCTL_REG_MAP_SIZE,
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = DDRPHY_REG_REG_SIZE,
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = DDRPHY_REG_TIMING_SIZE,
		.base = DDRPHY_BASE
	},
};
211 
stm32mp1_ddrphy_idone_wait(struct stm32mp_ddrphy * phy)212 static void stm32mp1_ddrphy_idone_wait(struct stm32mp_ddrphy *phy)
213 {
214 	uint32_t pgsr;
215 	int error = 0;
216 	uint64_t timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
217 
218 	do {
219 		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);
220 
221 		VERBOSE("  > [0x%lx] pgsr = 0x%x &\n",
222 			(uintptr_t)&phy->pgsr, pgsr);
223 
224 		if (timeout_elapsed(timeout)) {
225 			panic();
226 		}
227 
228 		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
229 			VERBOSE("DQS Gate Trainig Error\n");
230 			error++;
231 		}
232 
233 		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
234 			VERBOSE("DQS Gate Trainig Intermittent Error\n");
235 			error++;
236 		}
237 
238 		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
239 			VERBOSE("DQS Drift Error\n");
240 			error++;
241 		}
242 
243 		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
244 			VERBOSE("Read Valid Training Error\n");
245 			error++;
246 		}
247 
248 		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
249 			VERBOSE("Read Valid Training Intermittent Error\n");
250 			error++;
251 		}
252 	} while (((pgsr & DDRPHYC_PGSR_IDONE) == 0U) && (error == 0));
253 	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
254 		(uintptr_t)&phy->pgsr, pgsr);
255 }
256 
/*
 * Launch a PHY initialization step: write the requested PIR command with
 * the INIT trigger bit set, then wait for the PHY to report completion.
 */
static void stm32mp1_ddrphy_init(struct stm32mp_ddrphy *phy, uint32_t pir)
{
	uintptr_t pir_addr = (uintptr_t)&phy->pir;
	uint32_t val = pir | DDRPHYC_PIR_INIT;

	mmio_write_32(pir_addr, val);
	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
		pir_addr, val, mmio_read_32(pir_addr));

	/* 10 configuration clocks must elapse before polling can start */
	udelay(DDR_DELAY_10US);

	/* Wait for DRAM initialization and gate training evaluation */
	stm32mp1_ddrphy_idone_wait(phy);
}
272 
273 /* Wait quasi dynamic register update */
stm32mp1_wait_operating_mode(struct stm32mp_ddr_priv * priv,uint32_t mode)274 static void stm32mp1_wait_operating_mode(struct stm32mp_ddr_priv *priv, uint32_t mode)
275 {
276 	uint64_t timeout;
277 	uint32_t stat;
278 	int break_loop = 0;
279 
280 	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
281 	for ( ; ; ) {
282 		uint32_t operating_mode;
283 		uint32_t selref_type;
284 
285 		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
286 		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
287 		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
288 		VERBOSE("[0x%lx] stat = 0x%x\n",
289 			(uintptr_t)&priv->ctl->stat, stat);
290 		if (timeout_elapsed(timeout)) {
291 			panic();
292 		}
293 
294 		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
295 			/*
296 			 * Self-refresh due to software
297 			 * => checking also STAT.selfref_type.
298 			 */
299 			if ((operating_mode ==
300 			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
301 			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
302 				break_loop = 1;
303 			}
304 		} else if (operating_mode == mode) {
305 			break_loop = 1;
306 		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
307 			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
308 			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
309 			/* Normal mode: handle also automatic self refresh */
310 			break_loop = 1;
311 		}
312 
313 		if (break_loop == 1) {
314 			break;
315 		}
316 	}
317 
318 	VERBOSE("[0x%lx] stat = 0x%x\n",
319 		(uintptr_t)&priv->ctl->stat, stat);
320 }
321 
/*
 * Mode Register Writes (MRW or MRS).
 * Issue a mode-register write to the SDRAM through the controller's
 * MRCTRL0/MRCTRL1 mailbox. 'addr' is the mode register number, 'data'
 * its new value. Blocks (busy-waits, no timeout) until the transaction
 * completes. The three-step sequence below follows the controller's
 * required MR access protocol and must not be reordered.
 */
static void stm32mp1_mode_register_write(struct stm32mp_ddr_priv *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	/* Wait for the self-clearing transaction to finish */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
	       DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}
376 
/*
 * Switch DDR3 from DLL-on to DLL-off.
 * Implements the controller's documented DLL-off entry sequence (the
 * numbered steps below): drain the controller, reprogram the SDRAM mode
 * registers via MRS, enter software self-refresh, flip the DLL-off
 * controls in both controller and PHY, then exit self-refresh.
 * The step order is hardware-mandated and must not be changed.
 */
static void stm32mp1_ddr3_dll_off(struct stm32mp_ddr_priv *priv)
{
	/* Target MR values are taken from the PHY's programmed copies */
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0 ,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
	 *    and must be updated within a sw_done window.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers.
	 *     This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	/* Disable the PHY DLLs: AC lane, then every data byte lane */
	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#if STM32MP_DDR_32BIT_INTERFACE
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#endif

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	stm32mp_ddr_sw_selfref_exit(priv->ctl);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	stm32mp_ddr_enable_host_interface(priv->ctl);
}
526 
/*
 * Disable auto-refresh, power-down and DFI init-complete reporting before
 * running PHY training. All three fields are quasi-dynamic, so the update
 * is wrapped in a sw_done window; the order of the three writes follows
 * the training preparation sequence and must be kept.
 */
static void stm32mp1_refresh_disable(struct stm32mp_ddrctl *ctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	/* Quasi-dynamic register update*/
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}
538 
/*
 * Undo stm32mp1_refresh_disable() after training: restore auto-refresh
 * and power-down to the values requested by the configuration
 * (rfshctl3/pwrctl are the device-tree values), and re-enable DFI
 * init-complete. Performed inside a sw_done window (quasi-dynamic fields).
 */
static void stm32mp1_refresh_restore(struct stm32mp_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	/* Re-enable auto refresh only if the configuration did not disable it */
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	/* Re-enable power-down only if the configuration requested it */
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}
555 
/*
 * Full DDR controller and PHY initialization, driven by the settings
 * parsed from the device tree into 'config'. Follows the numbered "DDR
 * INIT SEQUENCE" steps below; panics on unsupported DDR type, clock
 * enable failure or polling timeout.
 */
void stm32mp1_ddr_init(struct stm32mp_ddr_priv *priv,
		       struct stm32mp_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	/* Board power supplies are set up according to the MSTR DDR type */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size  = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *     nota: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(DDR_DELAY_2US);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);

	/*  2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	stm32mp_ddr_set_reg(priv, REGPHY_REG, &config->p_reg, ddr_registers);
	stm32mp_ddr_set_reg(priv, REGPHY_TIMING, &config->p_timing, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 *  4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *     Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 *  5. Indicate to PUBL that controller performs SDRAM initialization
	 *     by setting PIR.INIT and PIR CTLDINIT and poll PGSR.IDONE
	 *     DRAM init is done by PHY, init0.skip_dram.init = 1
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 *  6. SET DFIMISC.dfi_init_complete_en to 1
	 *  Enable quasi-dynamic register programming.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/*
	 *  7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *     by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 *  8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 *  9. Program PUBL PGCR to enable refresh during training
	 *     and rank to train
	 *     not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 * to run
	 * RVTRN is executed only on LPDDR2/LPDDR3
	 */
	pir = DDRPHYC_PIR_QSTRN;
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) == 0U) {
		pir |= DDRPHYC_PIR_RVTRN;
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back registers in step 8 to the original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	stm32mp_ddr_enable_axi_port(priv->ctl);
}
759