• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2024, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
6 
7 #include <errno.h>
8 
9 #include <arch_helpers.h>
10 #include <common/debug.h>
11 
12 #include <drivers/delay_timer.h>
13 #include <drivers/st/stm32mp2_ddr.h>
14 #include <drivers/st/stm32mp2_ddr_helpers.h>
15 #include <drivers/st/stm32mp2_ddr_regs.h>
16 #include <drivers/st/stm32mp_ddr.h>
17 
18 #include <lib/mmio.h>
19 
20 #include <platform_def.h>
21 
22 /* HW idle period (unit: Multiples of 32 DFI clock cycles) */
23 #define HW_IDLE_PERIOD			0x3U
24 
25 static enum stm32mp2_ddr_sr_mode saved_ddr_sr_mode;
26 
/*
 * Weak default for the DDRDBG peripheral base address.
 * Platforms that expose DDRDBG override this; the default returns 0.
 */
#pragma weak stm32_ddrdbg_get_base
uintptr_t stm32_ddrdbg_get_base(void)
{
	return 0U;
}
32 
/*
 * Prepare modification of quasi-dynamic registers: set the Group 1
 * condition (dbg1.dis_dq) first, then the common Group 3 conditions.
 * Must be balanced by unset_qd1_qd3_update_conditions().
 */
static void set_qd1_qd3_update_conditions(struct stm32mp_ddrctl *ctl)
{
	/* Group 1: stop dequeuing read/write commands while updating */
	mmio_setbits_32((uintptr_t)&ctl->dbg1, DDRCTRL_DBG1_DIS_DQ);

	stm32mp_ddr_set_qd3_update_conditions(ctl);
}
39 
/*
 * Revert set_qd1_qd3_update_conditions(), in reverse order: clear the
 * Group 3 conditions first, then re-enable command dequeuing (Group 1).
 */
static void unset_qd1_qd3_update_conditions(struct stm32mp_ddrctl *ctl)
{
	stm32mp_ddr_unset_qd3_update_conditions(ctl);

	mmio_clrbits_32((uintptr_t)&ctl->dbg1, DDRCTRL_DBG1_DIS_DQ);
}
46 
wait_dfi_init_complete(struct stm32mp_ddrctl * ctl)47 static void wait_dfi_init_complete(struct stm32mp_ddrctl *ctl)
48 {
49 	uint64_t timeout;
50 	uint32_t dfistat;
51 
52 	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
53 	do {
54 		dfistat = mmio_read_32((uintptr_t)&ctl->dfistat);
55 		VERBOSE("[0x%lx] dfistat = 0x%x ", (uintptr_t)&ctl->dfistat, dfistat);
56 
57 		if (timeout_elapsed(timeout)) {
58 			panic();
59 		}
60 	} while ((dfistat & DDRCTRL_DFISTAT_DFI_INIT_COMPLETE) == 0U);
61 
62 	VERBOSE("[0x%lx] dfistat = 0x%x\n", (uintptr_t)&ctl->dfistat, dfistat);
63 }
64 
/*
 * Disable the DFI low-power interface for self refresh, then wait until
 * the low-power handshake is de-asserted (dfistat.dfi_lp_ack clear) and
 * the controller is no longer in self-refresh operating mode.
 * Panics if DDR_TIMEOUT_US_1S elapses first.
 */
static void disable_dfi_low_power_interface(struct stm32mp_ddrctl *ctl)
{
	uint64_t timeout;
	uint32_t dfistat;
	uint32_t stat;

	mmio_clrbits_32((uintptr_t)&ctl->dfilpcfg0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);

	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
	do {
		dfistat = mmio_read_32((uintptr_t)&ctl->dfistat);
		stat = mmio_read_32((uintptr_t)&ctl->stat);
		VERBOSE("[0x%lx] dfistat = 0x%x ", (uintptr_t)&ctl->dfistat, dfistat);
		VERBOSE("[0x%lx] stat = 0x%x ", (uintptr_t)&ctl->stat, stat);

		if (timeout_elapsed(timeout)) {
			panic();
		}
		/* Loop while LP ack still asserted OR still in self refresh */
	} while (((dfistat & DDRCTRL_DFISTAT_DFI_LP_ACK) != 0U) ||
		 ((stat & DDRCTRL_STAT_OPERATING_MODE_MASK) == DDRCTRL_STAT_OPERATING_MODE_SR));

	VERBOSE("[0x%lx] dfistat = 0x%x\n", (uintptr_t)&ctl->dfistat, dfistat);
	VERBOSE("[0x%lx] stat = 0x%x\n", (uintptr_t)&ctl->stat, stat);
}
89 
/*
 * Run the DFI frequency-change handshake on the DDR controller.
 * sr_entry: true when entering self refresh (dfi_frequency set,
 * dfi_init_complete_en cleared), false on exit (the opposite).
 */
void ddr_activate_controller(struct stm32mp_ddrctl *ctl, bool sr_entry)
{
	/*
	 * Manage quasi-dynamic registers modification
	 * dfimisc.dfi_frequency : Group 1
	 * dfimisc.dfi_init_complete_en and dfimisc.dfi_init_start : Group 3
	 */
	set_qd1_qd3_update_conditions(ctl);

	if (sr_entry) {
		mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_FREQUENCY);
	} else {
		mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_FREQUENCY);
	}

	/* Pulse dfi_init_start to trigger the handshake */
	mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_START);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_START);

	wait_dfi_init_complete(ctl);

	udelay(DDR_DELAY_1US);

	if (sr_entry) {
		mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	} else {
		mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	}

	udelay(DDR_DELAY_1US);

	unset_qd1_qd3_update_conditions(ctl);
}
122 
123 #if STM32MP_LPDDR4_TYPE
/*
 * Disable DRAM drift compensation in the PHY (LPDDR4 only), writing the
 * INITENG seq0b disable flags through the PHY internal CSR window.
 * APB CSR access is opened before the write and closed afterwards.
 */
static void disable_phy_ddc(void)
{
	/* Enable APB access to internal CSR registers */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL, 0U);
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_UCCLKEN |
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);

	/* Disable DRAM drift compensation */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_INITENG0_P0_SEQ0BDISABLEFLAG6, 0xFFFFU);

	/* Disable APB access to internal CSR registers */
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL,
		      DDRPHY_APBONLY0_MICROCONTMUXSEL_MICROCONTMUXSEL);
}
141 #endif /* STM32MP_LPDDR4_TYPE */
142 
ddr_wait_lp3_mode(bool sr_entry)143 void ddr_wait_lp3_mode(bool sr_entry)
144 {
145 	uint64_t timeout;
146 	bool repeat_loop = false;
147 
148 	/* Enable APB access to internal CSR registers */
149 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL, 0U);
150 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
151 		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_UCCLKEN |
152 		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
153 
154 	timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
155 	do {
156 		uint16_t phyinlpx = mmio_read_32(stm32mp_ddrphyc_base() +
157 						 DDRPHY_INITENG0_P0_PHYINLPX);
158 
159 		if (timeout_elapsed(timeout)) {
160 			panic();
161 		}
162 
163 		if (sr_entry) {
164 			repeat_loop = (phyinlpx & DDRPHY_INITENG0_P0_PHYINLPX_PHYINLP3) == 0U;
165 		} else {
166 			repeat_loop = (phyinlpx & DDRPHY_INITENG0_P0_PHYINLPX_PHYINLP3) != 0U;
167 		}
168 	} while (repeat_loop);
169 
170 	/* Disable APB access to internal CSR registers */
171 #if STM32MP_DDR3_TYPE || STM32MP_DDR4_TYPE
172 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES, 0U);
173 #else /* STM32MP_LPDDR4_TYPE */
174 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_DRTUB0_UCCLKHCLKENABLES,
175 		      DDRPHY_DRTUB0_UCCLKHCLKENABLES_HCLKEN);
176 #endif /* STM32MP_DDR3_TYPE || STM32MP_DDR4_TYPE */
177 	mmio_write_32(stm32mp_ddrphyc_base() + DDRPHY_APBONLY0_MICROCONTMUXSEL,
178 		      DDRPHY_APBONLY0_MICROCONTMUXSEL_MICROCONTMUXSEL);
179 }
180 
/*
 * Poll DDRCTRL_STAT until the controller has entered (is_entry == true)
 * or left (is_entry == false) self refresh. For LPDDR4, entry also
 * requires the SRPD (self refresh power down) state.
 * Returns 0 on success, -ETIMEDOUT after DDR_TIMEOUT_US_1S.
 */
static int sr_loop(bool is_entry)
{
	uint32_t type;
	uint32_t state __maybe_unused;
	uint64_t timeout = timeout_init_us(DDR_TIMEOUT_US_1S);
	bool repeat_loop = false;

	/*
	 * Wait for DDRCTRL to be out of or back to "normal/mission mode".
	 * Consider also SRPD mode for LPDDR4 only.
	 */
	do {
		type = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_STAT) &
		       DDRCTRL_STAT_SELFREF_TYPE_MASK;
#if STM32MP_LPDDR4_TYPE
		state = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_STAT) &
		       DDRCTRL_STAT_SELFREF_STATE_MASK;
#endif /* STM32MP_LPDDR4_TYPE */

		if (timeout_elapsed(timeout)) {
			return -ETIMEDOUT;
		}

		if (is_entry) {
#if STM32MP_LPDDR4_TYPE
			/* Loop until in self refresh AND in SRPD state */
			repeat_loop = (type == 0x0U) || (state != DDRCTRL_STAT_SELFREF_STATE_SRPD);
#else /* !STM32MP_LPDDR4_TYPE */
			repeat_loop = (type == 0x0U);
#endif /* STM32MP_LPDDR4_TYPE */
		} else {
#if STM32MP_LPDDR4_TYPE
			/* Loop until both type and state report normal mode */
			repeat_loop = (type != 0x0U) || (state != 0x0U);
#else /* !STM32MP_LPDDR4_TYPE */
			repeat_loop = (type != 0x0U);
#endif /* STM32MP_LPDDR4_TYPE */
		}
	} while (repeat_loop);

	return 0;
}
221 
/* Wait for self refresh entry; 0 on success, -ETIMEDOUT on timeout. */
static int sr_entry_loop(void)
{
	return sr_loop(true);
}
226 
/* Wait for self refresh exit; 0 on success, -ETIMEDOUT on timeout. */
int ddr_sr_exit_loop(void)
{
	return sr_loop(false);
}
231 
/*
 * Configure SSR (software self refresh) mode: disable the automatic
 * low-power mechanisms so that self refresh is only entered under
 * software control. Always returns 0.
 */
static int sr_ssr_set(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();

	/*
	 * Disable Clock disable with LP modes
	 * (used in RUN mode for LPDDR2 with specific timing).
	 */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE);

	/* Disable automatic Self-Refresh mode */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_EN);

	/* Disable hardware low-power interfaces (XPI and DDRC) in DDRDBG */
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE,
		      DDRDBG_LP_DISABLE_LPI_XPI_DISABLE | DDRDBG_LP_DISABLE_LPI_DDRC_DISABLE);

	return 0;
}
250 
/*
 * SSR self refresh entry sequence.
 * standby: when true, additionally disables PHY drift compensation
 * (LPDDR4) and clears PWR_CR11_DDRRETDIS.
 * Returns 0 on success, -1 if self refresh entry timed out; panics if
 * the AXI port cannot be disabled.
 */
static int sr_ssr_entry(bool standby)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();
	uintptr_t rcc_base = stm32mp_rcc_base();

	/* Stop AXI traffic towards the controller first */
	if (stm32mp_ddr_disable_axi_port((struct stm32mp_ddrctl *)ddrctrl_base) != 0) {
		panic();
	}

#if STM32MP_LPDDR4_TYPE
	if (standby) {
		/* Disable DRAM drift compensation */
		disable_phy_ddc();
	}
#endif /* STM32MP_LPDDR4_TYPE */

	disable_dfi_low_power_interface((struct stm32mp_ddrctl *)ddrctrl_base);

	/* SW self refresh entry requested */
	mmio_setbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);
#if STM32MP_LPDDR4_TYPE
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_STAY_IN_SELFREF);
#endif /* STM32MP_LPDDR4_TYPE */

	if (sr_entry_loop() != 0) {
		return -1;
	}

	/* Run the DFI frequency-change handshake for SR entry */
	ddr_activate_controller((struct stm32mp_ddrctl *)ddrctrl_base, true);

	/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 1 */
	ddr_wait_lp3_mode(true);

	if (standby) {
		/*
		 * NOTE(review): clearing DDRRETDIS presumably enables DDR IO
		 * retention for standby — confirm against PWR documentation.
		 */
		mmio_clrbits_32(stm32mp_pwr_base() + PWR_CR11, PWR_CR11_DDRRETDIS);
	}

	/* Clear DDRCPLPEN while keeping DDRCPEN set */
	mmio_clrsetbits_32(rcc_base + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPLPEN,
			   RCC_DDRCPCFGR_DDRCPEN);
	mmio_setbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_setbits_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);

	return 0;
}
295 
/*
 * SSR self refresh exit sequence (reverse of sr_ssr_entry()).
 * Returns 0 on success, -1 if self refresh exit timed out.
 */
static int sr_ssr_exit(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();
	uintptr_t rcc_base = stm32mp_rcc_base();

	/* Restore DDR sub-system clocks before touching the controller */
	mmio_setbits_32(rcc_base + RCC_DDRCPCFGR,
			RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPEN);
	mmio_clrbits_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);
	mmio_setbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);

	udelay(DDR_DELAY_1US);

	/* Run the DFI frequency-change handshake for SR exit */
	ddr_activate_controller((struct stm32mp_ddrctl *)ddrctrl_base, false);

	/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 0 */
	ddr_wait_lp3_mode(false);

	/* SW self refresh exit requested */
	mmio_clrbits_32(ddrctrl_base + DDRCTRL_PWRCTL, DDRCTRL_PWRCTL_SELFREF_SW);

	if (ddr_sr_exit_loop() != 0) {
		return -1;
	}

	/* Re-enable DFI low-power interface */
	mmio_setbits_32(ddrctrl_base + DDRCTRL_DFILPCFG0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);

	stm32mp_ddr_enable_axi_port((struct stm32mp_ddrctl *)ddrctrl_base);

	return 0;
}
327 
/*
 * Configure HSR (hardware self refresh) mode: select the HSR DDR clock
 * mode in RCC and enable hardware low-power control in the controller
 * (hwlpctl), using a quasi-dynamic register update under software
 * self refresh. Always returns 0; panics if SW self refresh entry fails.
 */
static int sr_hsr_set(void)
{
	uintptr_t ddrctrl_base = stm32mp_ddrctrl_base();

	mmio_clrsetbits_32(stm32mp_rcc_base() + RCC_DDRITFCFGR,
			   RCC_DDRITFCFGR_DDRCKMOD_MASK, RCC_DDRITFCFGR_DDRCKMOD_HSR);

	/*
	 * manage quasi-dynamic registers modification
	 * hwlpctl.hw_lp_en : Group 2
	 */
	if (stm32mp_ddr_sw_selfref_entry((struct stm32mp_ddrctl *)ddrctrl_base) != 0) {
		panic();
	}
	stm32mp_ddr_start_sw_done((struct stm32mp_ddrctl *)ddrctrl_base);

	/* Enable HW low power with exit on idle, HW_IDLE_PERIOD * 32 DFI clocks */
	mmio_write_32(ddrctrl_base + DDRCTRL_HWLPCTL,
		      DDRCTRL_HWLPCTL_HW_LP_EN | DDRCTRL_HWLPCTL_HW_LP_EXIT_IDLE_EN |
		      (HW_IDLE_PERIOD << DDRCTRL_HWLPCTL_HW_LP_IDLE_X32_SHIFT));

	stm32mp_ddr_wait_sw_done_ack((struct stm32mp_ddrctl *)ddrctrl_base);
	stm32mp_ddr_sw_selfref_exit((struct stm32mp_ddrctl *)ddrctrl_base);

	return 0;
}
353 
/*
 * HSR entry: keep only the controller low-power clock enable, then wait
 * for self refresh entry. Returns sr_entry_loop() status.
 */
static int sr_hsr_entry(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPLPEN);

	return sr_entry_loop(); /* read_data should be equal to 0x223 */
}
360 
/*
 * HSR exit: re-enable controller clocks; hardware handles the actual
 * self refresh exit. Always returns 0.
 */
static int sr_hsr_exit(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPEN);

	/* TODO: check if ddr_sr_exit_loop() is needed here */

	return 0;
}
370 
/*
 * Configure ASR (automatic self refresh) mode: re-enable the hardware
 * low-power interfaces by clearing DDRDBG_LP_DISABLE. Always returns 0.
 */
static int sr_asr_set(void)
{
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE, 0U);

	return 0;
}
377 
/*
 * ASR entry: nothing to trigger, only wait for the hardware-initiated
 * entry to complete. Returns sr_entry_loop() status.
 */
static int sr_asr_entry(void)
{
	/*
	 * Automatically enter into self refresh when there is no ddr traffic
	 * for the delay programmed into SYSCONF_DDRC_AUTO_SR_DELAY register.
	 * Default value is 0x20 (unit: Multiples of 32 DFI clock cycles).
	 */
	return sr_entry_loop();
}
387 
/* ASR exit: wait for hardware-initiated self refresh exit. */
static int sr_asr_exit(void)
{
	return ddr_sr_exit_loop();
}
392 
/*
 * Return the DDR IO calibration value.
 * Not implemented yet: always returns 0.
 */
uint32_t ddr_get_io_calibration_val(void)
{
	/* TODO create related service */

	return 0U;
}
399 
ddr_sr_entry(bool standby)400 int ddr_sr_entry(bool standby)
401 {
402 	int ret = -EINVAL;
403 
404 	switch (saved_ddr_sr_mode) {
405 	case DDR_SSR_MODE:
406 		ret = sr_ssr_entry(standby);
407 		break;
408 	case DDR_HSR_MODE:
409 		ret = sr_hsr_entry();
410 		break;
411 	case DDR_ASR_MODE:
412 		ret = sr_asr_entry();
413 		break;
414 	default:
415 		break;
416 	}
417 
418 	return ret;
419 }
420 
ddr_sr_exit(void)421 int ddr_sr_exit(void)
422 {
423 	int ret = -EINVAL;
424 
425 	switch (saved_ddr_sr_mode) {
426 	case DDR_SSR_MODE:
427 		ret = sr_ssr_exit();
428 		break;
429 	case DDR_HSR_MODE:
430 		ret = sr_hsr_exit();
431 		break;
432 	case DDR_ASR_MODE:
433 		ret = sr_asr_exit();
434 		break;
435 	default:
436 		break;
437 	}
438 
439 	return ret;
440 }
441 
ddr_read_sr_mode(void)442 enum stm32mp2_ddr_sr_mode ddr_read_sr_mode(void)
443 {
444 	uint32_t pwrctl = mmio_read_32(stm32mp_ddrctrl_base() + DDRCTRL_PWRCTL);
445 	enum stm32mp2_ddr_sr_mode mode = DDR_SR_MODE_INVALID;
446 
447 	switch (pwrctl & (DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE |
448 			  DDRCTRL_PWRCTL_SELFREF_EN)) {
449 	case 0U:
450 		mode = DDR_SSR_MODE;
451 		break;
452 	case DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE:
453 		mode = DDR_HSR_MODE;
454 		break;
455 	case DDRCTRL_PWRCTL_EN_DFI_DRAM_CLK_DISABLE | DDRCTRL_PWRCTL_SELFREF_EN:
456 		mode = DDR_ASR_MODE;
457 		break;
458 	default:
459 		break;
460 	}
461 
462 	return mode;
463 }
464 
/*
 * Switch the self refresh mode configuration and record it in
 * saved_ddr_sr_mode. No-op if the mode is already the saved one.
 * Panics on an unknown mode (all sr_*_set() routines return 0, so a
 * nonzero ret here can only come from the default branch).
 */
void ddr_set_sr_mode(enum stm32mp2_ddr_sr_mode mode)
{
	int ret = -EINVAL;

	if (mode == saved_ddr_sr_mode) {
		return;
	}

	switch (mode) {
	case DDR_SSR_MODE:
		ret = sr_ssr_set();
		break;
	case DDR_HSR_MODE:
		ret = sr_hsr_set();
		break;
	case DDR_ASR_MODE:
		ret = sr_asr_set();
		break;
	default:
		break;
	}

	if (ret != 0) {
		ERROR("Unknown Self Refresh mode\n");
		panic();
	}

	saved_ddr_sr_mode = mode;
}
494 
/* Snapshot the hardware-derived self refresh mode into saved_ddr_sr_mode. */
void ddr_save_sr_mode(void)
{
	saved_ddr_sr_mode = ddr_read_sr_mode();
}
499 
/*
 * Re-apply the saved self refresh mode. Note ddr_set_sr_mode() returns
 * early when the mode is unchanged, so this only acts after the
 * hardware configuration diverged from the saved value.
 */
void ddr_restore_sr_mode(void)
{
	ddr_set_sr_mode(saved_ddr_sr_mode);
}
504 
/* Enable the DDR controller clocks (normal and low-power enables). */
void ddr_sub_system_clk_init(void)
{
	mmio_write_32(stm32mp_rcc_base() + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN);
}
510 
/*
 * Power down the whole DDR sub-system: clear IO retention disable,
 * assert the sub-system resets, then gate the PHY clock and PLL2.
 * The write order (retention, resets, clocks) is deliberate.
 */
void ddr_sub_system_clk_off(void)
{
	uintptr_t rcc_base = stm32mp_rcc_base();

	/* Clear DDR IO retention */
	mmio_clrbits_32(stm32mp_pwr_base() + PWR_CR11, PWR_CR11_DDRRETDIS);

	/* Reset DDR sub system */
	mmio_write_32(rcc_base + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPRST);
	mmio_write_32(rcc_base + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(rcc_base + RCC_DDRPHYCAPBCFGR, RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(rcc_base + RCC_DDRCAPBCFGR, RCC_DDRCAPBCFGR_DDRCAPBRST);

	/* Deactivate clocks and PLL2 */
	mmio_clrbits_32(rcc_base + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_clrbits_32(rcc_base + RCC_PLL2CFGR1, RCC_PLL2CFGR1_PLLEN);
}
528