// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 */
#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk322x.h>
#include <asm/arch-rockchip/grf_rk322x.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/arch-rockchip/sdram_rk322x.h>
#include <asm/arch-rockchip/uart.h>
#include <asm/arch-rockchip/sdram.h>
#include <asm/types.h>
#include <linux/err.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
	struct rk322x_ddr_pctl *pctl;
	struct rk322x_ddr_phy *phy;
	struct rk322x_service_sys *msch;
};

struct dram_info {
	struct chan_info chan[1];
	struct ram_info info;
	struct clk ddr_clk;
	struct rk322x_cru *cru;
	struct rk322x_grf *grf;
};

struct rk322x_sdram_params {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3228_dmc of_plat;
#endif
	struct rk322x_sdram_channel ch[1];
	struct rk322x_pctl_timing pctl_timing;
	struct rk322x_phy_timing phy_timing;
	struct rk322x_base_params base;
	int num_channels;
	struct regmap *map;
};

#ifdef CONFIG_TPL_BUILD
/*
 * [7:6]  bank(n:n bit bank)
 * [5:4]  row(13+n)
 * [3]    cs(0:1 cs, 1:2 cs)
 * [2:1]  bank(n:n bit bank)
 * [0]    col(10+n)
 */
const char ddr_cfg_2_rbc[] = {
	((0 << 6) | (0 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (1 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (2 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (3 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (1 << 4) | (0 << 3) | (1 << 2) | 2),
	((0 << 6) | (2 << 4) | (0 << 3) | (1 << 2) | 2),
	((0 << 6) | (3 << 4) | (0 << 3) | (1 << 2) | 2),
	((0 << 6) | (0 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (1 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (2 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (3 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (2 << 4) | (0 << 3) | (0 << 2) | 1),
	((1 << 6) | (1 << 4) | (0 << 3) | (0 << 2) | 2),
	((1 << 6) | (1 << 4) | (0 << 3) | (0 << 2) | 1),
	((0 << 6) | (3 << 4) | (1 << 3) | (1 << 2) | 1),
	((0 << 6) | (3 << 4) | (1 << 3) | (1 << 2) | 0),
};

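/*
 * Copy n bytes of 32-bit words from src into the register block at dest,
 * one writel() per word.
 */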
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

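/*
 * Assert the controller and PHY soft resets in the CRU, release them in
 * turn, then de-assert the PHY's own analog and digital soft resets,
 * with short delays between each step.
 */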
void phy_pctrl_reset(struct rk322x_cru *cru,
		     struct rk322x_ddr_phy *ddr_phy)
{
	rk_clrsetreg(&cru->cru_softrst_con[5], 1 << DDRCTRL_PSRST_SHIFT |
			1 << DDRCTRL_SRST_SHIFT | 1 << DDRPHY_PSRST_SHIFT |
			1 << DDRPHY_SRST_SHIFT,
			1 << DDRCTRL_PSRST_SHIFT | 1 << DDRCTRL_SRST_SHIFT |
			1 << DDRPHY_PSRST_SHIFT | 1 << DDRPHY_SRST_SHIFT);

	udelay(10);

	rk_clrreg(&cru->cru_softrst_con[5], 1 << DDRPHY_PSRST_SHIFT |
						  1 << DDRPHY_SRST_SHIFT);
	udelay(10);

	rk_clrreg(&cru->cru_softrst_con[5], 1 << DDRCTRL_PSRST_SHIFT |
						  1 << DDRCTRL_SRST_SHIFT);
	udelay(10);

	clrbits_le32(&ddr_phy->ddrphy_reg[0],
		     SOFT_RESET_MASK << SOFT_RESET_SHIFT);
	udelay(10);
	setbits_le32(&ddr_phy->ddrphy_reg[0],
		     SOFT_DERESET_ANALOG);
	udelay(5);
	setbits_le32(&ddr_phy->ddrphy_reg[0],
		     SOFT_DERESET_DIGITAL);

	udelay(1);
}

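/*
 * Configure the PHY DLLs for bypass operation and select a delay setting
 * based on the DDR frequency in MHz. The individual ddrphy_reg indices
 * and bit values are SoC specific; their exact meaning is only described
 * in the RK322x TRM.
 */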
void phy_dll_bypass_set(struct rk322x_ddr_phy *ddr_phy, u32 freq)
{
	u32 tmp;

	setbits_le32(&ddr_phy->ddrphy_reg[0x13], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x26], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x36], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x46], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x56], 0x10);

	clrbits_le32(&ddr_phy->ddrphy_reg[0x14], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x27], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x37], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x47], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x57], 0x8);

	if (freq <= 400)
		setbits_le32(&ddr_phy->ddrphy_reg[0xa4], 0x1f);
	else
		clrbits_le32(&ddr_phy->ddrphy_reg[0xa4], 0x1f);

	if (freq <= 680)
		tmp = 3;
	else
		tmp = 2;

	writel(tmp, &ddr_phy->ddrphy_reg[0x28]);
	writel(tmp, &ddr_phy->ddrphy_reg[0x38]);
	writel(tmp, &ddr_phy->ddrphy_reg[0x48]);
	writel(tmp, &ddr_phy->ddrphy_reg[0x58]);
}

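/*
 * Issue a command through the pctl MCMD register to the given rank(s)
 * and busy-wait until the controller clears the START_CMD bit.
 */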
static void send_command(struct rk322x_ddr_pctl *pctl,
			 u32 rank, u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
	udelay(1);
	while (readl(&pctl->mcmd) & START_CMD)
		;
}

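/*
 * Run the power-up initialisation sequence for the attached DRAM.
 * For DDR3: precharge-all, the MR2/MR3/MR1/MR0 mode-register writes
 * (with DLL reset) and ZQ calibration. For LPDDR2/LPDDR3: a fixed MRW
 * sequence followed by mode-register writes taken from phy_timing.mr[]
 * (plus MR11 for LPDDR3).
 */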
static void memory_init(struct chan_info *chan,
			struct rk322x_sdram_params *sdram_params)
{
	struct rk322x_ddr_pctl *pctl = chan->pctl;
	u32 dramtype = sdram_params->base.dramtype;

	if (dramtype == DDR3) {
		send_command(pctl, 3, DESELECT_CMD, 0);
		udelay(1);
		send_command(pctl, 3, PREA_CMD, 0);
		send_command(pctl, 3, MRS_CMD,
			     (0x02 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (sdram_params->phy_timing.mr[2] & CMD_ADDR_MASK) <<
			     CMD_ADDR_SHIFT);

		send_command(pctl, 3, MRS_CMD,
			     (0x03 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (sdram_params->phy_timing.mr[3] & CMD_ADDR_MASK) <<
			     CMD_ADDR_SHIFT);

		send_command(pctl, 3, MRS_CMD,
			     (0x01 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (sdram_params->phy_timing.mr[1] & CMD_ADDR_MASK) <<
			     CMD_ADDR_SHIFT);

		send_command(pctl, 3, MRS_CMD,
			     (0x00 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     ((sdram_params->phy_timing.mr[0] |
			       DDR3_DLL_RESET) &
			     CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(pctl, 3, ZQCL_CMD, 0);
	} else {
		send_command(pctl, 3, MRS_CMD,
			     (0x63 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0 & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		udelay(10);
		send_command(pctl, 3, MRS_CMD,
			     (0x10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xff & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		udelay(1);
		send_command(pctl, 3, MRS_CMD,
			     (0x10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xff & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		udelay(1);
		send_command(pctl, 3, MRS_CMD,
			     (1 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (sdram_params->phy_timing.mr[1] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(pctl, 3, MRS_CMD,
			     (2 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (sdram_params->phy_timing.mr[2] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(pctl, 3, MRS_CMD,
			     (3 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (sdram_params->phy_timing.mr[3] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		if (dramtype == LPDDR3)
			send_command(pctl, 3, MRS_CMD, (11 & LPDDR23_MA_MASK) <<
				     LPDDR23_MA_SHIFT |
				     (sdram_params->phy_timing.mr11 &
				      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
	}
}

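/*
 * Run DQS calibration (data training) on CS0 with auto refresh
 * temporarily disabled. Returns 0 when the trained byte lanes match the
 * byte width reported in ddrphy_reg[0], -1 on a calibration error or a
 * lane mismatch.
 */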
static u32 data_training(struct chan_info *chan)
{
	struct rk322x_ddr_phy *ddr_phy = chan->phy;
	struct rk322x_ddr_pctl *pctl = chan->pctl;
	u32 value;
	u32 bw = (readl(&ddr_phy->ddrphy_reg[0]) >> 4) & 0xf;
	u32 ret;

	/* disable auto refresh */
	value = readl(&pctl->trefi) | (1 << 31);
	writel(1 << 31, &pctl->trefi);

	clrsetbits_le32(&ddr_phy->ddrphy_reg[2], 0x30,
			DQS_SQU_CAL_SEL_CS0);
	setbits_le32(&ddr_phy->ddrphy_reg[2], DQS_SQU_CAL_START);

	udelay(30);
	ret = readl(&ddr_phy->ddrphy_reg[0xff]);

	clrbits_le32(&ddr_phy->ddrphy_reg[2],
		     DQS_SQU_CAL_START);

	/*
	 * Data training takes about 20us, so send some auto-refresh
	 * commands (refresh period is about 7.8us) to make up for the
	 * refreshes lost while it ran.
	 */
	send_command(pctl, 3, PREA_CMD, 0);
	send_command(pctl, 3, REF_CMD, 0);

	writel(value, &pctl->trefi);

	if (ret & 0x10) {
		ret = -1;
	} else {
		ret = (ret & 0xf) ^ bw;
		ret = (ret == 0) ? 0 : -1;
	}
	return ret;
}

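/*
 * move_to_config_state()/move_to_access_state()/move_to_lowpower_state():
 * poll pctl->stat and step through sctl commands (WAKEUP/CFG/GO/SLEEP)
 * until the protocol controller reaches the requested state.
 */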
static void move_to_config_state(struct rk322x_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK)
				!= ACCESS)
				;
			/*
			 * From the low-power state we must wake up first and
			 * only then enter config, so fall through.
			 */
		case ACCESS:
			/* fallthrough */
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

static void move_to_access_state(struct rk322x_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != ACCESS)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != CONFIG)
				;
			/* fallthrough */
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != ACCESS)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

static void move_to_lowpower_state(struct rk322x_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MASK;
		switch (state) {
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != CONFIG)
				;
			/* fallthrough */
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != ACCESS)
				;
			break;
		case ACCESS:
			writel(SLEEP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) !=
			       LOW_POWER)
				;
			break;
		case LOW_POWER:
			return;
		default:
			break;
		}
	}
}

/* pctl must be in the low-power state when this function is called */
static void phy_softreset(struct dram_info *dram)
{
	struct rk322x_ddr_phy *ddr_phy = dram->chan[0].phy;
	struct rk322x_grf *grf = dram->grf;

	writel(GRF_DDRPHY_BUFFEREN_CORE_EN, &grf->soc_con[0]);
	clrbits_le32(&ddr_phy->ddrphy_reg[0], 0x3 << 2);
	udelay(1);
	setbits_le32(&ddr_phy->ddrphy_reg[0], 1 << 2);
	udelay(5);
	setbits_le32(&ddr_phy->ddrphy_reg[0], 1 << 3);
	writel(GRF_DDRPHY_BUFFEREN_CORE_DIS, &grf->soc_con[0]);
}

/* bw: 2: 32bit, 1: 16bit */
static void set_bw(struct dram_info *dram, u32 bw)
{
	struct rk322x_ddr_pctl *pctl = dram->chan[0].pctl;
	struct rk322x_ddr_phy *ddr_phy = dram->chan[0].phy;
	struct rk322x_grf *grf = dram->grf;

	if (bw == 1) {
		setbits_le32(&pctl->ppcfg, 1);
		clrbits_le32(&ddr_phy->ddrphy_reg[0], 0xc << 4);
		writel(GRF_MSCH_NOC_16BIT_EN, &grf->soc_con[0]);
		clrbits_le32(&ddr_phy->ddrphy_reg[0x46], 0x8);
		clrbits_le32(&ddr_phy->ddrphy_reg[0x56], 0x8);
	} else {
		clrbits_le32(&pctl->ppcfg, 1);
		setbits_le32(&ddr_phy->ddrphy_reg[0], 0xf << 4);
		writel(GRF_DDR_32BIT_EN | GRF_MSCH_NOC_32BIT_EN,
		       &grf->soc_con[0]);
		setbits_le32(&ddr_phy->ddrphy_reg[0x46], 0x8);
		setbits_le32(&ddr_phy->ddrphy_reg[0x56], 0x8);
	}
}

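/*
 * Program the protocol controller: DFI interface configuration, the
 * timing block copied from pctl_timing, ODT and read/write latency
 * setup, and the MCFG/GRF settings that select DDR3 versus
 * LPDDR2/LPDDR3 operation and the bus width.
 */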
static void pctl_cfg(struct rk322x_ddr_pctl *pctl,
		     struct rk322x_sdram_params *sdram_params,
		     struct rk322x_grf *grf)
{
	u32 burst_len;
	u32 bw;
	u32 dramtype = sdram_params->base.dramtype;

	if (sdram_params->ch[0].bw == 2)
		bw = GRF_DDR_32BIT_EN | GRF_MSCH_NOC_32BIT_EN;
	else
		bw = GRF_MSCH_NOC_16BIT_EN;

	writel(DFI_INIT_START | DFI_DATA_BYTE_DISABLE_EN, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN, &pctl->dfistcfg1);
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(0x51010, &pctl->dfilpcfg0);

	writel(1, &pctl->dfitphyupdtype0);
	writel(0x0d, &pctl->dfitphyrdlat);
	writel(0, &pctl->dfitphywrdata);

	writel(0, &pctl->dfiupdcfg);
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(struct rk322x_pctl_timing));
	if (dramtype == DDR3) {
		writel((1 << 3) | (1 << 11),
		       &pctl->dfiodtcfg);
		writel(7 << 16, &pctl->dfiodtcfg1);
		writel((readl(&pctl->tcl) - 1) / 2 - 1, &pctl->dfitrddataen);
		writel((readl(&pctl->tcwl) - 1) / 2 - 1, &pctl->dfitphywrlat);
		writel(500, &pctl->trsth);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		writel(bw | GRF_DDR3_EN, &grf->soc_con[0]);
	} else {
		if (sdram_params->phy_timing.bl & PHT_BL_8)
			burst_len = MDDR_LPDDR2_BL_8;
		else
			burst_len = MDDR_LPDDR2_BL_4;

		writel(readl(&pctl->tcl) / 2 - 1, &pctl->dfitrddataen);
		writel(readl(&pctl->tcwl) / 2 - 1, &pctl->dfitphywrlat);
		writel(0, &pctl->trsth);
		if (dramtype == LPDDR2) {
			writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
			       LPDDR2_S4 | LPDDR2_EN | burst_len |
			       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
			       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
			       &pctl->mcfg);
			writel(0, &pctl->dfiodtcfg);
			writel(0, &pctl->dfiodtcfg1);
		} else {
			writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
			       LPDDR2_S4 | LPDDR3_EN | burst_len |
			       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
			       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
			       &pctl->mcfg);
			writel((1 << 3) | (1 << 2), &pctl->dfiodtcfg);
			writel((7 << 16) | 4, &pctl->dfiodtcfg1);
		}
		writel(bw | GRF_LPDDR2_3_EN, &grf->soc_con[0]);
	}
	setbits_le32(&pctl->scfg, 1);
}

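/*
 * Program the MSCH (NoC) timing registers and the PHY: DRAM type and
 * burst length, CL/AL and CWL, and the drive-strength/ODT values for
 * the command, clock and DQS groups.
 */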
static void phy_cfg(struct chan_info *chan,
		    struct rk322x_sdram_params *sdram_params)
{
	struct rk322x_ddr_phy *ddr_phy = chan->phy;
	struct rk322x_service_sys *axi_bus = chan->msch;
	struct rk322x_msch_timings *noc_timing = &sdram_params->base.noc_timing;
	struct rk322x_phy_timing *phy_timing = &sdram_params->phy_timing;
	struct rk322x_pctl_timing *pctl_timing = &sdram_params->pctl_timing;
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;

	writel(noc_timing->ddrtiming, &axi_bus->ddrtiming);
	writel(noc_timing->ddrmode, &axi_bus->ddrmode);
	writel(noc_timing->readlatency, &axi_bus->readlatency);
	writel(noc_timing->activate, &axi_bus->activate);
	writel(noc_timing->devtodev, &axi_bus->devtodev);

	switch (sdram_params->base.dramtype) {
	case DDR3:
		writel(PHY_DDR3 | phy_timing->bl, &ddr_phy->ddrphy_reg[1]);
		break;
	case LPDDR2:
		writel(PHY_LPDDR2 | phy_timing->bl, &ddr_phy->ddrphy_reg[1]);
		break;
	default:
		writel(PHY_LPDDR2 | phy_timing->bl, &ddr_phy->ddrphy_reg[1]);
		break;
	}

	writel(phy_timing->cl_al, &ddr_phy->ddrphy_reg[0xb]);
	writel(pctl_timing->tcwl, &ddr_phy->ddrphy_reg[0xc]);

	cmd_drv = PHY_RON_RTT_34OHM;
	clk_drv = PHY_RON_RTT_45OHM;
	dqs_drv = PHY_RON_RTT_34OHM;
	if (sdram_params->base.dramtype == LPDDR2)
		dqs_odt = PHY_RON_RTT_DISABLE;
	else
		dqs_odt = PHY_RON_RTT_225OHM;

	writel(cmd_drv, &ddr_phy->ddrphy_reg[0x11]);
	clrsetbits_le32(&ddr_phy->ddrphy_reg[0x12], (0x1f << 3), cmd_drv << 3);
	writel(clk_drv, &ddr_phy->ddrphy_reg[0x16]);
	writel(clk_drv, &ddr_phy->ddrphy_reg[0x18]);

	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x20]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x2f]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x30]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x3f]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x40]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x4f]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x50]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x5f]);

	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x21]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x2e]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x31]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x3e]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x41]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x4e]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x51]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x5e]);
}

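/*
 * Pick the ddrconf (row/bank/column mapping) index for the NoC from the
 * detected channel geometry, using the ddr_cfg_2_rbc[] encodings above,
 * and write it to the MSCH while the controller is in the config state.
 */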
void dram_cfg_rbc(struct chan_info *chan,
		  struct rk322x_sdram_params *sdram_params)
{
	char noc_config;
	int i = 0;
	struct rk322x_sdram_channel *config = &sdram_params->ch[0];
	struct rk322x_service_sys *axi_bus = chan->msch;

	move_to_config_state(chan->pctl);

	if ((config->rank == 2) && (config->cs1_row == config->cs0_row)) {
		if ((config->col + config->bw) == 12) {
			i = 14;
			goto finish;
		} else if ((config->col + config->bw) == 11) {
			i = 15;
			goto finish;
		}
	}
	noc_config = ((config->cs0_row - 13) << 4) | ((config->bk - 2) << 2) |
				(config->col + config->bw - 11);
	for (i = 0; i < 11; i++) {
		if (noc_config == ddr_cfg_2_rbc[i])
			break;
	}

	if (i < 11)
		goto finish;

	noc_config = ((config->bk - 2) << 6) | ((config->cs0_row - 13) << 4) |
				(config->col + config->bw - 11);

	for (i = 11; i < 14; i++) {
		if (noc_config == ddr_cfg_2_rbc[i])
			break;
	}
	if (i < 14)
		goto finish;
	else
		i = 0;

finish:
	writel(i, &axi_bus->ddrconf);
	move_to_access_state(chan->pctl);
}

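/*
 * Encode the detected channel geometry (type, rank, col/row/bank counts,
 * bus width) in the SYS_REG format and store it in GRF os_reg[2], from
 * which later boot stages derive the DRAM size.
 */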
static void dram_all_config(const struct dram_info *dram,
			    struct rk322x_sdram_params *sdram_params)
{
	struct rk322x_sdram_channel *info = &sdram_params->ch[0];
	u32 sys_reg = 0;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (1 - 1) << SYS_REG_NUM_CH_SHIFT;
	sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(0);
	sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(0);
	sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(0);
	sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(0);
	sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(0);
	sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(0);
	sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(0);
	sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(0);
	sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(0);

	writel(sys_reg, &dram->grf->os_reg[2]);
}

#define TEST_PATTEN	0x5aa5f00f

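/*
 * Detect the channel geometry by probing: bus width via data training,
 * then column, row and rank by writing TEST_PATTEN at address offsets
 * that wrap onto the base address when the guessed geometry is too
 * large.
 */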
static int dram_cap_detect(struct dram_info *dram,
			   struct rk322x_sdram_params *sdram_params)
{
	u32 bw, row, col, addr;
	u32 ret = 0;
	struct rk322x_service_sys *axi_bus = dram->chan[0].msch;

	if (sdram_params->base.dramtype == DDR3)
		sdram_params->ch[0].dbw = 1;
	else
		sdram_params->ch[0].dbw = 2;

	move_to_config_state(dram->chan[0].pctl);
	/* bw detect */
	set_bw(dram, 2);
	if (data_training(&dram->chan[0]) == 0) {
		bw = 2;
	} else {
		bw = 1;
		set_bw(dram, 1);
		move_to_lowpower_state(dram->chan[0].pctl);
		phy_softreset(dram);
		move_to_config_state(dram->chan[0].pctl);
		if (data_training(&dram->chan[0])) {
			printf("BW detect error\n");
			ret = -EINVAL;
		}
	}
	sdram_params->ch[0].bw = bw;
	sdram_params->ch[0].bk = 3;

	if (bw == 2)
		writel(6, &axi_bus->ddrconf);
	else
		writel(3, &axi_bus->ddrconf);
	move_to_access_state(dram->chan[0].pctl);
	for (col = 11; col >= 9; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE +
			(1 << (col + bw - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (col == 8) {
		printf("Col detect error\n");
		ret = -EINVAL;
		goto out;
	} else {
		sdram_params->ch[0].col = col;
	}

	writel(10, &axi_bus->ddrconf);

	/* Detect row */
	for (row = 16; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE + (1u << (row + 11 + 3 - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 11) {
		printf("Row detect error\n");
		ret = -EINVAL;
	} else {
		sdram_params->ch[0].cs1_row = row;
		sdram_params->ch[0].row_3_4 = 0;
		sdram_params->ch[0].cs0_row = row;
	}
	/* cs detect */
	writel(0, CONFIG_SYS_SDRAM_BASE);
	writel(TEST_PATTEN, CONFIG_SYS_SDRAM_BASE + (1u << 30));
	writel(~TEST_PATTEN, CONFIG_SYS_SDRAM_BASE + (1u << 30) + 4);
	if ((readl(CONFIG_SYS_SDRAM_BASE + (1u << 30)) == TEST_PATTEN) &&
	    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
		sdram_params->ch[0].rank = 2;
	else
		sdram_params->ch[0].rank = 1;
out:
	return ret;
}

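/*
 * Full TPL-time DRAM bring-up: set the DDR clock, reset and configure
 * the PHY and controller, run the memory init sequence, detect the
 * geometry and finally program the NoC mapping and GRF os_reg.
 */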
static int sdram_init(struct dram_info *dram,
		      struct rk322x_sdram_params *sdram_params)
{
	int ret;

	ret = clk_set_rate(&dram->ddr_clk,
			   sdram_params->base.ddr_freq * MHz * 2);
	if (ret < 0) {
		printf("Could not set DDR clock\n");
		return ret;
	}

	phy_pctrl_reset(dram->cru, dram->chan[0].phy);
	phy_dll_bypass_set(dram->chan[0].phy, sdram_params->base.ddr_freq);
	pctl_cfg(dram->chan[0].pctl, sdram_params, dram->grf);
	phy_cfg(&dram->chan[0], sdram_params);
	writel(POWER_UP_START, &dram->chan[0].pctl->powctl);
	while (!(readl(&dram->chan[0].pctl->powstat) & POWER_UP_DONE))
		;
	memory_init(&dram->chan[0], sdram_params);
	move_to_access_state(dram->chan[0].pctl);
	ret = dram_cap_detect(dram, sdram_params);
	if (ret)
		goto out;
	dram_cfg_rbc(&dram->chan[0], sdram_params);
	dram_all_config(dram, sdram_params);
out:
	return ret;
}

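/*
 * Fill rk322x_sdram_params from the device tree (pctl/phy timings and
 * the sdram-params block) when not using OF_PLATDATA; with OF_PLATDATA
 * the data is converted in conv_of_platdata() at probe time instead.
 */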
static int rk322x_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk322x_sdram_params *params = dev_get_platdata(dev);
	const void *blob = gd->fdt_blob;
	int node = dev_of_offset(dev);
	int ret;

	params->num_channels = 1;

	ret = fdtdec_get_int_array(blob, node, "rockchip,pctl-timing",
				   (u32 *)&params->pctl_timing,
				   sizeof(params->pctl_timing) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,pctl-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,phy-timing",
				   (u32 *)&params->phy_timing,
				   sizeof(params->phy_timing) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,phy-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,sdram-params",
				   (u32 *)&params->base,
				   sizeof(params->base) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,sdram-params\n", __func__);
		return -EINVAL;
	}
	ret = regmap_init_mem(dev_ofnode(dev), &params->map);
	if (ret)
		return ret;
#endif

	return 0;
}
#endif /* CONFIG_TPL_BUILD */

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
	struct rk322x_sdram_params *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3228_dmc *of_plat = &plat->of_plat;
	int ret;

	memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
	       sizeof(plat->pctl_timing));
	memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
	       sizeof(plat->phy_timing));
	memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));

	plat->num_channels = 1;
	ret = regmap_init_mem_platdata(dev, of_plat->reg,
				       ARRAY_SIZE(of_plat->reg) / 2,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

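/*
 * In the TPL build this probe performs the actual DRAM initialisation;
 * in later stages it only reports the size that TPL recorded in GRF
 * os_reg[2] via rockchip_sdram_size().
 */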
static int rk322x_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_TPL_BUILD
	struct rk322x_sdram_params *plat = dev_get_platdata(dev);
	int ret;
	struct udevice *dev_clk;
#endif
	struct dram_info *priv = dev_get_priv(dev);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
#ifdef CONFIG_TPL_BUILD
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif

	priv->chan[0].msch = syscon_get_first_range(ROCKCHIP_SYSCON_MSCH);
	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].phy = regmap_get_range(plat->map, 1);
	ret = rockchip_get_clk(&dev_clk);
	if (ret)
		return ret;
	priv->ddr_clk.id = CLK_DDR;
	ret = clk_request(dev_clk, &priv->ddr_clk);
	if (ret)
		return ret;

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
	ret = sdram_init(priv, plat);
	if (ret)
		return ret;
#else
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
			(phys_addr_t)&priv->grf->os_reg[2]);
#endif

	return 0;
}

static int rk322x_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk322x_dmc_ops = {
	.get_info = rk322x_dmc_get_info,
};

static const struct udevice_id rk322x_dmc_ids[] = {
	{ .compatible = "rockchip,rk3228-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk322x) = {
	.name = "rockchip_rk322x_dmc",
	.id = UCLASS_RAM,
	.of_match = rk322x_dmc_ids,
	.ops = &rk322x_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	.ofdata_to_platdata = rk322x_dmc_ofdata_to_platdata,
#endif
	.probe = rk322x_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rk322x_sdram_params),
#endif
};