xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_px30.c (revision 7c1937d6d1c7daf8e59b4760f8adc7ee42bd7bea)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/cru_px30.h>
14 #include <asm/arch/grf_px30.h>
15 #include <asm/arch/hardware.h>
16 #include <asm/arch/sdram_common.h>
17 #include <asm/arch/sdram_px30.h>
18 
/*
 * Because the px30 SRAM is small, CONFIG_TPL_TINY_FRAMEWORK must be
 * defined to reduce the TPL size when building the TPL firmware.
 */
23 #ifdef CONFIG_TPL_BUILD
24 #ifndef CONFIG_TPL_TINY_FRAMEWORK
25 #error please defined CONFIG_TPL_TINY_FRAMEWORK for px30 !!!
26 #endif
27 #endif
28 
DECLARE_GLOBAL_DATA_PTR;
/*
 * Driver state. The TPL build carries the raw register bases needed
 * for DRAM initialization; all stages keep the generic ram_info and
 * the PMU GRF handle.
 */
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct px30_ddr_pctl_regs *pctl;	/* DDR controller (upctl2) */
	struct px30_ddr_phy_regs *phy;		/* DDR PHY */
	struct px30_cru *cru;			/* clock & reset unit */
	struct px30_msch_regs *msch;		/* memory scheduler NIU */
	struct px30_ddr_grf_regs *ddr_grf;	/* DDR general register file */
	struct px30_grf *grf;			/* system GRF */
#endif
	struct ram_info info;
	struct px30_pmugrf *pmugrf;	/* PMU GRF (os_reg geometry encoding) */
};
42 
43 #ifdef CONFIG_TPL_BUILD
44 #define PMUGRF_BASE_ADDR		0xFF010000
45 #define CRU_BASE_ADDR			0xFF2B0000
46 #define GRF_BASE_ADDR			0xFF140000
47 #define DDRC_BASE_ADDR			0xFF600000
48 #define DDR_PHY_BASE_ADDR		0xFF2A0000
49 #define SERVER_MSCH0_BASE_ADDR		0xFF530000
50 #define DDR_GRF_BASE_ADDR		0xff630000
51 
struct dram_info dram_info;

/* Controller/PHY register values for LPDDR3 auto-detect at 333MHz —
 * presumably generated by the Rockchip DDR tool; confirm before editing.
 */
struct px30_sdram_params sdram_configs[] = {
#include	"sdram-px30-lpddr3-detect-333.inc"
};

/* Board de-skew values for the CA/DQ lines, included from generated data. */
struct px30_ddr_skew skew = {
#include	"sdram-px30-ddr_skew.inc"
};
61 
/*
 * Assert (1) or release (0) the DDR controller and PHY soft resets
 * via the CRU. The controller AXI reset is driven together with the
 * controller core reset (ctl_srstn).
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	/* controller core/APB/AXI resets live in softrst_con[1] */
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	/* PHY core/APB resets live in softrst_con[2] */
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}
72 
73 static void rkclk_set_dpll(struct dram_info *dram, unsigned int mhz)
74 {
75 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
76 	int delay = 1000;
77 
78 	refdiv = 1;
79 	if (mhz <= 300) {
80 		postdiv1 = 4;
81 		postdiv2 = 2;
82 	} else if (mhz <= 400) {
83 		postdiv1 = 6;
84 		postdiv2 = 1;
85 	} else if (mhz <= 600) {
86 		postdiv1 = 4;
87 		postdiv2 = 1;
88 	} else if (mhz <= 800) {
89 		postdiv1 = 3;
90 		postdiv2 = 1;
91 	} else if (mhz <= 1600) {
92 		postdiv1 = 2;
93 		postdiv2 = 1;
94 	} else {
95 		postdiv1 = 1;
96 		postdiv2 = 1;
97 	}
98 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
99 
100 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
101 
102 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
103 	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
104 	       &dram->cru->pll[1].con1);
105 
106 	while (delay > 0) {
107 		udelay(1);
108 		if (LOCK(readl(&dram->cru->pll[1].con1)))
109 			break;
110 		delay--;
111 	}
112 
113 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
114 }
115 
116 static void rkclk_configure_ddr(struct dram_info *dram,
117 				struct px30_sdram_params *sdram_params)
118 {
119 	/* for inno ddr phy need 2*freq */
120 	rkclk_set_dpll(dram,  sdram_params->ddr_freq * 2);
121 }
122 
/*
 * Pulse the PHY soft resets: assert both analog and digital reset
 * (PHY reg 0 bits [3:2]), then release analog first and digital
 * second, with the settling delays in between.
 */
static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* assert both analog and digital reset */
	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET);
	udelay(5);
	setbits_le32(PHY_REG(phy_base, 0), DIGITAL_DERESET);
	udelay(1);
}
134 
135 static int pctl_cfg(struct dram_info *dram,
136 		    struct px30_sdram_params *sdram_params)
137 {
138 	u32 i;
139 	void __iomem *pctl_base = dram->pctl;
140 
141 	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
142 		writel(sdram_params->pctl_regs.pctl[i][1],
143 		       pctl_base + sdram_params->pctl_regs.pctl[i][0]);
144 	}
145 	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
146 			(0xff << 16) | 0x1f,
147 			((SR_IDLE & 0xff) << 16) | (PD_IDLE & 0x1f));
148 
149 	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
150 			0xfff << 16,
151 			5 << 16);
152 	/* disable zqcs */
153 	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
154 
155 	return 0;
156 }
157 
158 /* return ddrconfig value
159  *       (-1), find ddrconfig fail
160  *       other, the ddrconfig value
161  * only support cs0_row >= cs1_row
162  */
163 static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
164 {
165 	u32 bw, die_bw, col, bank;
166 	u32 i, tmp;
167 	u32 ddrconf = -1;
168 
169 	bw = sdram_params->ch.bw;
170 	die_bw = sdram_params->ch.dbw;
171 	col = sdram_params->ch.col;
172 	bank = sdram_params->ch.bk;
173 
174 	if (sdram_params->dramtype == DDR4) {
175 		if (die_bw == 0)
176 			ddrconf = 7 + bw;
177 		else
178 			ddrconf = 12 - bw;
179 		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
180 	} else {
181 		tmp = ((bank - 2) << 3) | (col + bw - 10);
182 		for (i = 0; i < 7; i++)
183 			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
184 				ddrconf = i;
185 				break;
186 			}
187 		if (i > 6)
188 			printascii("calculate ddrconfig error\n");
189 	}
190 
191 	return ddrconf;
192 }
193 
194 /* n: Unit bytes */
195 static void copy_to_reg(u32 *dest, u32 *src, u32 n)
196 {
197 	int i;
198 
199 	for (i = 0; i < n / sizeof(u32); i++) {
200 		writel(*src, dest);
201 		src++;
202 		dest++;
203 	}
204 }
205 
206 /*
207  * calculate controller dram address map, and setting to register.
208  * argument sdram_params->ch.ddrconf must be right value before
209  * call this function.
210  */
211 static void set_ctl_address_map(struct dram_info *dram,
212 				struct px30_sdram_params *sdram_params)
213 {
214 	void __iomem *pctl_base = dram->pctl;
215 	u32 cs_pst, bg, max_row, ddrconf;
216 	u32 i;
217 
218 	if (sdram_params->dramtype == DDR4)
219 		/*
220 		 * DDR4 8bit dram BG = 2(4bank groups),
221 		 * 16bit dram BG = 1 (2 bank groups)
222 		 */
223 		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
224 	else
225 		bg = 0;
226 
227 	cs_pst = sdram_params->ch.bw + sdram_params->ch.col +
228 		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row;
229 	if (cs_pst >= 32 || sdram_params->ch.rank == 1)
230 		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
231 	else
232 		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);
233 
234 	ddrconf = sdram_params->ch.ddrconfig;
235 	if (sdram_params->dramtype == DDR4) {
236 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
237 			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
238 				ddrconf = 7 + i;
239 				break;
240 			}
241 		}
242 	}
243 
244 	copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
245 		    &addrmap[ddrconf][0], 8 * 4);
246 	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);
247 
248 	if (max_row < 12)
249 		printascii("set addrmap fail\n");
250 	/* need to disable row ahead of rank by set to 0xf */
251 	for (i = 17; i > max_row; i--)
252 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
253 			((i - 12) * 8 / 32) * 4,
254 			0xf << ((i - 12) * 8 % 32),
255 			0xf << ((i - 12) * 8 % 32));
256 
257 	if ((sdram_params->dramtype == LPDDR3 ||
258 	     sdram_params->dramtype == LPDDR2) &&
259 		 sdram_params->ch.row_3_4)
260 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
261 	if (sdram_params->dramtype == DDR4 && sdram_params->ch.bw != 0x2)
262 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
263 }
264 
/*
 * Configure the PHY DLL bypass controls and per-lane clock settings
 * for the target frequency @freq (in Hz).
 */
static void phy_dll_bypass_set(struct dram_info *dram, u32 freq)
{
	void __iomem *phy_base = dram->phy;
	u32 tmp;
	u32 i, j;

	/* CA group bypass-control bits */
	setbits_le32(PHY_REG(phy_base, 0x13), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x14), 1 << 3);
	/* same bit pair for each of the four DQ byte lanes */
	for (i = 0; i < 4; i++) {
		j = 0x26 + i * 0x10;
		setbits_le32(PHY_REG(phy_base, j), 1 << 4);
		clrbits_le32(PHY_REG(phy_base, j + 0x1), 1 << 3);
	}

	if (freq <= (400 * MHz))
		/* DLL bypass */
		setbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	else
		clrbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);

	/* NOTE(review): threshold is 801MHz, not 800 — presumably so an
	 * exact 800MHz target still takes the low-speed setting; confirm
	 * against the PHY documentation before changing.
	 */
	if (freq <= (801 * MHz))
		tmp = 2;
	else
		tmp = 1;

	/* write the frequency-range selector into each lane */
	for (i = 0; i < 4; i++) {
		j = 0x28 + i * 0x10;
		writel(tmp, PHY_REG(phy_base, j));
	}
}
295 
296 static void set_ds_odt(struct dram_info *dram,
297 		       struct px30_sdram_params *sdram_params)
298 {
299 	void __iomem *phy_base = dram->phy;
300 	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;
301 	u32 i, j;
302 
303 	if (sdram_params->dramtype == DDR3) {
304 		cmd_drv = PHY_DDR3_RON_RTT_34ohm;
305 		clk_drv = PHY_DDR3_RON_RTT_45ohm;
306 		dqs_drv = PHY_DDR3_RON_RTT_34ohm;
307 		dqs_odt = PHY_DDR3_RON_RTT_225ohm;
308 	} else {
309 		cmd_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
310 		clk_drv = PHY_DDR4_LPDDR3_RON_RTT_43ohm;
311 		dqs_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
312 		if (sdram_params->dramtype == LPDDR2)
313 			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_DISABLE;
314 		else
315 			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_240ohm;
316 	}
317 	/* DS */
318 	writel(cmd_drv, PHY_REG(phy_base, 0x11));
319 	clrsetbits_le32(PHY_REG(phy_base, 0x12), 0x1f << 3, cmd_drv << 3);
320 	writel(clk_drv, PHY_REG(phy_base, 0x16));
321 	writel(clk_drv, PHY_REG(phy_base, 0x18));
322 
323 	for (i = 0; i < 4; i++) {
324 		j = 0x20 + i * 0x10;
325 		writel(dqs_drv, PHY_REG(phy_base, j));
326 		writel(dqs_drv, PHY_REG(phy_base, j + 0xf));
327 		/* ODT */
328 		writel(dqs_odt, PHY_REG(phy_base, j + 0x1));
329 		writel(dqs_odt, PHY_REG(phy_base, j + 0xe));
330 	}
331 }
332 
/*
 * Full PHY configuration: DLL bypass, generated register list,
 * byte-lane enables for the detected bus width, drive/ODT settings
 * and the board de-skew tables.
 */
static void phy_cfg(struct dram_info *dram,
		    struct px30_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 i;

	phy_dll_bypass_set(dram, sdram_params->ddr_freq);
	/* replay generated PHY register list (0xFFFFFFFF terminates) */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}
	/* PHY reg0[7:4] byte-lane enables: 0xf=32bit, 3=16bit, 1=8bit */
	if (sdram_params->ch.bw == 2) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	} else if (sdram_params->ch.bw == 1) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 3 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 1 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x36), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	}
	set_ds_odt(dram, sdram_params);

	/* deskew */
	setbits_le32(PHY_REG(phy_base, 2), 8);
	copy_to_reg(PHY_REG(phy_base, 0xb0),
		    &sdram_params->skew->a0_a1_skew[0], 15 * 4);
	copy_to_reg(PHY_REG(phy_base, 0x70),
		    &sdram_params->skew->cs0_dm0_skew[0], 44 * 4);
	copy_to_reg(PHY_REG(phy_base, 0xc0),
		    &sdram_params->skew->cs0_dm1_skew[0], 44 * 4);
}
369 
370 static int update_refresh_reg(struct dram_info *dram)
371 {
372 	void __iomem *pctl_base = dram->pctl;
373 	u32 ret;
374 
375 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
376 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
377 
378 	return 0;
379 }
380 
/*
 * rank = 1: cs0
 * rank = 2: cs1
 * Read mode register @mr_num through the controller's MR interface;
 * the 8-bit result is fetched from DDR GRF status register 0.
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *ddr_grf_base = dram->ddr_grf;

	/* set up the MR read: rank select + read-type bit */
	writel((rank << 4) | (1 << 0), pctl_base + DDR_PCTL2_MRCTRL0);
	writel((mr_num << 8), pctl_base + DDR_PCTL2_MRCTRL1);
	/* kick off the transaction, then wait until it has finished */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}
400 
401 u32 disable_zqcs_arefresh(struct dram_info *dram)
402 {
403 	void __iomem *pctl_base = dram->pctl;
404 	u32 dis_auto_zq = 0;
405 
406 	/* disable zqcs */
407 	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
408 		(1ul << 31))) {
409 		dis_auto_zq = 1;
410 		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
411 	}
412 
413 	/* disable auto refresh */
414 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
415 
416 	update_refresh_reg(dram);
417 
418 	return dis_auto_zq;
419 }
420 
421 void restore_zqcs_arefresh(struct dram_info *dram, u32 dis_auto_zq)
422 {
423 	void __iomem *pctl_base = dram->pctl;
424 
425 	/* restore zqcs */
426 	if (dis_auto_zq)
427 		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
428 
429 	/* restore auto refresh */
430 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
431 
432 	update_refresh_reg(dram);
433 }
434 
435 #define MIN(a, b)	(((a) > (b)) ? (b) : (a))
436 #define MAX(a, b)	(((a) > (b)) ? (a) : (b))
437 static u32 check_rd_gate(struct dram_info *dram)
438 {
439 	void __iomem *phy_base = dram->phy;
440 
441 	u32 max_val = 0;
442 	u32 min_val = 0xff;
443 	u32 gate[4];
444 	u32 i, bw;
445 
446 	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
447 	switch (bw) {
448 	case 0x1:
449 		bw = 1;
450 		break;
451 	case 0x3:
452 		bw = 2;
453 		break;
454 	case 0xf:
455 	default:
456 		bw = 4;
457 		break;
458 	}
459 
460 	for (i = 0; i < bw; i++) {
461 		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
462 		max_val = MAX(max_val, gate[i]);
463 		min_val = MIN(min_val, gate[i]);
464 	}
465 
466 	if (max_val > 0x80 || min_val < 0x20)
467 		return -1;
468 	else
469 		return 0;
470 }
471 
/*
 * Run PHY read-gate training on chip-select @cs. ODT on the byte
 * lanes is temporarily forced while training runs and restored
 * afterwards; ZQCS and auto refresh are paused for the duration.
 * Returns 0 on success, -1 on a training failure.
 */
static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val;
	u32 i, j;

	/* save the current ODT setting so it can be restored at the end */
	odt_val = readl(PHY_REG(phy_base, 0x2e));

	/* force training-time ODT values on all four byte lanes */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(PHY_DDR3_RON_RTT_225ohm, PHY_REG(phy_base, j + 0x1));
		writel(0, PHY_REG(phy_base, j + 0xe));
	}

	dis_auto_zq = disable_zqcs_arefresh(dram);

	if (dramtype == DDR4) {
		/* clear per-lane mode bits [1:0] while DDR4 trains */
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0);
	}
	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	/* PHY reg 0xff: bit4 flags an error, low nibble = per-lane result */
	ret = readl(PHY_REG(phy_base, 0xff));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	restore_zqcs_arefresh(dram, dis_auto_zq);

	if (dramtype == DDR4) {
		/* restore the per-lane mode bits to 0x2 */
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0x2);
	}

	if (ret & 0x10) {
		ret = -1;
	} else {
		/* result nibble must match the enabled lanes in reg0[7:4] */
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0)) >> 4);
		ret = (ret == 0) ? 0 : -1;
	}

	/* restore the saved ODT value on all four lanes */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(odt_val, PHY_REG(phy_base, j + 0x1));
		writel(odt_val, PHY_REG(phy_base, j + 0xe));
	}

	return ret;
}
529 
/* rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: be careful of keep mr original val
 *
 * Write @arg into mode register @mr_num via the controller's MR
 * interface; blocks until the transaction completes. Returns 0.
 */
static int write_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 arg,
		    u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait for any in-flight MR access to finish */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		/* DDR3/4: MR number in MRCTRL0, full value in MRCTRL1 */
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		/* LPDDR2/3: MR number and 8-bit value share MRCTRL1 */
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	/* trigger the write and wait for completion */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return 0;
}
561 
/*
 * rank : 1:cs0, 2:cs1, 3:cs0&cs1
 * vrefrate: 4500: 45%,
 *
 * Program the DDR4 VrefDQ level through MR6 training mode, keeping
 * the currently-programmed tCCD_L bits. Returns 0 on success, -1
 * when not DDR4 or @vrefrate lies outside 4500..9200.
 */
static int write_vrefdq(struct dram_info *dram, u32 rank, u32 vrefrate,
			u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;

	if (dramtype != DDR4 || vrefrate < 4500 ||
	    vrefrate > 9200)
		return (-1);

	/* MR6 also carries tCCD_L; preserve the value from DRAMTMG4 */
	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;

	if (vrefrate > 7500) {
		/* range 1 */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2 */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}

	dis_auto_zq = disable_zqcs_arefresh(dram);

	/* enable vrefdq calibration */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvrefdqe */
	/* write vrefdq value
	 * NOTE(review): identical to the enabling write above —
	 * presumably the second MR6 write, issued while training is
	 * enabled, latches the value; confirm against the DDR4 spec.
	 */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvref_time */
	write_mr(dram, rank, 6, value | (0 << 7), dramtype);
	udelay(1);/* tvrefdqx */

	restore_zqcs_arefresh(dram, dis_auto_zq);

	return 0;
}
603 
604 /*
605  * cs: 0:cs0
606  *	   1:cs1
607  *     else cs0+cs1
608  * note: it didn't consider about row_3_4
609  */
610 u64 get_cs_cap(struct px30_sdram_params *sdram_params, u32 cs)
611 {
612 	u32 bg;
613 	u64 cap[2];
614 
615 	if (sdram_params->dramtype == DDR4)
616 		/* DDR4 8bit dram BG = 2(4bank groups),
617 		 * 16bit dram BG = 1 (2 bank groups)
618 		 */
619 		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
620 	else
621 		bg = 0;
622 	cap[0] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
623 		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row);
624 
625 	if (sdram_params->ch.rank == 2)
626 		cap[1] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
627 			bg + sdram_params->ch.bk + sdram_params->ch.cs1_row);
628 	else
629 		cap[1] = 0;
630 
631 	if (cs == 0)
632 		return cap[0];
633 	else if (cs == 1)
634 		return cap[1];
635 	else
636 		return (cap[0] + cap[1]);
637 }
638 
639 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
640 {
641 	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
642 	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
643 }
644 
/*
 * Final system-level DRAM configuration: program the scheduler
 * ddrconfig/devicesize registers, encode the detected geometry into
 * PMU GRF os_reg2/os_reg3 (presumably consumed by later boot stages;
 * confirm with the SPL/U-Boot ram driver) and apply the generated
 * NoC timing set.
 */
static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];

	set_ddrconfig(dram, sdram_params->ch.ddrconfig);

	/* encode the detected geometry into the os_reg2 layout */
	sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
	sys_reg |= SYS_REG_ENC_ROW_3_4(sdram_params->ch.row_3_4);
	sys_reg |= SYS_REG_ENC_RANK(sdram_params->ch.rank);
	sys_reg |= SYS_REG_ENC_COL(sdram_params->ch.col);
	sys_reg |= SYS_REG_ENC_BK(sdram_params->ch.bk);
	sys_reg |= SYS_REG_ENC_BW(sdram_params->ch.bw);
	sys_reg |= SYS_REG_ENC_DBW(sdram_params->ch.dbw);

	SYS_REG_ENC_CS0_ROW_(sdram_params->ch.cs0_row, sys_reg, sys_reg3);
	if (sdram_params->ch.cs1_row)
		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row, sys_reg,
				     sys_reg3);
	sys_reg3 |= SYS_REG_ENC_CS1_COL(sdram_params->ch.col);
	sys_reg3 |= SYS_REG_ENC_VERSION(DDR_SYS_REG_VERSION);

	writel(sys_reg, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	/* per-cs size in units of 64MB for the scheduler */
	cs_cap[0] = get_cs_cap(sdram_params, 0);
	cs_cap[1] = get_cs_cap(sdram_params, 1);
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);

	/* NoC/scheduler timing registers from the parameter set */
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
	/* the same aging value feeds agingx0 and aging0..aging3 */
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->agingx0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging1);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging2);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging3);
}
695 
/*
 * Enable the controller's automatic low-power machinery: clock auto
 * gating via the DDR GRF, plus self-refresh / power-down entry
 * depending on the compile-time SR_IDLE and PD_IDLE thresholds.
 */
static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	/* dram-type specific low-power enable (write-mask in the top half) */
	if (sdram_params->dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
736 
/* Print the detected DRAM type, geometry and total size over the
 * debug UART (printascii/printdec).
 */
static void print_ddr_info(struct px30_sdram_params *sdram_params)
{
	u64 cap;
	u32 bg;
	u32 split;

	/* AXI split configuration affects the reported total size */
	split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON);
	bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
	switch (sdram_params->dramtype) {
	case LPDDR3:
		printascii("LPDDR3\n");
		break;
	case DDR3:
		printascii("DDR3\n");
		break;
	case DDR4:
		printascii("DDR4\n");
		break;
	case LPDDR2:
		printascii("LPDDR2\n");
		break;
	default:
		printascii("Unknown Device\n");
		break;
	}

	printdec(sdram_params->ddr_freq);
	printascii("MHz\n");
	printascii("BW=");
	printdec(8 << sdram_params->ch.bw);
	printascii(" Col=");
	printdec(sdram_params->ch.col);
	printascii(" Bk=");
	printdec(0x1 << sdram_params->ch.bk);
	if (sdram_params->dramtype == DDR4) {
		printascii(" BG=");
		printdec(1 << bg);
	}
	printascii(" CS0 Row=");
	printdec(sdram_params->ch.cs0_row);
	/* show reduced high-16bit row width only when it differs */
	if (sdram_params->ch.cs0_high16bit_row !=
		sdram_params->ch.cs0_row) {
		printascii("/");
		printdec(sdram_params->ch.cs0_high16bit_row);
	}
	if (sdram_params->ch.rank > 1) {
		printascii(" CS1 Row=");
		printdec(sdram_params->ch.cs1_row);
		if (sdram_params->ch.cs1_high16bit_row !=
			sdram_params->ch.cs1_row) {
			printascii("/");
			printdec(sdram_params->ch.cs1_high16bit_row);
		}
	}
	printascii(" CS=");
	printdec(sdram_params->ch.rank);
	printascii(" Die BW=");
	printdec(8 << sdram_params->ch.dbw);

	/* total capacity, adjusted for 3/4-row parts and AXI split mode */
	cap = get_cs_cap(sdram_params, 3);
	if (sdram_params->ch.row_3_4)
		cap = cap * 3 / 4;
	else if (!(split & (1 << SPLIT_BYPASS_OFFSET)))
		cap = cap / 2 + ((split & 0xff) << 24) / 2;

	printascii(" Size=");
	printdec(cap >> 20);
	printascii("MB\n");
}
806 
/*
 * pre_init: 0: pre init for dram cap detect
 * 1: detect correct cap(except cs1 row)info, than reinit
 * 2: after reinit, we detect cs1_row, if cs1_row not equal
 *    to cs0_row and cs is in middle on ddrconf map, we need
 *    to reinit dram, than set the correct ddrconf.
 *
 * Full controller+PHY bring-up sequence. Returns 0 on success,
 * -1 when training or the MR sanity checks fail.
 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	void __iomem *pctl_base = dram->pctl;

	/* full reset of controller and PHY before reconfiguration */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram, sdram_params);
	sdram_params->ch.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram, sdram_params);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* LPDDR3: write MR11 = 3 on both ranks (ODT control — confirm
	 * against the LPDDR3 spec)
	 */
	if (sdram_params->dramtype == LPDDR3)
		write_mr(dram, 3, 11, 3, LPDDR3);

	/* do ddr gate training */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	/* retry until the gate values fall inside the sane window */
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	/* cross-check low bits of MR8 against the expected value —
	 * presumably the dram's reported type/width; confirm vs spec
	 */
	if (sdram_params->dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: when 2cs, both 2 cs should be training */
	if (pre_init != 0 && sdram_params->ch.rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	/* DDR4: program VrefDQ = 56.7% on both ranks */
	if (sdram_params->dramtype == DDR4)
		write_vrefdq(dram, 0x3, 5670, sdram_params->dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
891 
/*
 * Detect the DRAM geometry (columns, banks, rows, device width, bus
 * width, rank count) by writing a test pattern at aliasing addresses
 * and reading it back, then record the result in @sdram_params->ch.
 * Returns the detected capacity in bytes, or 0 on failure.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 row, rowtmp, row_3_4;
	void __iomem *test_addr, *test_addr1;
	u32 dbw;
	u32 cs;
	u32 bw = 1;
	u64 cap = 0;
	u32 dram_type = sdram_params->dramtype;
	u32 pwrctl;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		/* shrink the column guess until the alias disappears */
		for (col = coltmp; col >= 9; col -= 1) {
			writel(0, CONFIG_SYS_SDRAM_BASE);
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
					(1ul << (col + bw - 1ul)));
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
				break;
		}
		if (col == 8) {
			printascii("col error\n");
			goto cap_err;
		}

		/* probe whether the 3rd bank address bit exists */
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bktmp + bw - 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			bk = 3;
		else
			bk = 2;
		if (dram_type == DDR3)
			dbw = 1;
		else
			dbw = 2;
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bw + 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			dbw = 0;
		else
			dbw = 1;
	}
	/* detect row */
	for (row = rowtmp; row > 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (row + bktmp + coltmp + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 12) {
		printascii("row error");
		goto cap_err;
	}
	/* detect row_3_4 */
	test_addr = CONFIG_SYS_SDRAM_BASE;
	test_addr1 = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
			(0x3ul << (row + bktmp + coltmp + bw - 1ul - 1ul)));

	writel(0, test_addr);
	writel(PATTERN, test_addr1);
	if ((readl(test_addr) == 0) &&
	    (readl(test_addr1) == PATTERN))
		row_3_4 = 0;
	else
		row_3_4 = 1;

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* bw and cs detect using phy read gate training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;

	/* re-enable all four byte lanes, then see if a 32-bit bus trains */
	clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	setbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);

	phy_soft_reset(dram);

	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* publish the detected geometry */
	sdram_params->ch.rank = cs + 1;
	sdram_params->ch.col = col;
	sdram_params->ch.bk = bk;
	sdram_params->ch.dbw = dbw;
	sdram_params->ch.bw = bw;
	sdram_params->ch.cs0_row = row;
	sdram_params->ch.cs0_high16bit_row = row;
	if (cs) {
		sdram_params->ch.cs1_row = row;
		sdram_params->ch.cs1_high16bit_row = row;
	} else {
		sdram_params->ch.cs1_row = 0;
		sdram_params->ch.cs1_high16bit_row = 0;
	}
	sdram_params->ch.row_3_4 = row_3_4;

	if (dram_type == DDR4)
		cap = 1llu << (cs + row + bk + col + ((dbw == 0) ? 2 : 1) + bw);
	else
		cap = 1llu << (cs + row + bk + col + bw);

	return cap;

cap_err:
	return 0;
}
1044 
1045 static u32 remodify_sdram_params(struct px30_sdram_params *sdram_params)
1046 {
1047 	u32 tmp = 0, tmp_adr = 0, i;
1048 
1049 	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
1050 		if (sdram_params->pctl_regs.pctl[i][0] == 0) {
1051 			tmp = sdram_params->pctl_regs.pctl[i][1];/* MSTR */
1052 			tmp_adr = i;
1053 		}
1054 	}
1055 
1056 	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));
1057 
1058 	switch (sdram_params->ch.dbw) {
1059 	case 2:
1060 		tmp |= (3ul << 30);
1061 		break;
1062 	case 1:
1063 		tmp |= (2ul << 30);
1064 		break;
1065 	case 0:
1066 	default:
1067 		tmp |= (1ul << 30);
1068 		break;
1069 	}
1070 
1071 	if (sdram_params->ch.rank == 2)
1072 		tmp |= 3 << 24;
1073 	else
1074 		tmp |= 1 << 24;
1075 
1076 	tmp |= (2 - sdram_params->ch.bw) << 12;
1077 
1078 	sdram_params->pctl_regs.pctl[tmp_adr][1] = tmp;
1079 
1080 	return 0;
1081 }
1082 
1083 int dram_detect_high_row(struct dram_info *dram,
1084 			 struct px30_sdram_params *sdram_params,
1085 			 unsigned char channel)
1086 {
1087 	sdram_params->ch.cs0_high16bit_row = sdram_params->ch.cs0_row;
1088 	sdram_params->ch.cs1_high16bit_row = sdram_params->ch.cs1_row;
1089 
1090 	return 0;
1091 }
1092 
/*
 * Probe the number of row address bits on chip-select 1.
 *
 * Only runs when two ranks were detected.  For each candidate row count
 * (walking down from the cs0 value to 13) it writes a pattern at the
 * address that would alias to row bit (row-1) within cs1, writes zero at
 * the base of cs1, and reads both back: if the pattern survives and the
 * base still reads zero, that row bit exists and @row is the answer.
 *
 * @channel is accepted for interface symmetry but unused here.
 *
 * Return: detected cs1 row count, or 0 if rank != 2 / nothing matched.
 * NOTE(review): declared int but returns a u32 count — values are small,
 * so no practical truncation.
 */
static int dram_detect_cs1_row(struct px30_sdram_params *sdram_params,
			       unsigned char channel)
{
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;

	if (sdram_params->ch.rank == 2) {
		/* cs1 is mapped directly above cs0 in the address space */
		cs0_cap = get_cs_cap(sdram_params, 0);

		/* DDR4 folds bank-group bits into the effective bank count */
		if (sdram_params->dramtype == DDR4) {
			if (sdram_params->ch.dbw == 0)
				bktmp = sdram_params->ch.bk + 2;
			else
				bktmp = sdram_params->ch.bk + 1;
		} else {
			bktmp = sdram_params->ch.bk;
		}
		bw = sdram_params->ch.bw;
		coltmp = sdram_params->ch.col;

		/*
		 * because px30 support axi split,min bandwidth
		 * is 8bit. if cs0 is 32bit, cs1 may 32bit or 16bit
		 * so we check low 16bit data when detect cs1 row.
		 * if cs0 is 16bit/8bit, we check low 8bit data.
		 */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* detect cs1 row */
		for (row = sdram_params->ch.cs0_row; row > 12; row--) {
			/* address toggling only candidate row bit (row-1) */
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    cs0_cap +
				    (1ul << (row + bktmp + coltmp + bw - 1ul)));
			writel(0, CONFIG_SYS_SDRAM_BASE + cs0_cap);
			writel(PATTERN, test_addr);

			/*
			 * pattern intact at test_addr AND base untouched
			 * => the row bit is real (no address wraparound)
			 */
			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
1147 
1148 /* return: 0 = success, other = fail */
1149 static int sdram_init_detect(struct dram_info *dram,
1150 			     struct px30_sdram_params *sdram_params)
1151 {
1152 	u32 ret;
1153 	u32 sys_reg = 0;
1154 	u32 sys_reg3 = 0;
1155 
1156 	if (sdram_init_(dram, sdram_params, 0) != 0)
1157 		return -1;
1158 
1159 	if (dram_detect_cap(dram, sdram_params, 0) == 0)
1160 		return -1;
1161 
1162 	/* modify bw, cs related timing */
1163 	remodify_sdram_params(sdram_params);
1164 	/* reinit sdram by real dram cap */
1165 	ret = sdram_init_(dram, sdram_params, 1);
1166 	if (ret != 0)
1167 		goto out;
1168 
1169 	/* redetect cs1 row */
1170 	sdram_params->ch.cs1_row =
1171 		dram_detect_cs1_row(sdram_params, 0);
1172 	if (sdram_params->ch.cs1_row) {
1173 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
1174 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
1175 		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row,
1176 				     sys_reg, sys_reg3);
1177 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
1178 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
1179 	}
1180 
1181 	ret = dram_detect_high_row(dram, sdram_params, 0);
1182 
1183 out:
1184 	return ret;
1185 }
1186 
1187 struct px30_sdram_params
1188 		*get_default_sdram_config(void)
1189 {
1190 	sdram_configs[0].skew = &skew;
1191 
1192 	return &sdram_configs[0];
1193 }
1194 
1195 /* return: 0 = success, other = fail */
1196 int sdram_init(void)
1197 {
1198 	struct px30_sdram_params *sdram_params;
1199 	int ret = 0;
1200 
1201 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
1202 	dram_info.pctl = (void *)DDRC_BASE_ADDR;
1203 	dram_info.grf = (void *)GRF_BASE_ADDR;
1204 	dram_info.cru = (void *)CRU_BASE_ADDR;
1205 	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
1206 	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
1207 	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;
1208 
1209 	sdram_params = get_default_sdram_config();
1210 	ret = sdram_init_detect(&dram_info, sdram_params);
1211 
1212 	if (ret)
1213 		goto error;
1214 
1215 	print_ddr_info(sdram_params);
1216 
1217 	printascii("out\n");
1218 	return ret;
1219 error:
1220 	return (-1);
1221 }
1222 
1223 #else /* CONFIG_TPL_BUILD */
1224 
1225 static int px30_dmc_probe(struct udevice *dev)
1226 {
1227 	struct dram_info *priv = dev_get_priv(dev);
1228 
1229 	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1230 	debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
1231 	priv->info.base = CONFIG_SYS_SDRAM_BASE;
1232 	priv->info.size =
1233 		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);
1234 
1235 	return 0;
1236 }
1237 
1238 static int px30_dmc_get_info(struct udevice *dev, struct ram_info *info)
1239 {
1240 	struct dram_info *priv = dev_get_priv(dev);
1241 
1242 	*info = priv->info;
1243 
1244 	return 0;
1245 }
1246 
/* RAM uclass ops: only size/base reporting is provided at this stage */
static struct ram_ops px30_dmc_ops = {
	.get_info = px30_dmc_get_info,
};

static const struct udevice_id px30_dmc_ids[] = {
	{ .compatible = "rockchip,px30-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_px30) = {
	.name = "rockchip_px30_dmc",
	.id = UCLASS_RAM,
	.of_match = px30_dmc_ids,
	.ops = &px30_dmc_ops,
	.probe = px30_dmc_probe,
	/* per-device struct dram_info allocated by DM before probe */
	.priv_auto_alloc_size = sizeof(struct dram_info),
};
1264 #endif /* CONFIG_TPL_BUILD */
1265