xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_px30.c (revision 1881cdb1bc0debc2d5c5e1f752bc5bca78ae8fc0)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/cru_px30.h>
14 #include <asm/arch/grf_px30.h>
15 #include <asm/arch/hardware.h>
16 #include <asm/arch/sdram_common.h>
17 #include <asm/arch/sdram_px30.h>
18 
19 /*
20  * Because px30 sram size is small, so need define CONFIG_TPL_TINY_FRAMEWORK
21  * to reduce TPL size when build TPL firmware.
22  */
23 #ifdef CONFIG_TPL_BUILD
24 #ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for px30 !!!
26 #endif
27 #endif
28 
29 DECLARE_GLOBAL_DATA_PTR;
/*
 * Runtime state for the px30 DRAM driver.  In the TPL build the full set
 * of controller/PHY/clock/GRF register blocks is needed to bring DRAM up;
 * later stages only keep pmugrf, which holds the os_reg[] words that
 * encode the detected DRAM configuration.
 */
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct px30_ddr_pctl_regs *pctl;	/* DDR controller (upctl2) registers */
	struct px30_ddr_phy_regs *phy;		/* DDR PHY registers */
	struct px30_cru *cru;			/* clock & reset unit */
	struct px30_msch_regs *msch;		/* memory scheduler (NoC) registers */
	struct px30_ddr_grf_regs *ddr_grf;	/* DDR general register file */
	struct px30_grf *grf;			/* SoC general register file */
#endif
	struct ram_info info;			/* RAM info for the ram uclass */
	struct px30_pmugrf *pmugrf;		/* PMU GRF (os_reg[2]/os_reg[3]) */
};
42 
43 #ifdef CONFIG_TPL_BUILD
44 #define PMUGRF_BASE_ADDR		0xFF010000
45 #define CRU_BASE_ADDR			0xFF2B0000
46 #define GRF_BASE_ADDR			0xFF140000
47 #define DDRC_BASE_ADDR			0xFF600000
48 #define DDR_PHY_BASE_ADDR		0xFF2A0000
49 #define SERVER_MSCH0_BASE_ADDR		0xFF530000
50 #define DDR_GRF_BASE_ADDR		0xff630000
51 
52 struct dram_info dram_info;
53 
54 struct px30_sdram_params sdram_configs[] = {
55 #include	"sdram-px30-lpddr3-detect-333.inc"
56 };
57 
58 struct px30_ddr_skew skew = {
59 #include	"sdram-px30-ddr_skew.inc"
60 };
61 
/*
 * Drive the DDR controller and PHY soft resets through the CRU.
 * Each argument is 1 to assert (hold) the corresponding reset and 0 to
 * release it: ctl_srstn/ctl_psrstn are the controller core/APB resets
 * (the controller AXI reset is driven together with ctl_srstn);
 * phy_srstn/phy_psrstn are the PHY core/APB resets.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}
72 
/*
 * Program the DPLL (cru->pll[1]) to the requested output frequency (MHz).
 * The PLL mux is parked on the 24 MHz crystal while the dividers are
 * rewritten, then switched back to the PLL output once it reports lock
 * (or after a ~1 ms poll budget expires).
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int mhz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock poll budget: 1000 x 1 us */

	refdiv = 1;
	/* choose post-dividers per frequency band */
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	/* fout(MHz) = 24 * fbdiv / (refdiv * postdiv1 * postdiv2) == mhz */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is reconfigured */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait for lock; proceeds after the timeout either way */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
115 
116 static void rkclk_configure_ddr(struct dram_info *dram,
117 				struct px30_sdram_params *sdram_params)
118 {
119 	/* for inno ddr phy need 2*freq */
120 	rkclk_set_dpll(dram,  sdram_params->ddr_freq * 2);
121 }
122 
/*
 * Pulse the PHY soft resets: hold both analog and digital logic in reset
 * (PHY reg 0 bits 3:2), then release analog, settle, release digital.
 * The udelay() spacing between steps is required settle time.
 */
static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* clear both de-reset bits -> assert analog + digital reset */
	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET);
	udelay(5);
	setbits_le32(PHY_REG(phy_base, 0), DIGITAL_DERESET);
	udelay(1);
}
134 
/*
 * Program the DDR controller from the (offset, value) table in
 * sdram_params, then apply local policy: self-refresh / power-down idle
 * timeouts, the hardware low-power idle period, and disable automatic
 * ZQCS (re-enabled later as needed).  Always returns 0.
 */
static int pctl_cfg(struct dram_info *dram,
		    struct px30_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *pctl_base = dram->pctl;

	/* register table is terminated by an offset of 0xFFFFFFFF */
	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->pctl_regs.pctl[i][1],
		       pctl_base + sdram_params->pctl_regs.pctl[i][0]);
	}
	/* self-refresh idle (bits 23:16) and power-down idle (bits 4:0) */
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((SR_IDLE & 0xff) << 16) | (PD_IDLE & 0x1f));

	/* hardware low-power idle period field (bits 27:16) = 5 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			5 << 16);
	/* disable zqcs */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);

	return 0;
}
157 
/*
 * Return the ddrconfig value:
 *   (u32)-1 - no matching ddrconfig was found
 *   other   - the ddrconfig value
 * Only supports cs0_row >= cs1_row.
 */
163 static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
164 {
165 	u32 bw, die_bw, col, bank;
166 	u32 i, tmp;
167 	u32 ddrconf = -1;
168 
169 	bw = sdram_params->ch.bw;
170 	die_bw = sdram_params->ch.dbw;
171 	col = sdram_params->ch.col;
172 	bank = sdram_params->ch.bk;
173 
174 	if (sdram_params->dramtype == DDR4) {
175 		if (die_bw == 0)
176 			ddrconf = 7 + bw;
177 		else
178 			ddrconf = 12 - bw;
179 		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
180 	} else {
181 		tmp = ((bank - 2) << 3) | (col + bw - 10);
182 		for (i = 0; i < 7; i++)
183 			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
184 				ddrconf = i;
185 				break;
186 			}
187 		if (i > 6)
188 			printascii("calculate ddrconfig error\n");
189 	}
190 
191 	return ddrconf;
192 }
193 
194 /* n: Unit bytes */
195 static void copy_to_reg(u32 *dest, u32 *src, u32 n)
196 {
197 	int i;
198 
199 	for (i = 0; i < n / sizeof(u32); i++) {
200 		writel(*src, dest);
201 		src++;
202 		dest++;
203 	}
204 }
205 
206 /*
207  * calculate controller dram address map, and setting to register.
208  * argument sdram_params->ch.ddrconf must be right value before
209  * call this function.
210  */
static void set_ctl_address_map(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 cs_pst, bg, max_row, ddrconf;
	u32 i;

	if (sdram_params->dramtype == DDR4)
		/*
		 * DDR4 8bit dram BG = 2(4bank groups),
		 * 16bit dram BG = 1 (2 bank groups)
		 */
		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
	else
		bg = 0;

	/* cs_pst = total address bits of one rank = position of the CS bit */
	cs_pst = sdram_params->ch.bw + sdram_params->ch.col +
		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row;
	if (cs_pst >= 32 || sdram_params->ch.rank == 1)
		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);	/* CS not mapped */
	else
		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);

	/* for DDR4, translate the ddrconfig back to its addrmap[] index */
	ddrconf = sdram_params->ch.ddrconfig;
	if (sdram_params->dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
				ddrconf = 7 + i;
				break;
			}
		}
	}

	/* program ADDRMAP1..ADDRMAP8 from the precomputed table */
	copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
		    &addrmap[ddrconf][0], 8 * 4);
	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);

	if (max_row < 12)
		printascii("set addrmap fail\n");
	/* need to disable row ahead of rank by set to 0xf */
	for (i = 17; i > max_row; i--)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32),
			0xf << ((i - 12) * 8 % 32));

	/* LPDDR2/LPDDR3 parts with 3/4 row density need the special flag */
	if ((sdram_params->dramtype == LPDDR3 ||
	     sdram_params->dramtype == LPDDR2) &&
		 sdram_params->ch.row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->dramtype == DDR4 && sdram_params->ch.bw != 0x2)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
}
264 
/*
 * Configure PHY DLL bypass and per-lane clock settings for @freq.
 * NOTE(review): phy_cfg() passes sdram_params->ddr_freq, which looks like
 * a value in MHz, while the comparisons below use (400 * MHz) — so the
 * "<= 400 MHz" / "<= 801 MHz" branches appear to always be taken.
 * Confirm the intended unit before changing anything here.
 */
static void phy_dll_bypass_set(struct dram_info *dram, u32 freq)
{
	void __iomem *phy_base = dram->phy;
	u32 tmp;
	u32 i, j;

	/* cmd/clock group: set bit 4 of reg 0x13, clear bit 3 of reg 0x14 */
	setbits_le32(PHY_REG(phy_base, 0x13), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x14), 1 << 3);
	/* same pattern per byte lane (regs 0x26/0x27 + lane * 0x10) */
	for (i = 0; i < 4; i++) {
		j = 0x26 + i * 0x10;
		setbits_le32(PHY_REG(phy_base, j), 1 << 4);
		clrbits_le32(PHY_REG(phy_base, j + 0x1), 1 << 3);
	}

	if (freq <= (400 * MHz))
		/* DLL bypass */
		setbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	else
		clrbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);

	if (freq <= (801 * MHz))
		tmp = 2;
	else
		tmp = 1;

	/* per-lane setting in reg 0x28 + lane * 0x10 */
	for (i = 0; i < 4; i++) {
		j = 0x28 + i * 0x10;
		writel(tmp, PHY_REG(phy_base, j));
	}
}
295 
/*
 * Program PHY drive strength for command/clock/DQS lines and the DQS ODT
 * termination, selected by DRAM type.
 */
static void set_ds_odt(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;
	u32 i, j;

	if (sdram_params->dramtype == DDR3) {
		cmd_drv = PHY_DDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR3_RON_RTT_45ohm;
		dqs_drv = PHY_DDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR3_RON_RTT_225ohm;
	} else {
		cmd_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR4_LPDDR3_RON_RTT_43ohm;
		dqs_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		/* LPDDR2 runs with DQS ODT off */
		if (sdram_params->dramtype == LPDDR2)
			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_DISABLE;
		else
			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_240ohm;
	}
	/* DS */
	writel(cmd_drv, PHY_REG(phy_base, 0x11));
	clrsetbits_le32(PHY_REG(phy_base, 0x12), 0x1f << 3, cmd_drv << 3);
	writel(clk_drv, PHY_REG(phy_base, 0x16));
	writel(clk_drv, PHY_REG(phy_base, 0x18));

	/* per-byte-lane DQS DS/ODT registers live at 0x20 + lane * 0x10 */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(dqs_drv, PHY_REG(phy_base, j));
		writel(dqs_drv, PHY_REG(phy_base, j + 0xf));
		/* ODT */
		writel(dqs_odt, PHY_REG(phy_base, j + 0x1));
		writel(dqs_odt, PHY_REG(phy_base, j + 0xe));
	}
}
332 
/*
 * Full PHY configuration: DLL mode, register table from sdram_params,
 * byte-lane enable per bus width, drive strength/ODT, and the de-skew
 * tables.
 */
static void phy_cfg(struct dram_info *dram,
		    struct px30_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 i;

	phy_dll_bypass_set(dram, sdram_params->ddr_freq);
	/* PHY register table, terminated by an offset of 0xFFFFFFFF */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}
	/* byte-lane enable mask in reg 0 bits 7:4: 32/16/8-bit bus */
	if (sdram_params->ch.bw == 2) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	} else if (sdram_params->ch.bw == 1) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 3 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 1 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x36), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	}
	set_ds_odt(dram, sdram_params);

	/* deskew */
	setbits_le32(PHY_REG(phy_base, 2), 8);
	copy_to_reg(PHY_REG(phy_base, 0xb0),
		    &sdram_params->skew->a0_a1_skew[0], 15 * 4);
	copy_to_reg(PHY_REG(phy_base, 0x70),
		    &sdram_params->skew->cs0_dm0_skew[0], 44 * 4);
	copy_to_reg(PHY_REG(phy_base, 0xc0),
		    &sdram_params->skew->cs0_dm1_skew[0], 44 * 4);
}
369 
370 static int update_refresh_reg(struct dram_info *dram)
371 {
372 	void __iomem *pctl_base = dram->pctl;
373 	u32 ret;
374 
375 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
376 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
377 
378 	return 0;
379 }
380 
381 /*
382  * rank = 1: cs0
383  * rank = 2: cs1
384  */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *ddr_grf_base = dram->ddr_grf;

	/* MR read request: bit 0 = read type, bits 5:4 = rank select */
	writel((rank << 4) | (1 << 0), pctl_base + DDR_PCTL2_MRCTRL0);
	writel((mr_num << 8), pctl_base + DDR_PCTL2_MRCTRL1);
	/* kick the transaction (mr_wr, bit 31) and wait until it clears */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	/* the MR data is latched into DDR GRF status reg 0, low 8 bits */
	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}
400 
401 u32 disable_zqcs_arefresh(struct dram_info *dram)
402 {
403 	void __iomem *pctl_base = dram->pctl;
404 	u32 dis_auto_zq = 0;
405 
406 	/* disable zqcs */
407 	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
408 		(1ul << 31))) {
409 		dis_auto_zq = 1;
410 		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
411 	}
412 
413 	/* disable auto refresh */
414 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
415 
416 	update_refresh_reg(dram);
417 
418 	return dis_auto_zq;
419 }
420 
421 void restore_zqcs_arefresh(struct dram_info *dram, u32 dis_auto_zq)
422 {
423 	void __iomem *pctl_base = dram->pctl;
424 
425 	/* restore zqcs */
426 	if (dis_auto_zq)
427 		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
428 
429 	/* restore auto refresh */
430 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
431 
432 	update_refresh_reg(dram);
433 }
434 
/* NOTE: classic double-evaluation macros — only use with side-effect-free args */
#define MIN(a, b)	(((a) > (b)) ? (b) : (a))
#define MAX(a, b)	(((a) > (b)) ? (a) : (b))
/*
 * Sanity-check the read-gate training result: read the per-byte gate
 * values from PHY regs 0xfb.. and reject any outside (0x20, 0x80).
 * Returns 0 when the gates look sane, (u32)-1 otherwise (callers only
 * test for non-zero).
 */
static u32 check_rd_gate(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	u32 max_val = 0;
	u32 min_val = 0xff;
	u32 gate[4];
	u32 i, bw;

	/* active byte lanes are encoded in PHY reg 0 bits 7:4 */
	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
	switch (bw) {
	case 0x1:
		bw = 1;
		break;
	case 0x3:
		bw = 2;
		break;
	case 0xf:
	default:
		bw = 4;
		break;
	}

	for (i = 0; i < bw; i++) {
		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
		max_val = MAX(max_val, gate[i]);
		min_val = MIN(min_val, gate[i]);
	}

	if (max_val > 0x80 || min_val < 0x20)
		return -1;
	else
		return 0;
}
471 
/*
 * Run PHY read-gate training for one chip select (cs 0 or 1).
 * DQS ODT is temporarily forced for training and restored afterwards;
 * ZQCS and auto-refresh are paused for the duration.
 * Returns 0 on success, -1 on failure.
 */
static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val;
	u32 i, j;

	/* remember the current DQS ODT setting so it can be restored */
	odt_val = readl(PHY_REG(phy_base, 0x2e));

	/* force training-friendly ODT on every byte lane */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(PHY_DDR3_RON_RTT_225ohm, PHY_REG(phy_base, j + 0x1));
		writel(0, PHY_REG(phy_base, j + 0xe));
	}

	dis_auto_zq = disable_zqcs_arefresh(dram);

	/* DDR4: clear the low 2 bits of the per-lane regs during training */
	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0);
	}
	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0xff));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	restore_zqcs_arefresh(dram, dis_auto_zq);

	/* DDR4: restore the per-lane bits changed above */
	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0x2);
	}

	/*
	 * bit 4 of the result flags a training error; otherwise the low
	 * 4 bits must match the enabled-lane mask in PHY reg 0 bits 7:4.
	 */
	if (ret & 0x10) {
		ret = -1;
	} else {
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0)) >> 4);
		ret = (ret == 0) ? 0 : -1;
	}

	/* restore the saved DQS ODT on every byte lane */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(odt_val, PHY_REG(phy_base, j + 0x1));
		writel(odt_val, PHY_REG(phy_base, j + 0xe));
	}

	return ret;
}
529 
/*
 * rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: take care to preserve the mode register's original value
 */
static int write_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 arg,
		    u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait for any in-flight MR operation to finish */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		/* DDR3/4: MR number in MRCTRL0, full MR value in MRCTRL1 */
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		/* LPDDR2/3: MR number and 8-bit operand both in MRCTRL1 */
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	/* kick the write (mr_wr, bit 31) and wait until it completes */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return 0;
}
561 
562 /*
563  * rank : 1:cs0, 2:cs1, 3:cs0&cs1
564  * vrefrate: 4500: 45%,
565  */
/*
 * DDR4 only: program VrefDQ via MR6.
 * rank: 1 = cs0, 2 = cs1, 3 = cs0 & cs1.
 * vrefrate is in hundredths of a percent (4500 = 45.00%); accepted range
 * is 4500..9200.  Returns 0 on success, -1 on bad arguments.
 */
static int write_vrefdq(struct dram_info *dram, u32 rank, u32 vrefrate,
			u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;

	if (dramtype != DDR4 || vrefrate < 4500 ||
	    vrefrate > 9200)
		return (-1);

	/* MR6 also carries tCCD_L (encoded as tCCD_L - 4) in bits 12:10 */
	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;

	/* MR6 Vref ranges: range 1 starts at 60.00%, range 2 at 45.00%,
	 * both in 0.65% steps; bit 6 selects range 2 */
	if (vrefrate > 7500) {
		/* range 1 */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2 */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}

	dis_auto_zq = disable_zqcs_arefresh(dram);

	/* enable vrefdq calibratin */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvrefdqe */
	/* write vrefdq value */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvref_time */
	write_mr(dram, rank, 6, value | (0 << 7), dramtype);
	udelay(1);/* tvrefdqx */

	restore_zqcs_arefresh(dram, dis_auto_zq);

	return 0;
}
603 
604 /*
605  * cs: 0:cs0
606  *	   1:cs1
607  *     else cs0+cs1
608  * note: it didn't consider about row_3_4
609  */
610 u64 get_cs_cap(struct px30_sdram_params *sdram_params, u32 cs)
611 {
612 	u32 bg;
613 	u64 cap[2];
614 
615 	if (sdram_params->dramtype == DDR4)
616 		/* DDR4 8bit dram BG = 2(4bank groups),
617 		 * 16bit dram BG = 1 (2 bank groups)
618 		 */
619 		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
620 	else
621 		bg = 0;
622 	cap[0] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
623 		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row);
624 
625 	if (sdram_params->ch.rank == 2)
626 		cap[1] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
627 			bg + sdram_params->ch.bk + sdram_params->ch.cs1_row);
628 	else
629 		cap[1] = 0;
630 
631 	if (cs == 0)
632 		return cap[0];
633 	else if (cs == 1)
634 		return cap[1];
635 	else
636 		return (cap[0] + cap[1]);
637 }
638 
/*
 * Tell the memory scheduler which ddrconfig (address mapping) is in use
 * (same value in both byte fields of deviceconf) and clear bits 15:14 of
 * GRF soc_noc_con[1] via the write-masked register helper.
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
}
644 
/*
 * Publish the final DRAM configuration: program the memory scheduler
 * (ddrconfig, per-cs sizes, NoC timings) and encode the geometry into
 * pmugrf os_reg[2]/os_reg[3] so later boot stages can decode it.
 */
static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];

	set_ddrconfig(dram, sdram_params->ch.ddrconfig);

	/* pack the detected geometry into the os_reg encoding */
	sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
	sys_reg |= SYS_REG_ENC_ROW_3_4(sdram_params->ch.row_3_4);
	sys_reg |= SYS_REG_ENC_RANK(sdram_params->ch.rank);
	sys_reg |= SYS_REG_ENC_COL(sdram_params->ch.col);
	sys_reg |= SYS_REG_ENC_BK(sdram_params->ch.bk);
	sys_reg |= SYS_REG_ENC_BW(sdram_params->ch.bw);
	sys_reg |= SYS_REG_ENC_DBW(sdram_params->ch.dbw);

	SYS_REG_ENC_CS0_ROW_(sdram_params->ch.cs0_row, sys_reg, sys_reg3);
	if (sdram_params->ch.cs1_row)
		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row, sys_reg,
				     sys_reg3);
	sys_reg3 |= SYS_REG_ENC_CS1_COL(sdram_params->ch.col);
	sys_reg3 |= SYS_REG_ENC_VERSION(DDR_SYS_REG_VERSION);

	writel(sys_reg, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	/* per-cs size in units of 64 MiB (cap >> 20 gives MiB) */
	cs_cap[0] = get_cs_cap(sdram_params, 0);
	cs_cap[1] = get_cs_cap(sdram_params, 1);
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);

	/* scheduler timing registers from the precomputed NoC timings */
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
	/* the same aging value feeds agingx0 and aging0..aging3 */
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->agingx0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging1);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging2);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging3);
}
695 
/*
 * Enable the automatic low-power features: controller clock gating in the
 * DDR GRF, low-power mode selection per DRAM type, and self-refresh /
 * power-down entry in the controller (gated on SR_IDLE / PD_IDLE).
 */
static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	/* low-power mode select (write-masked: 0x7 << 16 is the mask) */
	if (sdram_params->dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
736 
/*
 * Encode a decimal number as BCD so printhex4() displays the original
 * decimal digits (333 -> 0x333, 1066 -> 0x1066).  Handles up to four
 * decimal digits, matching printhex4()'s four hex digits; the original
 * version dropped the thousands digit, so e.g. 1066 printed as "0066".
 * Digits above the fourth are still truncated.
 */
static int print_dec2hex(int i)
{
	int tmp;

	tmp = (i % 10);
	tmp |= ((i % 100) / 10) << 4;
	tmp |= ((i % 1000) / 100) << 8;
	tmp |= ((i % 10000) / 1000) << 12;	/* thousands digit */

	return tmp;
}
747 
748 /*
749  * pre_init: 0: pre init for dram cap detect
750  * 1: detect correct cap(except cs1 row)info, than reinit
751  * 2: after reinit, we detect cs1_row, if cs1_row not equal
752  *    to cs0_row and cs is in middle on ddrconf map, we need
753  *    to reinit dram, than set the correct ddrconf.
754  */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	void __iomem *pctl_base = dram->pctl;

	/* assert all controller + PHY resets, then release step by step */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* on the reinit pass, announce the detected type and frequency */
	if (pre_init == 1) {
		switch (sdram_params->dramtype) {
		case DDR3:
			printascii("DDR3\n");
			break;
		case DDR4:
			printascii("DDR4\n");
			break;
		case LPDDR2:
			printascii("LPDDR2\n");
			break;
		case LPDDR3:
		default:
			printascii("LPDDR3\n");
			break;
		}
		printhex4(print_dec2hex(sdram_params->ddr_freq));
		printascii("MHz\n");
	}

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram, sdram_params);
	sdram_params->ch.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram, sdram_params);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->dramtype == LPDDR3)
		write_mr(dram, 3, 11, 3, LPDDR3);

	/* do ddr gate training */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	/* retry until the gate values fall inside the sane window */
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	/* LPDDR2/3: cross-check the I/O width reported in MR8 bits 1:0 */
	if (sdram_params->dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: when 2cs, both 2 cs should be training */
	if (pre_init != 0 && sdram_params->ch.rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	/* DDR4: set VrefDQ to 56.70% on both ranks */
	if (sdram_params->dramtype == DDR4)
		write_vrefdq(dram, 0x3, 5670, sdram_params->dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
852 
/*
 * Detect the real DRAM geometry by writing test patterns at addresses
 * that alias when the guessed column/bank/row counts are too large, then
 * detect rank and bus width via read-gate training.  Fills in
 * sdram_params->ch and returns the total capacity in bytes, or 0 on
 * failure.  @channel is currently unused.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 row, rowtmp, row_3_4;
	void __iomem *test_addr, *test_addr1;
	u32 dbw;
	u32 cs;
	u32 bw = 1;
	u64 cap = 0;
	u32 dram_type = sdram_params->dramtype;
	u32 pwrctl;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		/*
		 * walk columns down: the largest col for which the pattern
		 * does not alias back to base is the real column count
		 */
		for (col = coltmp; col >= 9; col -= 1) {
			writel(0, CONFIG_SYS_SDRAM_BASE);
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
					(1ul << (col + bw - 1ul)));
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
				break;
		}
		if (col == 8) {
			printascii("col error\n");
			goto cap_err;
		}

		/* 8 banks if the highest bank-address bit is distinct */
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bktmp + bw - 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			bk = 3;
		else
			bk = 2;
		if (dram_type == DDR3)
			dbw = 1;
		else
			dbw = 2;
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bw + 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			dbw = 0;
		else
			dbw = 1;
	}
	/* detect row */
	for (row = rowtmp; row > 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (row + bktmp + coltmp + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 12) {
		printascii("row error");
		goto cap_err;
	}
	/* detect row_3_4 (device populated to only 3/4 of the row space) */
	test_addr = CONFIG_SYS_SDRAM_BASE;
	test_addr1 = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
			(0x3ul << (row + bktmp + coltmp + bw - 1ul - 1ul)));

	writel(0, test_addr);
	writel(PATTERN, test_addr1);
	if ((readl(test_addr) == 0) &&
	    (readl(test_addr1) == PATTERN))
		row_3_4 = 0;
	else
		row_3_4 = 1;

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* bw and cs detect using phy read gate training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;	/* cs1 trains OK -> two ranks present */
	else
		cs = 0;

	/* enable all byte lanes, then see if 32-bit training succeeds */
	clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	setbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);

	phy_soft_reset(dram);

	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* record the detected geometry */
	sdram_params->ch.rank = cs + 1;
	sdram_params->ch.col = col;
	sdram_params->ch.bk = bk;
	sdram_params->ch.dbw = dbw;
	sdram_params->ch.bw = bw;
	sdram_params->ch.cs0_row = row;
	sdram_params->ch.cs0_high16bit_row = row;
	if (cs) {
		sdram_params->ch.cs1_row = row;
		sdram_params->ch.cs1_high16bit_row = row;
	} else {
		sdram_params->ch.cs1_row = 0;
		sdram_params->ch.cs1_high16bit_row = 0;
	}
	sdram_params->ch.row_3_4 = row_3_4;

	/* capacity = 2 ^ (cs + row + bank(+bg) + col + bw) bytes */
	if (dram_type == DDR4)
		cap = 1llu << (cs + row + bk + col + ((dbw == 0) ? 2 : 1) + bw);
	else
		cap = 1llu << (cs + row + bk + col + bw);

	return cap;

cap_err:
	return 0;
}
1005 
/*
 * Patch the saved MSTR value (register offset 0) in the pctl register
 * table so the next pctl_cfg() pass programs the detected device width
 * (dbw, bits 31:30), active ranks (bits 25:24) and bus-width mode
 * (bits 13:12) instead of the defaults from the .inc table.
 * Always returns 0.
 */
static u32 remodify_sdram_params(struct px30_sdram_params *sdram_params)
{
	u32 tmp = 0, tmp_adr = 0, i;

	/* find the MSTR entry (offset 0) in the register table */
	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		if (sdram_params->pctl_regs.pctl[i][0] == 0) {
			tmp = sdram_params->pctl_regs.pctl[i][1];/* MSTR */
			tmp_adr = i;
		}
	}

	/* clear device-width, rank and bus-width fields */
	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));

	switch (sdram_params->ch.dbw) {
	case 2:
		tmp |= (3ul << 30);
		break;
	case 1:
		tmp |= (2ul << 30);
		break;
	case 0:
	default:
		tmp |= (1ul << 30);
		break;
	}

	/* active ranks: 3 = both cs, 1 = cs0 only */
	if (sdram_params->ch.rank == 2)
		tmp |= 3 << 24;
	else
		tmp |= 1 << 24;

	/* bus width: 0 = full, 1 = half, 2 = quarter */
	tmp |= (2 - sdram_params->ch.bw) << 12;

	sdram_params->pctl_regs.pctl[tmp_adr][1] = tmp;

	return 0;
}
1043 
/*
 * px30 does not perform a separate "high 16-bit" row detection: simply
 * mirror the already-detected cs0/cs1 row counts into the high16bit
 * fields.  @dram and @channel are unused.  Always returns 0.
 */
int dram_detect_high_row(struct dram_info *dram,
			 struct px30_sdram_params *sdram_params,
			 unsigned char channel)
{
	sdram_params->ch.cs0_high16bit_row = sdram_params->ch.cs0_row;
	sdram_params->ch.cs1_high16bit_row = sdram_params->ch.cs1_row;

	return 0;
}
1053 
/*
 * Detect the cs1 row count by the same aliasing trick as dram_detect_cap,
 * probing addresses just above the cs0 capacity.  Returns the detected
 * cs1 row count, or 0 when single-rank or no row matched.  @channel is
 * unused.
 */
static int dram_detect_cs1_row(struct px30_sdram_params *sdram_params,
			       unsigned char channel)
{
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;

	if (sdram_params->ch.rank == 2) {
		cs0_cap = get_cs_cap(sdram_params, 0);

		/* DDR4 folds the bank-group bits into the bank count */
		if (sdram_params->dramtype == DDR4) {
			if (sdram_params->ch.dbw == 0)
				bktmp = sdram_params->ch.bk + 2;
			else
				bktmp = sdram_params->ch.bk + 1;
		} else {
			bktmp = sdram_params->ch.bk;
		}
		bw = sdram_params->ch.bw;
		coltmp = sdram_params->ch.col;

		/*
		 * because px30 support axi split,min bandwidth
		 * is 8bit. if cs0 is 32bit, cs1 may 32bit or 16bit
		 * so we check low 16bit data when detect cs1 row.
		 * if cs0 is 16bit/8bit, we check low 8bit data.
		 */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* detect cs1 row */
		for (row = sdram_params->ch.cs0_row; row > 12; row--) {
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    cs0_cap +
				    (1ul << (row + bktmp + coltmp + bw - 1ul)));
			writel(0, CONFIG_SYS_SDRAM_BASE + cs0_cap);
			writel(PATTERN, test_addr);

			/* first non-aliasing row is the real cs1 row count */
			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
1108 
1109 /* return: 0 = success, other = fail */
1110 static int sdram_init_detect(struct dram_info *dram,
1111 			     struct px30_sdram_params *sdram_params)
1112 {
1113 	u32 ret;
1114 	u32 sys_reg = 0;
1115 	u32 sys_reg3 = 0;
1116 
1117 	if (sdram_init_(dram, sdram_params, 0) != 0)
1118 		return -1;
1119 
1120 	if (dram_detect_cap(dram, sdram_params, 0) == 0)
1121 		return -1;
1122 
1123 	/* modify bw, cs related timing */
1124 	remodify_sdram_params(sdram_params);
1125 	/* reinit sdram by real dram cap */
1126 	ret = sdram_init_(dram, sdram_params, 1);
1127 	if (ret != 0)
1128 		goto out;
1129 
1130 	/* redetect cs1 row */
1131 	sdram_params->ch.cs1_row =
1132 		dram_detect_cs1_row(sdram_params, 0);
1133 	if (sdram_params->ch.cs1_row) {
1134 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
1135 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
1136 		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row,
1137 				     sys_reg, sys_reg3);
1138 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
1139 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
1140 	}
1141 
1142 	ret = dram_detect_high_row(dram, sdram_params, 0);
1143 
1144 out:
1145 	return ret;
1146 }
1147 
1148 struct px30_sdram_params
1149 		*get_default_sdram_config(void)
1150 {
1151 	sdram_configs[0].skew = &skew;
1152 
1153 	return &sdram_configs[0];
1154 }
1155 
1156 /* return: 0 = success, other = fail */
1157 int sdram_init(void)
1158 {
1159 	struct px30_sdram_params *sdram_params;
1160 	int ret = 0;
1161 
1162 	printascii("DDR Init V1.07\n");
1163 
1164 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
1165 	dram_info.pctl = (void *)DDRC_BASE_ADDR;
1166 	dram_info.grf = (void *)GRF_BASE_ADDR;
1167 	dram_info.cru = (void *)CRU_BASE_ADDR;
1168 	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
1169 	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
1170 	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;
1171 
1172 	sdram_params = get_default_sdram_config();
1173 	ret = sdram_init_detect(&dram_info, sdram_params);
1174 
1175 	if (ret)
1176 		goto error;
1177 
1178 	printascii("out\n");
1179 	return ret;
1180 error:
1181 	return (-1);
1182 }
1183 
1184 #else /* CONFIG_TPL_BUILD */
1185 
1186 static int px30_dmc_probe(struct udevice *dev)
1187 {
1188 	struct dram_info *priv = dev_get_priv(dev);
1189 
1190 	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
1191 	debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
1192 	priv->info.base = CONFIG_SYS_SDRAM_BASE;
1193 	priv->info.size =
1194 		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);
1195 
1196 	return 0;
1197 }
1198 
1199 static int px30_dmc_get_info(struct udevice *dev, struct ram_info *info)
1200 {
1201 	struct dram_info *priv = dev_get_priv(dev);
1202 
1203 	*info = priv->info;
1204 
1205 	return 0;
1206 }
1207 
/* RAM uclass operations: only get_info is implemented for this driver. */
static struct ram_ops px30_dmc_ops = {
	.get_info = px30_dmc_get_info,
};
1211 
/* Device tree compatible strings this driver binds to. */
static const struct udevice_id px30_dmc_ids[] = {
	{ .compatible = "rockchip,px30-dmc" },
	{ }
};
1216 
/* Driver-model registration for the PX30 dynamic memory controller. */
U_BOOT_DRIVER(dmc_px30) = {
	.name = "rockchip_px30_dmc",
	.id = UCLASS_RAM,
	.of_match = px30_dmc_ids,
	.ops = &px30_dmc_ops,
	.probe = px30_dmc_probe,
	/* driver model allocates the per-device struct dram_info */
	.priv_auto_alloc_size = sizeof(struct dram_info),
};
1225 #endif /* CONFIG_TPL_BUILD */
1226