xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_px30.c (revision 87e4c6020eff05133e40ab8b7b0e37e6a2be37e4)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/cru_px30.h>
14 #include <asm/arch/grf_px30.h>
15 #include <asm/arch/hardware.h>
16 #include <asm/arch/sdram_common.h>
17 #include <asm/arch/sdram_px30.h>
18 
/*
 * The px30 SRAM is small, so CONFIG_TPL_TINY_FRAMEWORK must be defined
 * to reduce the TPL size when building the TPL firmware.
 */
23 #ifdef CONFIG_TPL_BUILD
24 #ifndef CONFIG_TPL_TINY_FRAMEWORK
25 #error please defined CONFIG_TPL_TINY_FRAMEWORK for px30 !!!
26 #endif
27 #endif
28 
29 #ifdef CONFIG_TPL_BUILD
30 
DECLARE_GLOBAL_DATA_PTR;

/*
 * Register-bank pointers and RAM info used throughout the TPL SDRAM
 * init; the pointers are filled in by sdram_init() from the fixed
 * base addresses defined below.
 */
struct dram_info {
	struct ddr_pctl_regs *pctl;		/* DDR controller (upctl2) regs */
	struct ddr_phy_regs *phy;		/* DDR PHY regs */
	struct px30_cru *cru;			/* clock & reset unit */
	struct msch_regs *msch;			/* memory scheduler (NoC) */
	struct px30_ddr_grf_regs *ddr_grf;	/* DDR general register file */
	struct px30_grf *grf;			/* SoC general register file */
	struct ram_info info;			/* generic RAM base/size info */
	struct px30_pmugrf *pmugrf;		/* PMU general register file */
};
42 
/* px30 fixed peripheral base addresses used by the TPL DRAM init */
#define PMUGRF_BASE_ADDR		0xFF010000
#define CRU_BASE_ADDR			0xFF2B0000
#define GRF_BASE_ADDR			0xFF140000
#define DDRC_BASE_ADDR			0xFF600000
#define DDR_PHY_BASE_ADDR		0xFF2A0000
#define SERVER_MSCH0_BASE_ADDR		0xFF530000
#define DDR_GRF_BASE_ADDR		0xff630000

struct dram_info dram_info;

/*
 * Default sdram parameter set; the skew pointer is wired up in
 * get_default_sdram_config().
 */
struct px30_sdram_params sdram_configs[] = {
#include	"sdram-px30-lpddr3-detect-333.inc"
};

/* per-signal de-skew settings applied to the DDR PHY by phy_cfg() */
struct ddr_phy_skew skew = {
#include	"sdram-px30-ddr_skew.inc"
};
60 
61 static void rkclk_ddr_reset(struct dram_info *dram,
62 			    u32 ctl_srstn, u32 ctl_psrstn,
63 			    u32 phy_srstn, u32 phy_psrstn)
64 {
65 	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
66 	       upctl2_asrstn_req(ctl_srstn),
67 	       &dram->cru->softrst_con[1]);
68 	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
69 	       &dram->cru->softrst_con[2]);
70 }
71 
72 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
73 {
74 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
75 	int delay = 1000;
76 	u32 mhz = hz / MHz;
77 
78 	refdiv = 1;
79 	if (mhz <= 300) {
80 		postdiv1 = 4;
81 		postdiv2 = 2;
82 	} else if (mhz <= 400) {
83 		postdiv1 = 6;
84 		postdiv2 = 1;
85 	} else if (mhz <= 600) {
86 		postdiv1 = 4;
87 		postdiv2 = 1;
88 	} else if (mhz <= 800) {
89 		postdiv1 = 3;
90 		postdiv2 = 1;
91 	} else if (mhz <= 1600) {
92 		postdiv1 = 2;
93 		postdiv2 = 1;
94 	} else {
95 		postdiv1 = 1;
96 		postdiv2 = 1;
97 	}
98 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
99 
100 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
101 
102 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
103 	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
104 	       &dram->cru->pll[1].con1);
105 
106 	while (delay > 0) {
107 		udelay(1);
108 		if (LOCK(readl(&dram->cru->pll[1].con1)))
109 			break;
110 		delay--;
111 	}
112 
113 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
114 }
115 
116 static void rkclk_configure_ddr(struct dram_info *dram,
117 				struct px30_sdram_params *sdram_params)
118 {
119 	/* for inno ddr phy need 2*freq */
120 	rkclk_set_dpll(dram,  sdram_params->base.ddr_freq * MHz * 2);
121 }
122 
123 /* return ddrconfig value
124  *       (-1), find ddrconfig fail
125  *       other, the ddrconfig value
126  * only support cs0_row >= cs1_row
127  */
128 static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
129 {
130 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
131 	u32 bw, die_bw, col, bank;
132 	u32 i, tmp;
133 	u32 ddrconf = -1;
134 
135 	bw = cap_info->bw;
136 	die_bw = cap_info->dbw;
137 	col = cap_info->col;
138 	bank = cap_info->bk;
139 
140 	if (sdram_params->base.dramtype == DDR4) {
141 		if (die_bw == 0)
142 			ddrconf = 7 + bw;
143 		else
144 			ddrconf = 12 - bw;
145 		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
146 	} else {
147 		tmp = ((bank - 2) << 3) | (col + bw - 10);
148 		for (i = 0; i < 7; i++)
149 			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
150 				ddrconf = i;
151 				break;
152 			}
153 		if (i > 6)
154 			printascii("calculate ddrconfig error\n");
155 	}
156 
157 	return ddrconf;
158 }
159 
/*
 * Calculate the controller DRAM address map and program it into the
 * ADDRMAP registers.
 *
 * sdram_params->ch.cap_info.ddrconfig must already hold the correct
 * value (see calculate_ddrconfig()) before this function is called.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 cs_pst, bg, max_row, ddrconf;
	u32 i;

	if (sdram_params->base.dramtype == DDR4)
		/*
		 * DDR4 8bit dram BG = 2(4bank groups),
		 * 16bit dram BG = 1 (2 bank groups)
		 */
		bg = (cap_info->dbw == 0) ? 2 : 1;
	else
		bg = 0;

	/* cs_pst: sum of all lower address field widths, i.e. the CS bit position */
	cs_pst = cap_info->bw + cap_info->col +
		bg + cap_info->bk + cap_info->cs0_row;
	/* 0x1f disables the CS address bit (single rank or position out of range) */
	if (cs_pst >= 32 || cap_info->rank == 1)
		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
	else
		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);

	/* map a DDR4 ddrconfig back to its index in the addrmap table */
	ddrconf = cap_info->ddrconfig;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
				ddrconf = 7 + i;
				break;
			}
		}
	}

	/* ADDRMAP1..ADDRMAP8 (8 words) come straight from the precomputed table */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
			  &addrmap[ddrconf][0], 8 * 4);
	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);

	if (max_row < 12)
		printascii("set addrmap fail\n");
	/* need to disable row ahead of rank by set to 0xf */
	for (i = 17; i > max_row; i--)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32),
			0xf << ((i - 12) * 8 % 32));

	/* LPDDR2/LPDDR3 parts with a 3/4 row population need ADDRMAP6 bit 31 */
	if ((sdram_params->base.dramtype == LPDDR3 ||
	     sdram_params->base.dramtype == LPDDR2) &&
		 cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	/* NOTE(review): PCCFG bit 8 set for non-16bit DDR4 — presumably bg_b enable */
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw != 0x2)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
}
219 
220 /*
221  * rank = 1: cs0
222  * rank = 2: cs1
223  */
224 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
225 {
226 	void __iomem *ddr_grf_base = dram->ddr_grf;
227 
228 	pctl_read_mr(dram->pctl, rank, mr_num);
229 
230 	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
231 }
232 
233 #define MIN(a, b)	(((a) > (b)) ? (b) : (a))
234 #define MAX(a, b)	(((a) > (b)) ? (a) : (b))
235 static u32 check_rd_gate(struct dram_info *dram)
236 {
237 	void __iomem *phy_base = dram->phy;
238 
239 	u32 max_val = 0;
240 	u32 min_val = 0xff;
241 	u32 gate[4];
242 	u32 i, bw;
243 
244 	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
245 	switch (bw) {
246 	case 0x1:
247 		bw = 1;
248 		break;
249 	case 0x3:
250 		bw = 2;
251 		break;
252 	case 0xf:
253 	default:
254 		bw = 4;
255 		break;
256 	}
257 
258 	for (i = 0; i < bw; i++) {
259 		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
260 		max_val = MAX(max_val, gate[i]);
261 		min_val = MIN(min_val, gate[i]);
262 	}
263 
264 	if (max_val > 0x80 || min_val < 0x20)
265 		return -1;
266 	else
267 		return 0;
268 }
269 
270 static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
271 {
272 	void __iomem *pctl_base = dram->pctl;
273 	u32 dis_auto_zq = 0;
274 	u32 pwrctl;
275 	u32 ret;
276 
277 	/* disable auto low-power */
278 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
279 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
280 
281 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
282 
283 	ret = phy_data_training(dram->phy, cs, dramtype);
284 
285 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
286 
287 	/* restore auto low-power */
288 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
289 
290 	return ret;
291 }
292 
/* Apply the bus-width setting @bw at the DDR PHY. */
static void dram_set_bw(struct dram_info *dram, u32 bw)
{
	phy_dram_set_bw(dram->phy, bw);
}
297 
298 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
299 {
300 	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
301 	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
302 }
303 
/*
 * Final system-level configuration: program the NoC/MSCH with the
 * detected geometry and publish it in the pmugrf OS registers for
 * later boot stages to read.
 */
static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	/* encode DRAM geometry into os_reg[2]/os_reg[3] */
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	sdram_msch_config(dram->msch, &sdram_params->ch.noc_timings, cap_info,
			  &sdram_params->base);
}
319 
/*
 * Enable the controller/PHY automatic low-power features: clock
 * auto-gating, self-refresh and power-down entry, according to the
 * DRAM type and the SR_IDLE/PD_IDLE build configuration.
 */
static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	/* pick the low-power request bit matching the DRAM type (hi 16 = write mask) */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* off digit module clock when enter power down */
	setbits_le32(PHY_REG(phy_base, 7), 1 << 7);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
364 
/*
 * Full controller/PHY bring-up sequence: reset, PLL setup, controller
 * and PHY configuration, then gate training and mode-register checks.
 *
 * pre_init: 0: first pass, init for DRAM cap detection
 *           1: reinit with the detected cap info (except the cs1 row)
 *           2: after reinit, cs1_row is re-detected; if cs1_row differs
 *              from cs0_row and cs sits in the middle of the ddrconf
 *              map, the DRAM must be reinitialized and the correct
 *              ddrconf set.
 *
 * Returns 0 on success, -1 on training or mode-register check failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	/* assert all controller/PHY resets */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3)
		pctl_write_mr(dram->pctl, 3, 11, 3, LPDDR3);

	/* do ddr gate training; retry until the read gates look sane */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	/* sanity-check the DRAM via the MR8 readout for LPDDR2/LPDDR3 */
	if (sdram_params->base.dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->base.dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: when 2cs, both 2 cs should be training */
	if (pre_init != 0 && cap_info->rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->base.dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
452 
/*
 * Detect the real DRAM geometry (column, bank, row, bus width, rank)
 * by probing with oversized defaults and using data training for the
 * rank/width checks.
 *
 * @channel is unused here.
 *
 * Returns 0 on success, -1 when column or row detection fails.
 */
static int dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		if (dram_type == LPDDR2)
			rowtmp = 15;
		else
			rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* detect bg for ddr4; col and bk are fixed by the spec */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, coltmp);
	}

	/* detect row */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect row_3_4 */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* bw and cs detect using data training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	dram_set_bw(dram, 2);
	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;
	cap_info->bw = bw;

	/* assume cs1 mirrors cs0 for now; cs1_row is re-detected later */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
532 
533 void get_ddr_param(struct px30_sdram_params *sdram_params,
534 		   struct ddr_param *ddr_param)
535 {
536 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
537 	u32 dram_type = sdram_params->base.dramtype;
538 	u64 cs_cap[2];
539 
540 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
541 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
542 
543 	if (cap_info->row_3_4) {
544 		cs_cap[0] =  cs_cap[0] * 3 / 4;
545 		cs_cap[1] =  cs_cap[1] * 3 / 4;
546 	}
547 
548 	if (cap_info->row_3_4 && cap_info->rank == 2) {
549 		ddr_param->count = 2;
550 		ddr_param->para[0] = 0;
551 		ddr_param->para[1] = cs_cap[0] * 4 / 3;
552 		ddr_param->para[2] = cs_cap[0];
553 		ddr_param->para[3] = cs_cap[1];
554 	} else {
555 		ddr_param->count = 1;
556 		ddr_param->para[0] = 0;
557 		ddr_param->para[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
558 	}
559 }
560 
561 /* return: 0 = success, other = fail */
562 static int sdram_init_detect(struct dram_info *dram,
563 			     struct px30_sdram_params *sdram_params)
564 {
565 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
566 	u32 ret;
567 	u32 sys_reg = 0;
568 	u32 sys_reg3 = 0;
569 
570 	if (sdram_init_(dram, sdram_params, 0) != 0)
571 		return -1;
572 
573 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
574 		return -1;
575 
576 	/* modify bw, cs related timing */
577 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
578 				   sdram_params->base.dramtype);
579 	/* reinit sdram by real dram cap */
580 	ret = sdram_init_(dram, sdram_params, 1);
581 	if (ret != 0)
582 		goto out;
583 
584 	/* redetect cs1 row */
585 	sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
586 	if (cap_info->cs1_row) {
587 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
588 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
589 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
590 				    sys_reg, sys_reg3, 0);
591 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
592 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
593 	}
594 
595 	ret = sdram_detect_high_row(cap_info);
596 
597 out:
598 	return ret;
599 }
600 
601 struct px30_sdram_params
602 		*get_default_sdram_config(void)
603 {
604 	sdram_configs[0].skew = &skew;
605 
606 	return &sdram_configs[0];
607 }
608 
609 /* return: 0 = success, other = fail */
610 int sdram_init(void)
611 {
612 	struct px30_sdram_params *sdram_params;
613 	int ret = 0;
614 	struct ddr_param ddr_param;
615 
616 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
617 	dram_info.pctl = (void *)DDRC_BASE_ADDR;
618 	dram_info.grf = (void *)GRF_BASE_ADDR;
619 	dram_info.cru = (void *)CRU_BASE_ADDR;
620 	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
621 	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
622 	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;
623 
624 	sdram_params = get_default_sdram_config();
625 	ret = sdram_init_detect(&dram_info, sdram_params);
626 
627 	if (ret)
628 		goto error;
629 
630 	get_ddr_param(sdram_params, &ddr_param);
631 	rockchip_setup_ddr_param(&ddr_param);
632 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
633 			     &sdram_params->base, 0);
634 
635 	printascii("out\n");
636 	return ret;
637 error:
638 	return (-1);
639 }
640 #endif /* CONFIG_TPL_BUILD */
641