xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_px30.c (revision 2c6a058b7ea25398013cb25b4e3bb96fe40da1a5)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/cru_px30.h>
14 #include <asm/arch/grf_px30.h>
15 #include <asm/arch/hardware.h>
16 #include <asm/arch/sdram_common.h>
17 #include <asm/arch/sdram_px30.h>
18 
/*
 * Because the px30 SRAM is small, CONFIG_TPL_TINY_FRAMEWORK must be
 * defined to reduce the TPL size when building the TPL firmware.
 */
23 #ifdef CONFIG_TPL_BUILD
24 #ifndef CONFIG_TPL_TINY_FRAMEWORK
25 #error please defined CONFIG_TPL_TINY_FRAMEWORK for px30 !!!
26 #endif
27 #endif
28 
29 #ifdef CONFIG_TPL_BUILD
30 
31 DECLARE_GLOBAL_DATA_PTR;
/* Run-time handles for every hardware block touched during DDR init. */
struct dram_info {
	struct px30_ddr_pctl_regs *pctl;	/* DDR controller (upctl2) */
	struct px30_ddr_phy_regs *phy;		/* DDR PHY */
	struct px30_cru *cru;			/* clock & reset unit */
	struct px30_msch_regs *msch;		/* memory scheduler (NoC) */
	struct px30_ddr_grf_regs *ddr_grf;	/* DDR general register file */
	struct px30_grf *grf;			/* SoC general register file */
	struct ram_info info;			/* generic RAM info for the uclass */
	struct px30_pmugrf *pmugrf;		/* PMU GRF; holds os_reg[2]/[3] */
};
42 
43 #define PMUGRF_BASE_ADDR		0xFF010000
44 #define CRU_BASE_ADDR			0xFF2B0000
45 #define GRF_BASE_ADDR			0xFF140000
46 #define DDRC_BASE_ADDR			0xFF600000
47 #define DDR_PHY_BASE_ADDR		0xFF2A0000
48 #define SERVER_MSCH0_BASE_ADDR		0xFF530000
49 #define DDR_GRF_BASE_ADDR		0xff630000
50 
51 struct dram_info dram_info;
52 
53 struct px30_sdram_params sdram_configs[] = {
54 #include	"sdram-px30-lpddr3-detect-333.inc"
55 };
56 
57 struct px30_ddr_skew skew = {
58 #include	"sdram-px30-ddr_skew.inc"
59 };
60 
/*
 * Assert (1) / de-assert (0) the DDR soft resets via CRU softrst_con[1]/[2].
 * ctl_srstn drives both the controller core and AXI resets; ctl_psrstn is
 * the controller APB reset; phy_srstn/phy_psrstn are the PHY core/APB resets.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}
71 
/*
 * Program the DPLL (cru pll[1]) to @mhz MHz and switch it as clock source.
 * With a 24 MHz reference: fout = 24 * fbdiv / (refdiv * postdiv1 * postdiv2).
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int mhz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock wait budget, ~1000 us */

	/* choose post-dividers per frequency band (keeps VCO in range) */
	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the 24 MHz crystal while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* poll for lock; NOTE(review): a lock timeout falls through silently */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	/* switch DPLL output back onto the DDR clock path */
	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
114 
115 static void rkclk_configure_ddr(struct dram_info *dram,
116 				struct px30_sdram_params *sdram_params)
117 {
118 	/* for inno ddr phy need 2*freq */
119 	rkclk_set_dpll(dram,  sdram_params->ddr_freq * 2);
120 }
121 
/*
 * Pulse the PHY soft reset: assert both reset bits in PHY_REG0, then
 * release the analog and digital domains in turn with short settle delays.
 */
static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* assert analog + digital reset (bits [3:2] of PHY_REG0) */
	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET);
	udelay(5);
	setbits_le32(PHY_REG(phy_base, 0), DIGITAL_DERESET);
	udelay(1);
}
133 
/*
 * Load the board-specific controller register table, then patch the idle
 * timers and disable automatic ZQ calibration short (zqcs).
 * Always returns 0.
 */
static int pctl_cfg(struct dram_info *dram,
		    struct px30_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *pctl_base = dram->pctl;

	/* table is (offset, value) pairs terminated by offset 0xFFFFFFFF */
	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->pctl_regs.pctl[i][1],
		       pctl_base + sdram_params->pctl_regs.pctl[i][0]);
	}
	/* self-refresh idle count (bits 23:16), power-down idle (bits 4:0) */
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((SR_IDLE & 0xff) << 16) | (PD_IDLE & 0x1f));

	/* hardware low-power idle window (bits 27:16) = 5 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			5 << 16);
	/* disable zqcs */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);

	return 0;
}
156 
157 /* return ddrconfig value
158  *       (-1), find ddrconfig fail
159  *       other, the ddrconfig value
160  * only support cs0_row >= cs1_row
161  */
162 static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
163 {
164 	u32 bw, die_bw, col, bank;
165 	u32 i, tmp;
166 	u32 ddrconf = -1;
167 
168 	bw = sdram_params->ch.bw;
169 	die_bw = sdram_params->ch.dbw;
170 	col = sdram_params->ch.col;
171 	bank = sdram_params->ch.bk;
172 
173 	if (sdram_params->dramtype == DDR4) {
174 		if (die_bw == 0)
175 			ddrconf = 7 + bw;
176 		else
177 			ddrconf = 12 - bw;
178 		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
179 	} else {
180 		tmp = ((bank - 2) << 3) | (col + bw - 10);
181 		for (i = 0; i < 7; i++)
182 			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
183 				ddrconf = i;
184 				break;
185 			}
186 		if (i > 6)
187 			printascii("calculate ddrconfig error\n");
188 	}
189 
190 	return ddrconf;
191 }
192 
193 /* n: Unit bytes */
194 static void copy_to_reg(u32 *dest, u32 *src, u32 n)
195 {
196 	int i;
197 
198 	for (i = 0; i < n / sizeof(u32); i++) {
199 		writel(*src, dest);
200 		src++;
201 		dest++;
202 	}
203 }
204 
205 /*
206  * calculate controller dram address map, and setting to register.
207  * argument sdram_params->ch.ddrconf must be right value before
208  * call this function.
209  */
210 static void set_ctl_address_map(struct dram_info *dram,
211 				struct px30_sdram_params *sdram_params)
212 {
213 	void __iomem *pctl_base = dram->pctl;
214 	u32 cs_pst, bg, max_row, ddrconf;
215 	u32 i;
216 
217 	if (sdram_params->dramtype == DDR4)
218 		/*
219 		 * DDR4 8bit dram BG = 2(4bank groups),
220 		 * 16bit dram BG = 1 (2 bank groups)
221 		 */
222 		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
223 	else
224 		bg = 0;
225 
226 	cs_pst = sdram_params->ch.bw + sdram_params->ch.col +
227 		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row;
228 	if (cs_pst >= 32 || sdram_params->ch.rank == 1)
229 		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
230 	else
231 		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);
232 
233 	ddrconf = sdram_params->ch.ddrconfig;
234 	if (sdram_params->dramtype == DDR4) {
235 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
236 			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
237 				ddrconf = 7 + i;
238 				break;
239 			}
240 		}
241 	}
242 
243 	copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
244 		    &addrmap[ddrconf][0], 8 * 4);
245 	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);
246 
247 	if (max_row < 12)
248 		printascii("set addrmap fail\n");
249 	/* need to disable row ahead of rank by set to 0xf */
250 	for (i = 17; i > max_row; i--)
251 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
252 			((i - 12) * 8 / 32) * 4,
253 			0xf << ((i - 12) * 8 % 32),
254 			0xf << ((i - 12) * 8 % 32));
255 
256 	if ((sdram_params->dramtype == LPDDR3 ||
257 	     sdram_params->dramtype == LPDDR2) &&
258 		 sdram_params->ch.row_3_4)
259 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
260 	if (sdram_params->dramtype == DDR4 && sdram_params->ch.bw != 0x2)
261 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
262 }
263 
/*
 * Configure DLL bypass for the CMD group and the four DQS byte groups.
 *
 * NOTE(review): phy_cfg() passes ddr_freq, which print_ddr_info() treats
 * as a value in MHz, yet the comparisons below use "400 * MHz" /
 * "801 * MHz" absolute values — confirm the intended unit of @freq.
 */
static void phy_dll_bypass_set(struct dram_info *dram, u32 freq)
{
	void __iomem *phy_base = dram->phy;
	u32 tmp;
	u32 i, j;

	setbits_le32(PHY_REG(phy_base, 0x13), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x14), 1 << 3);
	/* same bit pair for each of the 4 byte groups (register stride 0x10) */
	for (i = 0; i < 4; i++) {
		j = 0x26 + i * 0x10;
		setbits_le32(PHY_REG(phy_base, j), 1 << 4);
		clrbits_le32(PHY_REG(phy_base, j + 0x1), 1 << 3);
	}

	if (freq <= (400 * MHz))
		/* DLL bypass */
		setbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	else
		clrbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);

	/* per-group setting written below depends on the frequency band */
	if (freq <= (801 * MHz))
		tmp = 2;
	else
		tmp = 1;

	for (i = 0; i < 4; i++) {
		j = 0x28 + i * 0x10;
		writel(tmp, PHY_REG(phy_base, j));
	}
}
294 
/*
 * Program PHY output drive strength (DS) and on-die termination (ODT)
 * values appropriate for the DRAM type.
 */
static void set_ds_odt(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;
	u32 i, j;

	if (sdram_params->dramtype == DDR3) {
		cmd_drv = PHY_DDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR3_RON_RTT_45ohm;
		dqs_drv = PHY_DDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR3_RON_RTT_225ohm;
	} else {
		/* DDR4 / LPDDR3 / LPDDR2 share one encoding table */
		cmd_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR4_LPDDR3_RON_RTT_43ohm;
		dqs_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		if (sdram_params->dramtype == LPDDR2)
			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_DISABLE;
		else
			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_240ohm;
	}
	/* DS */
	writel(cmd_drv, PHY_REG(phy_base, 0x11));
	clrsetbits_le32(PHY_REG(phy_base, 0x12), 0x1f << 3, cmd_drv << 3);
	writel(clk_drv, PHY_REG(phy_base, 0x16));
	writel(clk_drv, PHY_REG(phy_base, 0x18));

	/* per byte-group DQS drive and ODT (register stride 0x10) */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(dqs_drv, PHY_REG(phy_base, j));
		writel(dqs_drv, PHY_REG(phy_base, j + 0xf));
		/* ODT */
		writel(dqs_odt, PHY_REG(phy_base, j + 0x1));
		writel(dqs_odt, PHY_REG(phy_base, j + 0xe));
	}
}
331 
/*
 * Configure the DDR PHY: DLL bypass, board register table, bus-width
 * dependent byte-lane enables, drive/ODT settings and de-skew values.
 */
static void phy_cfg(struct dram_info *dram,
		    struct px30_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 i;

	phy_dll_bypass_set(dram, sdram_params->ddr_freq);
	/* table is (offset, value) pairs terminated by offset 0xFFFFFFFF */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}
	/* PHY_REG0[7:4] is the byte-lane enable mask: 0xf/0x3/0x1 */
	if (sdram_params->ch.bw == 2) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	} else if (sdram_params->ch.bw == 1) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 3 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 1 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x36), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	}
	set_ds_odt(dram, sdram_params);

	/* deskew */
	setbits_le32(PHY_REG(phy_base, 2), 8);
	copy_to_reg(PHY_REG(phy_base, 0xb0),
		    &sdram_params->skew->a0_a1_skew[0], 15 * 4);
	copy_to_reg(PHY_REG(phy_base, 0x70),
		    &sdram_params->skew->cs0_dm0_skew[0], 44 * 4);
	copy_to_reg(PHY_REG(phy_base, 0xc0),
		    &sdram_params->skew->cs1_dm0_skew[0], 44 * 4);
}
368 
369 static int update_refresh_reg(struct dram_info *dram)
370 {
371 	void __iomem *pctl_base = dram->pctl;
372 	u32 ret;
373 
374 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
375 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
376 
377 	return 0;
378 }
379 
/*
 * rank = 1: cs0
 * rank = 2: cs1
 * Issue a mode-register read and return the low byte captured in the
 * DDR GRF status register.
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *ddr_grf_base = dram->ddr_grf;

	/* MRCTRL0: select rank, bit 0 = MR read type */
	writel((rank << 4) | (1 << 0), pctl_base + DDR_PCTL2_MRCTRL0);
	writel((mr_num << 8), pctl_base + DDR_PCTL2_MRCTRL1);
	/* bit 31 (mr_wr) starts the transaction; poll until it clears */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}
399 
400 u32 disable_zqcs_arefresh(struct dram_info *dram)
401 {
402 	void __iomem *pctl_base = dram->pctl;
403 	u32 dis_auto_zq = 0;
404 
405 	/* disable zqcs */
406 	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
407 		(1ul << 31))) {
408 		dis_auto_zq = 1;
409 		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
410 	}
411 
412 	/* disable auto refresh */
413 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
414 
415 	update_refresh_reg(dram);
416 
417 	return dis_auto_zq;
418 }
419 
420 void restore_zqcs_arefresh(struct dram_info *dram, u32 dis_auto_zq)
421 {
422 	void __iomem *pctl_base = dram->pctl;
423 
424 	/* restore zqcs */
425 	if (dis_auto_zq)
426 		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
427 
428 	/* restore auto refresh */
429 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
430 
431 	update_refresh_reg(dram);
432 }
433 
434 #define MIN(a, b)	(((a) > (b)) ? (b) : (a))
435 #define MAX(a, b)	(((a) > (b)) ? (a) : (b))
436 static u32 check_rd_gate(struct dram_info *dram)
437 {
438 	void __iomem *phy_base = dram->phy;
439 
440 	u32 max_val = 0;
441 	u32 min_val = 0xff;
442 	u32 gate[4];
443 	u32 i, bw;
444 
445 	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
446 	switch (bw) {
447 	case 0x1:
448 		bw = 1;
449 		break;
450 	case 0x3:
451 		bw = 2;
452 		break;
453 	case 0xf:
454 	default:
455 		bw = 4;
456 		break;
457 	}
458 
459 	for (i = 0; i < bw; i++) {
460 		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
461 		max_val = MAX(max_val, gate[i]);
462 		min_val = MIN(min_val, gate[i]);
463 	}
464 
465 	if (max_val > 0x80 || min_val < 0x20)
466 		return -1;
467 	else
468 		return 0;
469 }
470 
/*
 * Run PHY read-gate training on chip-select @cs.
 * Temporarily forces DQS ODT for training, pauses zqcs/auto-refresh,
 * and restores both afterwards.
 * Returns 0 on success, -1 on failure.
 */
static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val;
	u32 i, j;

	/* remember the current ODT setting so it can be restored below */
	odt_val = readl(PHY_REG(phy_base, 0x2e));

	/* force training-friendly ODT on all 4 byte groups */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(PHY_DDR3_RON_RTT_225ohm, PHY_REG(phy_base, j + 0x1));
		writel(0, PHY_REG(phy_base, j + 0xe));
	}

	dis_auto_zq = disable_zqcs_arefresh(dram);

	/* DDR4: clear the low 2 bits of each group's 0x?9 register */
	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0);
	}
	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	/* PHY_REG 0xff: training status/result */
	ret = readl(PHY_REG(phy_base, 0xff));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	restore_zqcs_arefresh(dram, dis_auto_zq);

	/* DDR4: restore the bits cleared above to 0x2 */
	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0x2);
	}

	/*
	 * bit 4 flags a training error; otherwise the done-lane mask
	 * (low nibble) must match the enabled byte-lane mask in PHY_REG0.
	 */
	if (ret & 0x10) {
		ret = -1;
	} else {
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0)) >> 4);
		ret = (ret == 0) ? 0 : -1;
	}

	/* restore the original ODT configuration */
	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(odt_val, PHY_REG(phy_base, j + 0x1));
		writel(odt_val, PHY_REG(phy_base, j + 0xe));
	}

	return ret;
}
528 
/* rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: be careful of keep mr original val
 *
 * Issue a mode-register write; MR number/value placement in
 * MRCTRL0/MRCTRL1 differs between DDR3/DDR4 and LPDDR2/LPDDR3.
 * Always returns 0.
 */
static int write_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 arg,
		    u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait for any in-flight MR transaction to finish */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		/* MR number goes in MRCTRL0[15:12]; full value in MRCTRL1 */
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		/* LPDDR2/3: MR number and 8-bit value both in MRCTRL1 */
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	/* bit 31 (mr_wr) starts the transaction; poll until it clears */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return 0;
}
560 
/*
 * rank : 1:cs0, 2:cs1, 3:cs0&cs1
 * vrefrate: 4500: 45%,
 *
 * Program DDR4 MR6 VrefDQ (valid range 45.00%..92.00%) using the
 * enter-calibration / set-value / exit-calibration sequence.
 * Returns 0 on success, -1 for non-DDR4 or out-of-range vrefrate.
 */
static int write_vrefdq(struct dram_info *dram, u32 rank, u32 vrefrate,
			u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;

	if (dramtype != DDR4 || vrefrate < 4500 ||
	    vrefrate > 9200)
		return (-1);

	/* MR6 also carries tCCD_L (DRAMTMG4[19:16] - 4) in bits 12:10 */
	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;

	/* VrefDQ step is 0.65%; bit 6 selects range 2 (lower voltages) */
	if (vrefrate > 7500) {
		/* range 1 */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2 */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}

	dis_auto_zq = disable_zqcs_arefresh(dram);

	/* enable vrefdq calibratin */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvrefdqe */
	/* write vrefdq value (repeated with bit 7 set, per MR6 protocol) */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvref_time */
	/* exit calibration mode (bit 7 clear) */
	write_mr(dram, rank, 6, value | (0 << 7), dramtype);
	udelay(1);/* tvrefdqx */

	restore_zqcs_arefresh(dram, dis_auto_zq);

	return 0;
}
602 
603 /*
604  * cs: 0:cs0
605  *	   1:cs1
606  *     else cs0+cs1
607  * note: it didn't consider about row_3_4
608  */
609 u64 get_cs_cap(struct px30_sdram_params *sdram_params, u32 cs)
610 {
611 	u32 bg;
612 	u64 cap[2];
613 
614 	if (sdram_params->dramtype == DDR4)
615 		/* DDR4 8bit dram BG = 2(4bank groups),
616 		 * 16bit dram BG = 1 (2 bank groups)
617 		 */
618 		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
619 	else
620 		bg = 0;
621 	cap[0] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
622 		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row);
623 
624 	if (sdram_params->ch.rank == 2)
625 		cap[1] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
626 			bg + sdram_params->ch.bk + sdram_params->ch.cs1_row);
627 	else
628 		cap[1] = 0;
629 
630 	if (cs == 0)
631 		return cap[0];
632 	else if (cs == 1)
633 		return cap[1];
634 	else
635 		return (cap[0] + cap[1]);
636 }
637 
/* Mirror ddrconfig into both bytes of the NoC scheduler deviceconf and
 * clear grf soc_noc_con[1] bits 15:14 (write-mask register).
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
}
643 
/*
 * Publish the final DRAM geometry: encode it into pmugrf os_reg[2]/[3]
 * (read later by SPL/U-Boot proper) and program the NoC memory
 * scheduler's size and timing registers.
 */
static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];

	set_ddrconfig(dram, sdram_params->ch.ddrconfig);

	/* encode geometry fields into the os_reg format */
	sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
	sys_reg |= SYS_REG_ENC_ROW_3_4(sdram_params->ch.row_3_4);
	sys_reg |= SYS_REG_ENC_RANK(sdram_params->ch.rank);
	sys_reg |= SYS_REG_ENC_COL(sdram_params->ch.col);
	sys_reg |= SYS_REG_ENC_BK(sdram_params->ch.bk);
	sys_reg |= SYS_REG_ENC_BW(sdram_params->ch.bw);
	sys_reg |= SYS_REG_ENC_DBW(sdram_params->ch.dbw);

	SYS_REG_ENC_CS0_ROW_(sdram_params->ch.cs0_row, sys_reg, sys_reg3);
	if (sdram_params->ch.cs1_row)
		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row, sys_reg,
				     sys_reg3);
	sys_reg3 |= SYS_REG_ENC_CS1_COL(sdram_params->ch.col);
	sys_reg3 |= SYS_REG_ENC_VERSION(DDR_SYS_REG_VERSION);

	writel(sys_reg, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	/* per-rank size in units of 64 MiB for the scheduler */
	cs_cap[0] = get_cs_cap(sdram_params, 0);
	cs_cap[1] = get_cs_cap(sdram_params, 1);
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);

	/* NoC timing set from the board parameter table */
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
	/* same aging value is fanned out to all aging registers */
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->agingx0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging1);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging2);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging3);
}
694 
/*
 * Enable automatic low-power features: clock auto-gating in the GRF,
 * PHY clock-off in power-down, and controller self-refresh / power-down
 * entry (gated by the SR_IDLE / PD_IDLE compile-time settings).
 */
static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	/* per-type low-power clock selection (high halfword = write mask) */
	if (sdram_params->dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* off digit module clock when enter power down */
	setbits_le32(PHY_REG(phy_base, 7), 1 << 7);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
739 
/*
 * Print a one-shot summary of the detected DRAM (type, frequency,
 * geometry and total size) via the debug UART.
 */
static void print_ddr_info(struct px30_sdram_params *sdram_params)
{
	u64 cap;
	u32 bg;
	u32 split;

	/* split config affects the usable capacity computation below */
	split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON);
	bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
	switch (sdram_params->dramtype) {
	case LPDDR3:
		printascii("LPDDR3\n");
		break;
	case DDR3:
		printascii("DDR3\n");
		break;
	case DDR4:
		printascii("DDR4\n");
		break;
	case LPDDR2:
		printascii("LPDDR2\n");
		break;
	default:
		printascii("Unknown Device\n");
		break;
	}

	printdec(sdram_params->ddr_freq);
	printascii("MHz\n");
	printascii("BW=");
	printdec(8 << sdram_params->ch.bw);
	printascii(" Col=");
	printdec(sdram_params->ch.col);
	printascii(" Bk=");
	printdec(0x1 << sdram_params->ch.bk);
	if (sdram_params->dramtype == DDR4) {
		printascii(" BG=");
		printdec(1 << bg);
	}
	printascii(" CS0 Row=");
	printdec(sdram_params->ch.cs0_row);
	/* print "full/high16" only when the two row counts differ */
	if (sdram_params->ch.cs0_high16bit_row !=
		sdram_params->ch.cs0_row) {
		printascii("/");
		printdec(sdram_params->ch.cs0_high16bit_row);
	}
	if (sdram_params->ch.rank > 1) {
		printascii(" CS1 Row=");
		printdec(sdram_params->ch.cs1_row);
		if (sdram_params->ch.cs1_high16bit_row !=
			sdram_params->ch.cs1_row) {
			printascii("/");
			printdec(sdram_params->ch.cs1_high16bit_row);
		}
	}
	printascii(" CS=");
	printdec(sdram_params->ch.rank);
	printascii(" Die BW=");
	printdec(8 << sdram_params->ch.dbw);

	/* total capacity, adjusted for 3/4-row parts or an active split */
	cap = get_cs_cap(sdram_params, 3);
	if (sdram_params->ch.row_3_4)
		cap = cap * 3 / 4;
	else if (!(split & (1 << SPLIT_BYPASS_OFFSET)))
		cap = cap / 2 + ((split & 0xff) << 24) / 2;

	printascii(" Size=");
	printdec(cap >> 20);
	printascii("MB\n");
}
809 
/*
 * pre_init: 0: pre init for dram cap detect
 * 1: detect correct cap(except cs1 row)info, than reinit
 * 2: after reinit, we detect cs1_row, if cs1_row not equal
 *    to cs0_row and cs is in middle on ddrconf map, we need
 *    to reinit dram, than set the correct ddrconf.
 *
 * Full reset + clock + controller + PHY bring-up sequence, followed by
 * gate training. Returns 0 on success, -1 on any training/probe failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	void __iomem *pctl_base = dram->pctl;

	/* hold everything in reset while the DPLL is set up */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram, sdram_params);
	sdram_params->ch.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram, sdram_params);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->dramtype == LPDDR3)
		write_mr(dram, 3, 11, 3, LPDDR3);

	/* do ddr gate training */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->dramtype) != 0) {
		/* silent during the first probe pass (pre_init == 0) */
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	/* re-run training until the gate values pass the range check */
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	/* LPDDR2/3: verify the device I/O width via MR8 */
	if (sdram_params->dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: when 2cs, both 2 cs should be training */
	if (pre_init != 0 && sdram_params->ch.rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	/* DDR4: calibrate VrefDQ (56.70%) on both ranks */
	if (sdram_params->dramtype == DDR4)
		write_vrefdq(dram, 0x3, 5670, sdram_params->dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
894 
/*
 * Probe the DRAM geometry (column, bank, row, rank, bus width, 3/4-row)
 * by writing a pattern at power-of-two address offsets and checking for
 * aliasing, then fill sdram_params->ch accordingly.
 * Returns the detected capacity in bytes, or 0 on detection failure.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 row, rowtmp, row_3_4;
	void __iomem *test_addr, *test_addr1;
	u32 dbw;
	u32 cs;
	u32 bw = 1;
	u64 cap = 0;
	u32 dram_type = sdram_params->dramtype;
	u32 pwrctl;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		if (dram_type == LPDDR2)
			rowtmp = 15;
		else
			rowtmp = 16;

		/* shrink col until the probe address no longer aliases */
		for (col = coltmp; col >= 9; col -= 1) {
			writel(0, CONFIG_SYS_SDRAM_BASE);
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
					(1ul << (col + bw - 1ul)));
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
				break;
		}
		if (col == 8) {
			printascii("col error\n");
			goto cap_err;
		}

		/* probe the bank bit the same way: 8 banks vs 4 banks */
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bktmp + bw - 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			bk = 3;
		else
			bk = 2;
		if (dram_type == DDR3)
			dbw = 1;
		else
			dbw = 2;
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bw + 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			dbw = 0;
		else
			dbw = 1;
	}
	/* detect row */
	for (row = rowtmp; row > 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (row + bktmp + coltmp + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 12) {
		printascii("row error");
		goto cap_err;
	}
	/* detect row_3_4 */
	test_addr = CONFIG_SYS_SDRAM_BASE;
	test_addr1 = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
			(0x3ul << (row + bktmp + coltmp + bw - 1ul - 1ul)));

	writel(0, test_addr);
	writel(PATTERN, test_addr1);
	if ((readl(test_addr) == 0) &&
	    (readl(test_addr1) == PATTERN))
		row_3_4 = 0;
	else
		row_3_4 = 1;

	/* disable auto low-power (would interfere with training below) */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* bw and cs detect using phy read gate training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;

	/* enable all byte lanes + DQS tx dlls, then retrain for bus width */
	clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	setbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);

	phy_soft_reset(dram);

	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* publish the detected geometry */
	sdram_params->ch.rank = cs + 1;
	sdram_params->ch.col = col;
	sdram_params->ch.bk = bk;
	sdram_params->ch.dbw = dbw;
	sdram_params->ch.bw = bw;
	sdram_params->ch.cs0_row = row;
	sdram_params->ch.cs0_high16bit_row = row;
	if (cs) {
		/* assume cs1 mirrors cs0 for now; refined later */
		sdram_params->ch.cs1_row = row;
		sdram_params->ch.cs1_high16bit_row = row;
	} else {
		sdram_params->ch.cs1_row = 0;
		sdram_params->ch.cs1_high16bit_row = 0;
	}
	sdram_params->ch.row_3_4 = row_3_4;

	/* capacity = 2^(rank + row + bank(+bg) + col + width) bytes */
	if (dram_type == DDR4)
		cap = 1llu << (cs + row + bk + col + ((dbw == 0) ? 2 : 1) + bw);
	else
		cap = 1llu << (cs + row + bk + col + bw);

	return cap;

cap_err:
	return 0;
}
1050 
1051 static u32 remodify_sdram_params(struct px30_sdram_params *sdram_params)
1052 {
1053 	u32 tmp = 0, tmp_adr = 0, i;
1054 
1055 	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
1056 		if (sdram_params->pctl_regs.pctl[i][0] == 0) {
1057 			tmp = sdram_params->pctl_regs.pctl[i][1];/* MSTR */
1058 			tmp_adr = i;
1059 		}
1060 	}
1061 
1062 	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));
1063 
1064 	switch (sdram_params->ch.dbw) {
1065 	case 2:
1066 		tmp |= (3ul << 30);
1067 		break;
1068 	case 1:
1069 		tmp |= (2ul << 30);
1070 		break;
1071 	case 0:
1072 	default:
1073 		tmp |= (1ul << 30);
1074 		break;
1075 	}
1076 
1077 	/*
1078 	 * If DDR3 or DDR4 MSTR.active_ranks=1,
1079 	 * it will gate memory clock when enter power down.
1080 	 * Force set active_ranks to 3 to workaround it.
1081 	 */
1082 	if (sdram_params->ch.rank == 2 || sdram_params->dramtype == DDR3 ||
1083 	    sdram_params->dramtype == DDR4)
1084 		tmp |= 3 << 24;
1085 	else
1086 		tmp |= 1 << 24;
1087 
1088 	tmp |= (2 - sdram_params->ch.bw) << 12;
1089 
1090 	sdram_params->pctl_regs.pctl[tmp_adr][1] = tmp;
1091 
1092 	return 0;
1093 }
1094 
1095 int dram_detect_high_row(struct dram_info *dram,
1096 			 struct px30_sdram_params *sdram_params,
1097 			 unsigned char channel)
1098 {
1099 	sdram_params->ch.cs0_high16bit_row = sdram_params->ch.cs0_row;
1100 	sdram_params->ch.cs1_high16bit_row = sdram_params->ch.cs1_row;
1101 
1102 	return 0;
1103 }
1104 
/*
 * Probe the number of row bits on rank 1 (cs1) by memory aliasing.
 *
 * For a two-rank setup, writes a test pattern at the address that would
 * be reached with the candidate row count's top bit set (just above the
 * cs0 capacity) and walks the candidate downwards from cs0's row count.
 * The first candidate whose pattern reads back without aliasing onto
 * the cs1 base address is the real cs1 row count.
 *
 * Returns the detected cs1 row count, or 0 when only one rank is
 * present or no candidate > 12 verifies.
 */
static int dram_detect_cs1_row(struct px30_sdram_params *sdram_params,
			       unsigned char channel)
{
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;

	if (sdram_params->ch.rank == 2) {
		/* cs1 is addressed directly above cs0's capacity */
		cs0_cap = get_cs_cap(sdram_params, 0);

		/*
		 * NOTE(review): the +2/+1 for DDR4 presumably accounts
		 * for bank-group address bits depending on device width
		 * (dbw) -- confirm against the controller's address map.
		 */
		if (sdram_params->dramtype == DDR4) {
			if (sdram_params->ch.dbw == 0)
				bktmp = sdram_params->ch.bk + 2;
			else
				bktmp = sdram_params->ch.bk + 1;
		} else {
			bktmp = sdram_params->ch.bk;
		}
		bw = sdram_params->ch.bw;
		coltmp = sdram_params->ch.col;

		/*
		 * because px30 support axi split,min bandwidth
		 * is 8bit. if cs0 is 32bit, cs1 may 32bit or 16bit
		 * so we check low 16bit data when detect cs1 row.
		 * if cs0 is 16bit/8bit, we check low 8bit data.
		 */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* detect cs1 row */
		for (row = sdram_params->ch.cs0_row; row > 12; row--) {
			/* address with candidate row's MSB set */
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    cs0_cap +
				    (1ul << (row + bktmp + coltmp + bw - 1ul)));
			/* clear cs1 base first, then write the pattern */
			writel(0, CONFIG_SYS_SDRAM_BASE + cs0_cap);
			writel(PATTERN, test_addr);

			/*
			 * Valid row count: pattern reads back at
			 * test_addr and did not alias onto cs1 base.
			 */
			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
1159 
1160 void get_ddr_param(struct px30_sdram_params *sdram_params,
1161 		   struct ddr_param *ddr_param)
1162 {
1163 	u64 cs_cap[2];
1164 
1165 	cs_cap[0] = get_cs_cap(sdram_params, 0);
1166 	cs_cap[1] = get_cs_cap(sdram_params, 1);
1167 
1168 	if (sdram_params->ch.row_3_4) {
1169 		cs_cap[0] =  cs_cap[0] * 3 / 4;
1170 		cs_cap[1] =  cs_cap[1] * 3 / 4;
1171 	}
1172 
1173 	if (sdram_params->ch.row_3_4 && sdram_params->ch.rank == 2) {
1174 		ddr_param->count = 2;
1175 		ddr_param->para[0] = 0;
1176 		ddr_param->para[1] = cs_cap[0] * 4 / 3;
1177 		ddr_param->para[2] = cs_cap[0];
1178 		ddr_param->para[3] = cs_cap[1];
1179 	} else {
1180 		ddr_param->count = 1;
1181 		ddr_param->para[0] = 0;
1182 		ddr_param->para[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
1183 	}
1184 }
1185 
1186 /* return: 0 = success, other = fail */
1187 static int sdram_init_detect(struct dram_info *dram,
1188 			     struct px30_sdram_params *sdram_params)
1189 {
1190 	u32 ret;
1191 	u32 sys_reg = 0;
1192 	u32 sys_reg3 = 0;
1193 
1194 	if (sdram_init_(dram, sdram_params, 0) != 0)
1195 		return -1;
1196 
1197 	if (dram_detect_cap(dram, sdram_params, 0) == 0)
1198 		return -1;
1199 
1200 	/* modify bw, cs related timing */
1201 	remodify_sdram_params(sdram_params);
1202 	/* reinit sdram by real dram cap */
1203 	ret = sdram_init_(dram, sdram_params, 1);
1204 	if (ret != 0)
1205 		goto out;
1206 
1207 	/* redetect cs1 row */
1208 	sdram_params->ch.cs1_row =
1209 		dram_detect_cs1_row(sdram_params, 0);
1210 	if (sdram_params->ch.cs1_row) {
1211 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
1212 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
1213 		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row,
1214 				     sys_reg, sys_reg3);
1215 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
1216 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
1217 	}
1218 
1219 	ret = dram_detect_high_row(dram, sdram_params, 0);
1220 
1221 out:
1222 	return ret;
1223 }
1224 
1225 struct px30_sdram_params
1226 		*get_default_sdram_config(void)
1227 {
1228 	sdram_configs[0].skew = &skew;
1229 
1230 	return &sdram_configs[0];
1231 }
1232 
/*
 * TPL entry point for dram bring-up: fill in the fixed MMIO base
 * addresses of all dram-related blocks, run init + auto-detection,
 * and on success publish the resulting memory layout.
 *
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct px30_sdram_params *sdram_params;
	int ret = 0;
	struct ddr_param ddr_param;

	/* base addresses are fixed on px30 (see #defines above) */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)DDRC_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;

	sdram_params = get_default_sdram_config();
	ret = sdram_init_detect(&dram_info, sdram_params);

	if (ret)
		goto error;

	/* hand the detected layout to the next boot stage */
	get_ddr_param(sdram_params, &ddr_param);
	rockchip_setup_ddr_param(&ddr_param);
	print_ddr_info(sdram_params);

	/* debug-UART marker: dram init finished */
	printascii("out\n");
	return ret;
error:
	return (-1);
}
1263 #endif /* CONFIG_TPL_BUILD */
1264