// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_px30.h>
#include <asm/arch/grf_px30.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_px30.h>

/*
 * Because the px30 SRAM is small, CONFIG_TPL_TINY_FRAMEWORK must be
 * defined to reduce the TPL size when building the TPL firmware.
 */
#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for px30 !!!
#endif
#endif

DECLARE_GLOBAL_DATA_PTR;
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct px30_ddr_pctl_regs *pctl;
	struct px30_ddr_phy_regs *phy;
	struct px30_cru *cru;
	struct px30_msch_regs *msch;
	struct px30_ddr_grf_regs *ddr_grf;
	struct px30_grf *grf;
#endif
	struct ram_info info;
	struct px30_pmugrf *pmugrf;
};

#ifdef CONFIG_TPL_BUILD
#define PMUGRF_BASE_ADDR		0xFF010000
#define CRU_BASE_ADDR			0xFF2B0000
#define GRF_BASE_ADDR			0xFF140000
#define DDRC_BASE_ADDR			0xFF600000
#define DDR_PHY_BASE_ADDR		0xFF2A0000
#define SERVER_MSCH0_BASE_ADDR		0xFF530000
#define DDR_GRF_BASE_ADDR		0xFF630000

struct dram_info dram_info;

struct px30_sdram_params sdram_configs[] = {
#include	"sdram-px30-lpddr3-detect-333.inc"
};

struct px30_ddr_skew skew = {
#include	"sdram-px30-ddr_skew.inc"
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}

static void rkclk_set_dpll(struct dram_info *dram, unsigned int mhz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;

	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
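	/*
	 * Worked example (a sketch of the divider math above): the 24 MHz
	 * OSC is the PLL reference, so rate = 24 * fbdiv / (refdiv *
	 * postdiv1 * postdiv2). For a 333 MHz LPDDR3 configuration the
	 * caller requests mhz = 666 (2x, see rkclk_configure_ddr); that
	 * selects postdiv1 = 3, postdiv2 = 1, giving
	 * fbdiv = 666 * 1 * 3 * 1 / 24 = 83 and an actual DPLL rate of
	 * 24 * 83 / 3 = 664 MHz, slightly below the request because of
	 * integer truncation.
	 */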

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	/* the inno DDR PHY needs 2 * the DDR frequency */
	rkclk_set_dpll(dram, sdram_params->ddr_freq * 2);
}

static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET);
	udelay(5);
	setbits_le32(PHY_REG(phy_base, 0), DIGITAL_DERESET);
	udelay(1);
}

static int pctl_cfg(struct dram_info *dram,
		    struct px30_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *pctl_base = dram->pctl;

	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->pctl_regs.pctl[i][1],
		       pctl_base + sdram_params->pctl_regs.pctl[i][0]);
	}
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((SR_IDLE & 0xff) << 16) | (PD_IDLE & 0x1f));

	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			5 << 16);
	/* disable zqcs */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);

	return 0;
}

/*
 * Return the ddrconfig value:
 *	(-1): no matching ddrconfig was found
 *	other: the ddrconfig value
 * Only cs0_row >= cs1_row is supported.
 */
static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
{
	u32 bw, die_bw, col, bank;
	u32 i, tmp;
	u32 ddrconf = -1;

	bw = sdram_params->ch.bw;
	die_bw = sdram_params->ch.dbw;
	col = sdram_params->ch.col;
	bank = sdram_params->ch.bk;

	if (sdram_params->dramtype == DDR4) {
		if (die_bw == 0)
			ddrconf = 7 + bw;
		else
			ddrconf = 12 - bw;
		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
	} else {
		tmp = ((bank - 2) << 3) | (col + bw - 10);
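		/*
		 * A sketch of the encoding: tmp packs the bank count and the
		 * effective column width into the value compared against
		 * ddr_cfg_2_rbc[]. E.g. bk = 3 (8 banks), col = 10, bw = 2
		 * (32-bit) gives tmp = ((3 - 2) << 3) | (10 + 2 - 10) = 0xa.
		 */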
		for (i = 0; i < 7; i++)
			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
				ddrconf = i;
				break;
			}
		if (i > 6)
			printascii("calculate ddrconfig error\n");
	}

	return ddrconf;
}

/* n: size in bytes */
static void copy_to_reg(u32 *dest, u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

/*
 * Calculate the controller DRAM address map and program it into the
 * registers. sdram_params->ch.ddrconfig must hold the correct value
 * before this function is called.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 cs_pst, bg, max_row, ddrconf;
	u32 i;

	if (sdram_params->dramtype == DDR4)
		/*
		 * DDR4 8bit dram BG = 2 (4 bank groups),
		 * 16bit dram BG = 1 (2 bank groups)
		 */
		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
	else
		bg = 0;

	cs_pst = sdram_params->ch.bw + sdram_params->ch.col +
		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row;
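	/*
	 * cs_pst is the total number of address bits below the rank (CS)
	 * bit. A worked example: with two ranks, bw = 2 (32-bit), col = 10,
	 * bg = 0, bk = 3 and cs0_row = 15, cs_pst = 30 and ADDRMAP0 is
	 * programmed with 30 - 8 = 22; with a single rank (or
	 * cs_pst >= 32) the CS mapping is disabled by writing 0x1f instead.
	 */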
	if (cs_pst >= 32 || sdram_params->ch.rank == 1)
		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
	else
		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);

	ddrconf = sdram_params->ch.ddrconfig;
	if (sdram_params->dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
				ddrconf = 7 + i;
				break;
			}
		}
	}

	copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
		    &addrmap[ddrconf][0], 8 * 4);
	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);

	if (max_row < 12)
		printascii("set addrmap fail\n");
	/* row bits above max_row must be disabled by setting the map to 0xf */
	for (i = 17; i > max_row; i--)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32),
			0xf << ((i - 12) * 8 % 32));
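	/*
	 * Layout note (as implied by the arithmetic above): row bits 12..17
	 * each own an 8-bit field, packed four per 32-bit register starting
	 * at ADDRMAP6. E.g. disabling row bit 17 touches the register at
	 * ADDRMAP6 + ((17 - 12) * 8 / 32) * 4 = ADDRMAP6 + 4, at field
	 * shift ((17 - 12) * 8) % 32 = 8.
	 */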

	if ((sdram_params->dramtype == LPDDR3 ||
	     sdram_params->dramtype == LPDDR2) &&
		 sdram_params->ch.row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->dramtype == DDR4 && sdram_params->ch.bw != 0x2)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
}

static void phy_dll_bypass_set(struct dram_info *dram, u32 freq)
{
	void __iomem *phy_base = dram->phy;
	u32 tmp;
	u32 i, j;

	setbits_le32(PHY_REG(phy_base, 0x13), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x14), 1 << 3);
	for (i = 0; i < 4; i++) {
		j = 0x26 + i * 0x10;
		setbits_le32(PHY_REG(phy_base, j), 1 << 4);
		clrbits_le32(PHY_REG(phy_base, j + 0x1), 1 << 3);
	}

	if (freq <= (400 * MHz))
		/* DLL bypass */
		setbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	else
		clrbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);

	if (freq <= (801 * MHz))
		tmp = 2;
	else
		tmp = 1;

	for (i = 0; i < 4; i++) {
		j = 0x28 + i * 0x10;
		writel(tmp, PHY_REG(phy_base, j));
	}
}

static void set_ds_odt(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;
	u32 i, j;

	if (sdram_params->dramtype == DDR3) {
		cmd_drv = PHY_DDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR3_RON_RTT_45ohm;
		dqs_drv = PHY_DDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR3_RON_RTT_225ohm;
	} else {
		cmd_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR4_LPDDR3_RON_RTT_43ohm;
		dqs_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		if (sdram_params->dramtype == LPDDR2)
			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_DISABLE;
		else
			dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_240ohm;
	}
	/* DS */
	writel(cmd_drv, PHY_REG(phy_base, 0x11));
	clrsetbits_le32(PHY_REG(phy_base, 0x12), 0x1f << 3, cmd_drv << 3);
	writel(clk_drv, PHY_REG(phy_base, 0x16));
	writel(clk_drv, PHY_REG(phy_base, 0x18));

	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(dqs_drv, PHY_REG(phy_base, j));
		writel(dqs_drv, PHY_REG(phy_base, j + 0xf));
		/* ODT */
		writel(dqs_odt, PHY_REG(phy_base, j + 0x1));
		writel(dqs_odt, PHY_REG(phy_base, j + 0xe));
	}
}

static void phy_cfg(struct dram_info *dram,
		    struct px30_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 i;

	phy_dll_bypass_set(dram, sdram_params->ddr_freq);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}
	if (sdram_params->ch.bw == 2) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	} else if (sdram_params->ch.bw == 1) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 3 << 4);
		/* disable DQS2/DQS3 tx dll to save power */
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 1 << 4);
		/* disable DQS1/DQS2/DQS3 tx dll to save power */
		clrbits_le32(PHY_REG(phy_base, 0x36), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	}
	set_ds_odt(dram, sdram_params);

	/* deskew */
	setbits_le32(PHY_REG(phy_base, 2), 8);
	copy_to_reg(PHY_REG(phy_base, 0xb0),
		    &sdram_params->skew->a0_a1_skew[0], 15 * 4);
	copy_to_reg(PHY_REG(phy_base, 0x70),
		    &sdram_params->skew->cs0_dm0_skew[0], 44 * 4);
	copy_to_reg(PHY_REG(phy_base, 0xc0),
		    &sdram_params->skew->cs1_dm0_skew[0], 44 * 4);
}

static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}
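
/*
 * Note (an assumption based on the DW uMCTL2 programming model this
 * controller appears to follow): toggling RFSHCTL3 bit 1
 * (refresh_update_level) above is what makes the controller latch
 * updated refresh settings, which is why disable_zqcs_arefresh() and
 * restore_zqcs_arefresh() both call this helper after changing
 * RFSHCTL3 bit 0.
 */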

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *ddr_grf_base = dram->ddr_grf;

	writel((rank << 4) | (1 << 0), pctl_base + DDR_PCTL2_MRCTRL0);
	writel((mr_num << 8), pctl_base + DDR_PCTL2_MRCTRL1);
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}
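
/*
 * Usage sketch: sdram_init_() below calls read_mr(dram, 1, 8) to fetch
 * MR8 of cs0, whose low bits identify the LPDDR2/LPDDR3 device type.
 * The result comes back through the DDR GRF status register rather
 * than the controller, as the DDR_GRF_STATUS(0) read above shows.
 */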

u32 disable_zqcs_arefresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 dis_auto_zq = 0;

	/* disable zqcs */
	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
		(1ul << 31))) {
		dis_auto_zq = 1;
		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
	}

	/* disable auto refresh */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);

	update_refresh_reg(dram);

	return dis_auto_zq;
}

void restore_zqcs_arefresh(struct dram_info *dram, u32 dis_auto_zq)
{
	void __iomem *pctl_base = dram->pctl;

	/* restore zqcs */
	if (dis_auto_zq)
		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);

	/* restore auto refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);

	update_refresh_reg(dram);
}

#define MIN(a, b)	(((a) > (b)) ? (b) : (a))
#define MAX(a, b)	(((a) > (b)) ? (a) : (b))
static u32 check_rd_gate(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	u32 max_val = 0;
	u32 min_val = 0xff;
	u32 gate[4];
	u32 i, bw;

	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
	switch (bw) {
	case 0x1:
		bw = 1;
		break;
	case 0x3:
		bw = 2;
		break;
	case 0xf:
	default:
		bw = 4;
		break;
	}

	for (i = 0; i < bw; i++) {
		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
		max_val = MAX(max_val, gate[i]);
		min_val = MIN(min_val, gate[i]);
	}

	if (max_val > 0x80 || min_val < 0x20)
		return -1;
	else
		return 0;
}
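
/*
 * A note on the bounds above (inferred from the code, not a datasheet
 * value): each byte lane's read gate result in PHY registers
 * 0xfb..0xfe is treated as valid only inside the [0x20, 0x80] window;
 * anything outside makes the callers in sdram_init_() redo gate
 * training.
 */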

static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val;
	u32 i, j;

	odt_val = readl(PHY_REG(phy_base, 0x2e));

	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(PHY_DDR3_RON_RTT_225ohm, PHY_REG(phy_base, j + 0x1));
		writel(0, PHY_REG(phy_base, j + 0xe));
	}

	dis_auto_zq = disable_zqcs_arefresh(dram);

	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0);
	}
	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0xff));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	restore_zqcs_arefresh(dram, dis_auto_zq);

	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0x2);
	}

	if (ret & 0x10) {
		ret = -1;
	} else {
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0)) >> 4);
		ret = (ret == 0) ? 0 : -1;
	}

	for (i = 0; i < 4; i++) {
		j = 0x20 + i * 0x10;
		writel(odt_val, PHY_REG(phy_base, j + 0x1));
		writel(odt_val, PHY_REG(phy_base, j + 0xe));
	}

	return ret;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: take care to preserve the original MR value
 */
static int write_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 arg,
		    u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return 0;
}

/*
 * rank: 1 = cs0, 2 = cs1, 3 = cs0 & cs1
 * vrefrate: percentage * 100, e.g. 4500 means 45.00%
 */
static int write_vrefdq(struct dram_info *dram, u32 rank, u32 vrefrate,
			u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;

	if (dramtype != DDR4 || vrefrate < 4500 ||
	    vrefrate > 9200)
		return (-1);

	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;

	if (vrefrate > 7500) {
		/* range 1 */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2 */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}
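
	/*
	 * Worked example (following the DDR4 MR6 VrefDQ encoding assumed
	 * above): the caller in sdram_init_() passes vrefrate = 5670,
	 * i.e. 56.70%. That is <= 7500, so range 2 is used and
	 * value = (5670 - 4500) / 65 = 18, with bit 6 set to select
	 * range 2 (45.00% base, 0.65% steps: 45.00 + 18 * 0.65 = 56.70%).
	 */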

	dis_auto_zq = disable_zqcs_arefresh(dram);

	/* enable vrefdq calibration */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvrefdqe */
	/* write vrefdq value */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvref_time */
	write_mr(dram, rank, 6, value | (0 << 7), dramtype);
	udelay(1);/* tvrefdqx */

	restore_zqcs_arefresh(dram, dis_auto_zq);

	return 0;
}

/*
 * cs: 0 = cs0
 *     1 = cs1
 *     else cs0 + cs1
 * note: row_3_4 is not taken into account
 */
u64 get_cs_cap(struct px30_sdram_params *sdram_params, u32 cs)
{
	u32 bg;
	u64 cap[2];

	if (sdram_params->dramtype == DDR4)
		/* DDR4 8bit dram BG = 2 (4 bank groups),
		 * 16bit dram BG = 1 (2 bank groups)
		 */
		bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
	else
		bg = 0;
	cap[0] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
		bg + sdram_params->ch.bk + sdram_params->ch.cs0_row);

	if (sdram_params->ch.rank == 2)
		cap[1] = 1llu << (sdram_params->ch.bw + sdram_params->ch.col +
			bg + sdram_params->ch.bk + sdram_params->ch.cs1_row);
	else
		cap[1] = 0;

	if (cs == 0)
		return cap[0];
	else if (cs == 1)
		return cap[1];
	else
		return (cap[0] + cap[1]);
}
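
/*
 * Example (a sketch using the formula above): a single-rank 32-bit
 * (bw = 2) LPDDR3 device with col = 10, bk = 3, cs0_row = 15 and
 * bg = 0 yields cap[0] = 1 << (2 + 10 + 0 + 3 + 15) = 2^30 bytes,
 * i.e. 1 GiB, and cap[1] = 0.
 */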

static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
}

static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];

	set_ddrconfig(dram, sdram_params->ch.ddrconfig);

	sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
	sys_reg |= SYS_REG_ENC_ROW_3_4(sdram_params->ch.row_3_4);
	sys_reg |= SYS_REG_ENC_RANK(sdram_params->ch.rank);
	sys_reg |= SYS_REG_ENC_COL(sdram_params->ch.col);
	sys_reg |= SYS_REG_ENC_BK(sdram_params->ch.bk);
	sys_reg |= SYS_REG_ENC_BW(sdram_params->ch.bw);
	sys_reg |= SYS_REG_ENC_DBW(sdram_params->ch.dbw);

	SYS_REG_ENC_CS0_ROW_(sdram_params->ch.cs0_row, sys_reg, sys_reg3);
	if (sdram_params->ch.cs1_row)
		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row, sys_reg,
				     sys_reg3);
	sys_reg3 |= SYS_REG_ENC_CS1_COL(sdram_params->ch.col);
	sys_reg3 |= SYS_REG_ENC_VERSION(DDR_SYS_REG_VERSION);

	writel(sys_reg, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = get_cs_cap(sdram_params, 0);
	cs_cap[1] = get_cs_cap(sdram_params, 1);
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);

	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->agingx0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging0);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging1);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging2);
	writel(sdram_params->ch.noc_timings.agingx0, &dram->msch->aging3);
}
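
/*
 * Note: the os_reg[2]/os_reg[3] writes above store the detected DRAM
 * geometry in the PMUGRF "OS registers"; the non-TPL probe path at the
 * bottom of this file (px30_dmc_probe) decodes os_reg[2] again via
 * rockchip_sdram_size() to report the RAM size to U-Boot proper.
 */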

static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	if (sdram_params->dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* enable lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* gate the digital module clock when entering power down */
	setbits_le32(PHY_REG(phy_base, 7), 1 << 7);

	/*
	 * If DDR3 or DDR4 active_ranks = 1, the controller gates the
	 * memory clock when entering power down.
	 * Force active_ranks to 3 to work around this.
	 */
	if (sdram_params->dramtype == DDR3 || sdram_params->dramtype == DDR4)
		clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x3 << 24,
				0x3 << 24);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}

static void print_ddr_info(struct px30_sdram_params *sdram_params)
{
	u64 cap;
	u32 bg;
	u32 split;

	split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON);
	bg = (sdram_params->ch.dbw == 0) ? 2 : 1;
	switch (sdram_params->dramtype) {
	case LPDDR3:
		printascii("LPDDR3\n");
		break;
	case DDR3:
		printascii("DDR3\n");
		break;
	case DDR4:
		printascii("DDR4\n");
		break;
	case LPDDR2:
		printascii("LPDDR2\n");
		break;
	default:
		printascii("Unknown Device\n");
		break;
	}

	printdec(sdram_params->ddr_freq);
	printascii("MHz\n");
	printascii("BW=");
	printdec(8 << sdram_params->ch.bw);
	printascii(" Col=");
	printdec(sdram_params->ch.col);
	printascii(" Bk=");
	printdec(0x1 << sdram_params->ch.bk);
	if (sdram_params->dramtype == DDR4) {
		printascii(" BG=");
		printdec(1 << bg);
	}
	printascii(" CS0 Row=");
	printdec(sdram_params->ch.cs0_row);
	if (sdram_params->ch.cs0_high16bit_row !=
		sdram_params->ch.cs0_row) {
		printascii("/");
		printdec(sdram_params->ch.cs0_high16bit_row);
	}
	if (sdram_params->ch.rank > 1) {
		printascii(" CS1 Row=");
		printdec(sdram_params->ch.cs1_row);
		if (sdram_params->ch.cs1_high16bit_row !=
			sdram_params->ch.cs1_row) {
			printascii("/");
			printdec(sdram_params->ch.cs1_high16bit_row);
		}
	}
	printascii(" CS=");
	printdec(sdram_params->ch.rank);
	printascii(" Die BW=");
	printdec(8 << sdram_params->ch.dbw);

	cap = get_cs_cap(sdram_params, 3);
	if (sdram_params->ch.row_3_4)
		cap = cap * 3 / 4;
	else if (!(split & (1 << SPLIT_BYPASS_OFFSET)))
		cap = cap / 2 + ((split & 0xff) << 24) / 2;

	printascii(" Size=");
	printdec(cap >> 20);
	printascii("MB\n");
}

/*
 * pre_init: 0: pre-init for dram capacity detection
 * 1: the correct capacity (except the cs1 row) has been detected,
 *    so reinit
 * 2: after the reinit we detect cs1_row; if cs1_row does not equal
 *    cs0_row and the cs is in the middle of the ddrconf map, we need
 *    to reinit the dram and then set the correct ddrconf.
 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	void __iomem *pctl_base = dram->pctl;

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset the ddr phy psrstn to config the pll;
	 * when using the phy pll, psrstn must be dereset
	 * before the pll is configured
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram, sdram_params);
	sdram_params->ch.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram, sdram_params);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->dramtype == LPDDR3)
		write_mr(dram, 3, 11, 3, LPDDR3);

	/* do ddr gate training */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	if (check_rd_gate(dram)) {
		printascii("re-training cs0\n");
		goto redo_cs0_training;
	}

	if (sdram_params->dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: with 2 CS, both chip selects must be trained */
	if (pre_init != 0 && sdram_params->ch.rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re-training cs1\n");
			goto redo_cs1_training;
		}
	}

	if (sdram_params->dramtype == DDR4)
		write_vrefdq(dram, 0x3, 5670, sdram_params->dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}

static u64 dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 row, rowtmp, row_3_4;
	void __iomem *test_addr, *test_addr1;
	u32 dbw;
	u32 cs;
	u32 bw = 1;
	u64 cap = 0;
	u32 dram_type = sdram_params->dramtype;
	u32 pwrctl;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		for (col = coltmp; col >= 9; col -= 1) {
			writel(0, CONFIG_SYS_SDRAM_BASE);
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
					(1ul << (col + bw - 1ul)));
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
				break;
		}
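		/*
		 * The loop above relies on address aliasing: if column bit
		 * (col + bw - 1) does not exist in the device, the PATTERN
		 * write wraps around and clobbers offset 0, so the first
		 * column count whose test address holds PATTERN while
		 * offset 0 still reads 0 is the real column width. The
		 * bank and row probes below use the same trick at higher
		 * address bits.
		 */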
		if (col == 8) {
			printascii("col error\n");
			goto cap_err;
		}

		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bktmp + bw - 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			bk = 3;
		else
			bk = 2;
		if (dram_type == DDR3)
			dbw = 1;
		else
			dbw = 2;
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (coltmp + bw + 1ul)));
		writel(0, CONFIG_SYS_SDRAM_BASE);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			dbw = 0;
		else
			dbw = 1;
	}
	/* detect row */
	for (row = rowtmp; row > 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (row + bktmp + coltmp + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 12) {
		printascii("row error\n");
		goto cap_err;
	}
	/* detect row_3_4 */
	test_addr = (void __iomem *)CONFIG_SYS_SDRAM_BASE;
	test_addr1 = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
			(0x3ul << (row + bktmp + coltmp + bw - 1ul - 1ul)));

	writel(0, test_addr);
	writel(PATTERN, test_addr1);
	if ((readl(test_addr) == 0) &&
	    (readl(test_addr1) == PATTERN))
		row_3_4 = 0;
	else
		row_3_4 = 1;

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* detect bw and cs using phy read gate training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;

	clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	setbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);

	phy_soft_reset(dram);

	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	sdram_params->ch.rank = cs + 1;
	sdram_params->ch.col = col;
	sdram_params->ch.bk = bk;
	sdram_params->ch.dbw = dbw;
	sdram_params->ch.bw = bw;
	sdram_params->ch.cs0_row = row;
	sdram_params->ch.cs0_high16bit_row = row;
	if (cs) {
		sdram_params->ch.cs1_row = row;
		sdram_params->ch.cs1_high16bit_row = row;
	} else {
		sdram_params->ch.cs1_row = 0;
		sdram_params->ch.cs1_high16bit_row = 0;
	}
	sdram_params->ch.row_3_4 = row_3_4;

	if (dram_type == DDR4)
		cap = 1llu << (cs + row + bk + col + ((dbw == 0) ? 2 : 1) + bw);
	else
		cap = 1llu << (cs + row + bk + col + bw);

	return cap;

cap_err:
	return 0;
}

static u32 remodify_sdram_params(struct px30_sdram_params *sdram_params)
{
	u32 tmp = 0, tmp_adr = 0, i;

	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		if (sdram_params->pctl_regs.pctl[i][0] == 0) {
			tmp = sdram_params->pctl_regs.pctl[i][1];/* MSTR */
			tmp_adr = i;
		}
	}

	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));

	switch (sdram_params->ch.dbw) {
	case 2:
		tmp |= (3ul << 30);
		break;
	case 1:
		tmp |= (2ul << 30);
		break;
	case 0:
	default:
		tmp |= (1ul << 30);
		break;
	}

	if (sdram_params->ch.rank == 2)
		tmp |= 3 << 24;
	else
		tmp |= 1 << 24;

	tmp |= (2 - sdram_params->ch.bw) << 12;

	sdram_params->pctl_regs.pctl[tmp_adr][1] = tmp;

	return 0;
}
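
/*
 * Field note (an assumption based on the DW uMCTL2 MSTR layout this
 * code appears to match): bits [31:30] hold the device die width
 * (dbw), bits [25:24] active_ranks, and bits [13:12] data_bus_width,
 * where 0 = full width, 1 = half and 2 = quarter; hence the (2 - bw)
 * conversion above.
 */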

int dram_detect_high_row(struct dram_info *dram,
			 struct px30_sdram_params *sdram_params,
			 unsigned char channel)
{
	sdram_params->ch.cs0_high16bit_row = sdram_params->ch.cs0_row;
	sdram_params->ch.cs1_high16bit_row = sdram_params->ch.cs1_row;

	return 0;
}

static int dram_detect_cs1_row(struct px30_sdram_params *sdram_params,
			       unsigned char channel)
{
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;

	if (sdram_params->ch.rank == 2) {
		cs0_cap = get_cs_cap(sdram_params, 0);

		if (sdram_params->dramtype == DDR4) {
			if (sdram_params->ch.dbw == 0)
				bktmp = sdram_params->ch.bk + 2;
			else
				bktmp = sdram_params->ch.bk + 1;
		} else {
			bktmp = sdram_params->ch.bk;
		}
		bw = sdram_params->ch.bw;
		coltmp = sdram_params->ch.col;

		/*
		 * Because the px30 supports AXI split, the minimum
		 * bandwidth is 8 bit. If cs0 is 32 bit, cs1 may be 32 bit
		 * or 16 bit, so we check the low 16 bits of data when
		 * detecting the cs1 row. If cs0 is 16 bit or 8 bit, we
		 * check the low 8 bits of data.
		 */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* detect cs1 row */
		for (row = sdram_params->ch.cs0_row; row > 12; row--) {
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    cs0_cap +
				    (1ul << (row + bktmp + coltmp + bw - 1ul)));
			writel(0, CONFIG_SYS_SDRAM_BASE + cs0_cap);
			writel(PATTERN, test_addr);

			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}

/* return: 0 = success, other = fail */
static int sdram_init_detect(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;

	if (sdram_init_(dram, sdram_params, 0) != 0)
		return -1;

	if (dram_detect_cap(dram, sdram_params, 0) == 0)
		return -1;

	/* modify bw/cs related timing */
	remodify_sdram_params(sdram_params);
	/* reinit sdram with the detected dram capacity */
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	/* redetect cs1 row */
	sdram_params->ch.cs1_row =
		dram_detect_cs1_row(sdram_params, 0);
	if (sdram_params->ch.cs1_row) {
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW_(sdram_params->ch.cs1_row,
				     sys_reg, sys_reg3);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	ret = dram_detect_high_row(dram, sdram_params, 0);

out:
	return ret;
}

struct px30_sdram_params *get_default_sdram_config(void)
{
	sdram_configs[0].skew = &skew;

	return &sdram_configs[0];
}

/* return: 0 = success, other = fail */
int sdram_init(void)
{
	struct px30_sdram_params *sdram_params;
	int ret = 0;

	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)DDRC_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;

	sdram_params = get_default_sdram_config();
	ret = sdram_init_detect(&dram_info, sdram_params);

	if (ret)
		goto error;

	print_ddr_info(sdram_params);

	printascii("out\n");
	return ret;
error:
	return (-1);
}

#else /* CONFIG_TPL_BUILD */

static int px30_dmc_probe(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size =
		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);

	return 0;
}

static int px30_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops px30_dmc_ops = {
	.get_info = px30_dmc_get_info,
};

static const struct udevice_id px30_dmc_ids[] = {
	{ .compatible = "rockchip,px30-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_px30) = {
	.name = "rockchip_px30_dmc",
	.id = UCLASS_RAM,
	.of_match = px30_dmc_ids,
	.ops = &px30_dmc_ops,
	.probe = px30_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
};
#endif /* CONFIG_TPL_BUILD */