xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1108_pctl_phy.c (revision 10427e2df5a90fdf95a3ef373e36c5dd49ba07ad)
1 // SPDX-License-Identifier:     GPL-2.0+
2 /*
3  * Copyright (C) 2020 Rockchip Electronics Co., Ltd
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <dm/root.h>
10 #include <dt-structs.h>
11 #include <ram.h>
12 #include <regmap.h>
13 #include <asm/io.h>
14 #include <asm/types.h>
15 #include <asm/arch/hardware.h>
16 #include <asm/arch/sdram_rv1108_pctl_phy.h>
17 #include <asm/arch/timer.h>
18 #include <asm/arch/sdram.h>
19 
20 #if defined(CONFIG_ROCKCHIP_RV1108)
21 #include <asm/arch/sdram_rv1108.h>
22 #elif defined(CONFIG_ROCKCHIP_RK3308)
23 #include <asm/arch/sdram_rk3308.h>
24 #endif
25 
/*
 * We cannot fit the code to access the device tree into SPL
 * (due to the 6K SRAM size limit), so these settings are hard-coded.
 */
30 
31 void copy_to_reg(u32 *dest, const u32 *src, u32 n)
32 {
33 	int i;
34 
35 	for (i = 0; i < n / sizeof(u32); i++) {
36 		writel(*src, dest);
37 		src++;
38 		dest++;
39 	}
40 }
41 
/*
 * Reset the DDR PHY.
 *
 * First asserts the SoC-level PHY/PCTL resets through the CRU helper,
 * then pulses the PHY-internal digital-core and analog-logic reset bits
 * in phy_reg0.  The release order (analog first, then digital) and the
 * udelay() spacings follow the vendor initialization sequence.
 */
static void phy_pctrl_reset(struct dram_info *priv)
{
	/* SoC-specific reset via the clock & reset unit */
	phy_pctrl_reset_cru(priv);
	/* assert both digital-core and analog-logic resets */
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_DIGITAL_CORE_MASK | RESET_ANALOG_LOGIC_MASK,
			RESET_DIGITAL_CORE_ACT << RESET_DIGITAL_CORE_SHIFT |
			RESET_ANALOG_LOGIC_ACT << RESET_ANALOG_LOGIC_SHIFT);
	udelay(1);
	/* release the analog logic first... */
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_ANALOG_LOGIC_MASK,
			RESET_ANALOG_LOGIC_DIS << RESET_ANALOG_LOGIC_SHIFT);
	udelay(5);
	/* ...then the digital core */
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_DIGITAL_CORE_MASK,
			RESET_DIGITAL_CORE_DIS << RESET_DIGITAL_CORE_SHIFT);
	udelay(1);
}
59 
/*
 * Program DLL bypass for the command/clock path and both DQ byte lanes.
 *
 * The per-lane command/DQ bypass bits are always written; the global
 * bypass selection in phy_regdll is enabled only at or below
 * PHY_LOW_SPEED_MHZ and disabled above it.  Finally the DQS RX DLL is
 * configured for @freq by the SoC-specific helper.
 *
 * @freq: DDR clock frequency in MHz
 */
static void phy_dll_bypass_set(struct dram_info *priv, unsigned int freq)
{
	/* command lane DLL bypass */
	clrsetbits_le32(&priv->phy->phy_reg13,
			CMD_DLL_BYPASS_MASK << CMD_DLL_BYPASS_SHIFT,
			CMD_DLL_BYPASS << CMD_DLL_BYPASS_SHIFT);

	/* keep the CK DLL active */
	writel(CK_DLL_BYPASS_DISABLE << CK_DLL_BYPASS_SHIFT,
	       &priv->phy->phy_reg14);

	/* left (lower) byte lane: bypass DQ DLL, keep DQS DLL */
	clrsetbits_le32(&priv->phy->phy_reg26,
			LEFT_CHN_A_DQ_DLL_BYPASS_MASK << LEFT_CHN_A_DQ_DLL_SHIFT,
			LEFT_CHN_A_DQ_DLL_BYPASS << LEFT_CHN_A_DQ_DLL_SHIFT);
	writel(LEFT_CHN_A_DQS_DLL_BYPASS_DIS << LEFT_CHN_A_DQS_DLL_SHIFT,
	       &priv->phy->phy_reg27);

	/* right (upper) byte lane: same configuration */
	clrsetbits_le32(&priv->phy->phy_reg36,
			RIGHT_CHN_A_DQ_DLL_BYPASS_MASK << RIGHT_CHN_A_DQ_DLL_SHIFT,
			RIGHT_CHN_A_DQ_DLL_BYPASS << RIGHT_CHN_A_DQ_DLL_SHIFT);
	writel(RIGHT_CHN_A_DQS_DLL_BYPASS_DIS <<
	       RIGHT_CHN_A_DQS_DLL_SHIFT, &priv->phy->phy_reg37);

	/* global TX/CK DLL bypass only for low-speed operation */
	if (freq <= PHY_LOW_SPEED_MHZ) {
		writel(RIGHT_CHN_A_TX_DQ_BYPASS_SET <<
		       RIGHT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       LEFT_CHN_A_TX_DQ_BYPASS_SET <<
		       LEFT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       CMD_CK_DLL_BYPASS_SET << CMD_CK_DLL_BYPASS_SHIFT,
		       &priv->phy->phy_regdll);
	} else {
		writel(RIGHT_CHN_A_TX_DQ_BYPASS_DIS <<
		       RIGHT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       LEFT_CHN_A_TX_DQ_BYPASS_DIS <<
		       LEFT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       CMD_CK_DLL_BYPASS_DIS << CMD_CK_DLL_BYPASS_SHIFT,
				&priv->phy->phy_regdll);
	}

	/* SoC-specific DQS RX DLL setup for this frequency */
	ddr_phy_dqs_rx_dll_cfg(priv, freq);
}
99 
100 static void send_command(struct dram_info *priv,
101 			 u32 rank, u32 cmd, u32 arg)
102 {
103 	writel((START_CMD | (rank << RANK_SEL_SHIFT) | arg | cmd),
104 	       &priv->pctl->mcmd);
105 	while (readl(&priv->pctl->mcmd) & START_CMD)
106 		;
107 }
108 
/*
 * Run the JEDEC initialization command sequence for the attached memory.
 *
 * DDR2/DDR3: deselect, precharge-all, then MRS writes in the
 * MR2 -> MR3 -> MR1 -> MR0 order, with MR0 carrying the DLL-reset bit.
 * DDR3 finishes with a ZQ long calibration; DDR2 instead issues
 * precharge-all, two refreshes and a final MR0 without DLL reset.
 *
 * LPDDR2: MRW reset (MA63), per-rank ZQ init calibration (MA10/0xFF),
 * then MR1..MR3 programming.  All mode-register values come from the
 * board timing table in @params_priv.
 */
static void memory_init(struct dram_info *priv,
			struct sdram_params *params_priv)
{
	u32 mr0;

	if (params_priv->ddr_config_t.ddr_type == DDR3 ||
	    params_priv->ddr_config_t.ddr_type == DDR2) {
		send_command(priv, RANK_SEL_CS0_CS1, DESELECT_CMD, 0);
		udelay(1);
		send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);
		send_command(priv, RANK_SEL_CS0_CS1, DESELECT_CMD, 0);
		udelay(1);
		/* program MR2 -> MR3 -> MR1 -> MR0 (standard init order) */
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR2 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[2] &
			     CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR3 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[3] &
			     CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR1 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[1] &
			     CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		mr0 = params_priv->ddr_timing_t.phy_timing.mr[0];
		if (params_priv->ddr_config_t.ddr_type == DDR3) {
			/* MR0 with DLL reset, then ZQ long calibration */
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
				     (((mr0 | DDR3_DLL_RESET) &
				       CMD_ADDR_MASK) << CMD_ADDR_SHIFT));

			send_command(priv, RANK_SEL_CS0_CS1, ZQCL_CMD, 0);
		} else {
			/*
			 * DDR2: MR0 with DLL reset, precharge-all, two
			 * refreshes, then MR0 again with DLL reset cleared
			 */
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
				     (((mr0 | DDR3_DLL_RESET) &
				       CMD_ADDR_MASK) << CMD_ADDR_SHIFT));
			send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, REF_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, REF_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) <<
				     BANK_ADDR_SHIFT |
				     ((mr0 & CMD_ADDR_MASK) <<
				      CMD_ADDR_SHIFT));
		}
	} else {
		/* LPDDR2 reset via MRW to MA63 */
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (63 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0 & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tINIT5 */
		udelay(10);
		/* ZQ calibration Init, rank 0 */
		send_command(priv, RANK_SEL_CS0, MRS_CMD,
			     (10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xFF & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tZQINIT */
		udelay(1);
		/* ZQ calibration Init, rank 1 */
		send_command(priv, RANK_SEL_CS1, MRS_CMD,
			     (10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xFF & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tZQINIT */
		udelay(1);
		/* program MR1..MR3 from the board timing table */
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (1 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[1] &
			     LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (2 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[2] &
			     LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (3 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[3] &
			     LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
	}
}
193 
194 void move_to_config_state(struct dram_info *priv)
195 {
196 	unsigned int state;
197 
198 	while (1) {
199 		state = readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK;
200 		switch (state) {
201 		case LOW_POWER:
202 			writel(WAKEUP_STATE, &priv->pctl->sctl);
203 			while ((readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK)
204 				!= ACCESS)
205 				;
206 			/*
207 			 * If at low power state, need wakeup first, and then
208 			 * enter the config, so fallthrough
209 			 */
210 		case ACCESS:
211 		case INIT_MEM:
212 			writel(CFG_STATE, &priv->pctl->sctl);
213 			while ((readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK)
214 				!= CONFIG)
215 				;
216 			break;
217 		case CONFIG:
218 			return;
219 		default:
220 			break;
221 		}
222 	}
223 }
224 
225 void move_to_access_state(struct dram_info *priv)
226 {
227 	unsigned int state;
228 
229 	while (1) {
230 		state = readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK;
231 		switch (state) {
232 		case LOW_POWER:
233 			writel(WAKEUP_STATE, &priv->pctl->sctl);
234 			while ((readl(&priv->pctl->stat) &
235 				PCTL_CTL_STAT_MASK) != ACCESS)
236 				;
237 			break;
238 		case INIT_MEM:
239 			writel(CFG_STATE, &priv->pctl->sctl);
240 			while ((readl(&priv->pctl->stat) &
241 				PCTL_CTL_STAT_MASK) != CONFIG)
242 				;
243 			/* fallthrough */
244 		case CONFIG:
245 			writel(GO_STATE, &priv->pctl->sctl);
246 			while ((readl(&priv->pctl->stat) &
247 				PCTL_CTL_STAT_MASK) != ACCESS)
248 				;
249 			break;
250 		case ACCESS:
251 			return;
252 		default:
253 			break;
254 		}
255 	}
256 }
257 
/*
 * Configure the uPCTL memory controller.
 *
 * Programs the DFI interface registers, copies the pre-computed pctl
 * timing table into the controller, then sets memory-type-specific
 * ODT, reset, mcfg and low-power registers.  Finally applies the
 * SoC-specific GRF setup, enables hardware low-power mode and selects
 * the x16 "page-policy" memory configuration.
 */
static void pctl_cfg(struct dram_info *priv,
		     struct sdram_params *params_priv)
{
	u32 reg;
	u32 burstlen;
	u32 bl_mddr_lpddr2;

	/* DFI config */
	writel(DFI_DATA_BYTE_DISABLE_EN << DFI_DATA_BYTE_DISABLE_EN_SHIFT |
	       DFI_INIT_START_EN << DFI_INIT_START_SHIFT,
	       &priv->pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_DISABLE_EN_DPD <<
	       DFI_DRAM_CLK_DISABLE_EN_DPD_SHIFT |
	       DFI_DRAM_CLK_DISABLE_EN << DFI_DRAM_CLK_DISABLE_EN_SHIFT,
	       &priv->pctl->dfistcfg1);
	writel(PARITY_EN << PARITY_EN_SHIFT |
	       PARITY_INTR_EN << PARITY_INTR_EN_SHIFT, &priv->pctl->dfistcfg2);

	/* DFI timing parameters */
	writel(TPHYUPD_TYPE0, &priv->pctl->dfitphyupdtype0);
	writel(TPHY_RDLAT, &priv->pctl->dfitphyrdlat);
	writel(TPHY_WRDATA, &priv->pctl->dfitphywrdata);

	/* no PHY- or controller-initiated update requests */
	writel(DFI_PHYUPD_DISABLE | DFI_CTRLUPD_DISABLE,
	       &priv->pctl->dfiupdcfg);

	/* load the whole pre-computed pctl timing table in one go */
	copy_to_reg(&priv->pctl->togcnt1u,
		    &params_priv->ddr_timing_t.pctl_timing.togcnt1u,
		    sizeof(struct pctl_timing));
	/*
	 * The rv1108 PHY runs in 1:2 mode and noc_timing.b.burstlen has
	 * been divided down to the scheduler clock, so multiply by 4 to
	 * recover the memory burst length.
	 */
	burstlen = params_priv->ddr_timing_t.noc_timing.b.burstlen * 4;

	if (params_priv->ddr_config_t.ddr_type == DDR3 ||
	    params_priv->ddr_config_t.ddr_type == DDR2) {
		/* DDR2/DDR3: write ODT enabled on both ranks */
		writel((RANK0_ODT_WRITE_SEL << RANK0_ODT_WRITE_SEL_SHIFT |
		       RANK1_ODT_WRITE_SEL << RANK1_ODT_WRITE_SEL_SHIFT),
		       &priv->pctl->dfiodtcfg);

		writel(ODT_LEN_BL8_W << ODT_LEN_BL8_W_SHIFT,
		       &priv->pctl->dfiodtcfg1);

		writel(params_priv->ddr_timing_t.pctl_timing.trsth,
		       &priv->pctl->trsth);
		/* mcfg: memory type, burst length, tFAW and power-down mode */
		if (params_priv->ddr_config_t.ddr_type == DDR3)
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR3_EN |
			       MEM_BL_8 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		else if (burstlen == 8)
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR2_EN |
			       MEM_BL_8 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		else
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR2_EN |
			       MEM_BL_4 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		/* low power: self-refresh only */
		writel(DFI_LP_EN_SR << DFI_LP_EN_SR_SHIFT |
		       DFI_LP_WAKEUP_SR_32_CYCLES << DFI_LP_WAKEUP_SR_SHIFT |
		       DFI_TLP_RESP << DFI_TLP_RESP_SHIFT,
		       &priv->pctl->dfilpcfg0);

		/* derive DFI read/write latencies from tCL/tCWL (1:2 mode) */
		reg = readl(&priv->pctl->tcl);
		writel((reg - 1) / 2 - 1, &priv->pctl->dfitrddataen);
		reg = readl(&priv->pctl->tcwl);
		writel((reg - 1) / 2 - 1, &priv->pctl->dfitphywrlat);
	} else {
		/* LPDDR2 */
		if (burstlen == 4)
			bl_mddr_lpddr2 = MDDR_LPDDR2_BL_4;
		else
			bl_mddr_lpddr2 = MDDR_LPDDR2_BL_8;
		/* no write ODT for LPDDR2 */
		writel((RANK0_ODT_WRITE_DIS << RANK0_ODT_WRITE_SEL_SHIFT |
		       RANK1_ODT_WRITE_DIS << RANK1_ODT_WRITE_SEL_SHIFT),
		       &priv->pctl->dfiodtcfg);

		writel(ODT_LEN_BL8_W_0 << ODT_LEN_BL8_W_SHIFT,
		       &priv->pctl->dfiodtcfg1);

		writel(0, &priv->pctl->trsth);
		writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | LPDDR2_EN |
			LPDDR2_S4 | bl_mddr_lpddr2 |
		       TFAW_CFG_6_TDDR | PD_EXIT_FAST_EXIT_MODE |
		       PD_TYPE_ACT_PD | PD_IDLE_DISABLE, &priv->pctl->mcfg);
		/* low power: self-refresh and power-down */
		writel(DFI_LP_EN_SR << DFI_LP_EN_SR_SHIFT |
		       DFI_LP_WAKEUP_SR_32_CYCLES << DFI_LP_WAKEUP_SR_SHIFT |
		       DFI_TLP_RESP << DFI_TLP_RESP_SHIFT |
		       DFI_LP_WAKEUP_PD_32_CYCLES << DFI_LP_WAKEUP_PD_SHIFT |
		       DFI_LP_EN_PD,
		       &priv->pctl->dfilpcfg0);

		reg = readl(&priv->pctl->tcl);
		writel(reg / 2 - 1, &priv->pctl->dfitrddataen);
		reg = readl(&priv->pctl->tcwl);
		writel(reg / 2 - 1, &priv->pctl->dfitphywrlat);
	}
	/* SoC-specific GRF setup */
	pctl_cfg_grf(priv, params_priv);
	setbits_le32(&priv->pctl->scfg, HW_LOW_POWER_EN);

	/* only support x16 memory */
	clrsetbits_le32(&priv->pctl->ppcfg, PPMEM_EN_MASK, PPMEM_EN);
}
368 
/*
 * Configure the DDR PHY for the selected memory type.
 *
 * Sets the memory type and burst length in phy_reg1, CL(+AL) in
 * phy_regb and CWL in phy_regc, then applies drive-strength/ODT via
 * the SoC-specific helper and forces 16-bit DQ width (only x16 memory
 * is supported).
 */
static void phy_cfg(struct dram_info *priv,
		    struct sdram_params *params_priv)
{
	u32 burstlen;

	/*
	 * noc burstlen is in scheduler-clock units (1:2 PHY), so *4
	 * yields the memory burst length; only the LPDDR2 case below
	 * actually uses it — DDR2/DDR3 are fixed at BL8.
	 */
	burstlen = params_priv->ddr_timing_t.noc_timing.b.burstlen * 4;
	burstlen = (burstlen == 4) ? PHY_BL_4 : PHY_BL_8;
	ddr_msch_cfg(priv, params_priv);
	ddr_phy_skew_cfg(priv);
	switch (params_priv->ddr_config_t.ddr_type) {
	case DDR2:
		writel(MEMORY_SELECT_DDR2 | PHY_BL_8, &priv->phy->phy_reg1);
		break;
	case DDR3:
		writel(MEMORY_SELECT_DDR3 | PHY_BL_8, &priv->phy->phy_reg1);
		break;
	case LPDDR2:
	default:
		writel(MEMORY_SELECT_LPDDR2 | burstlen, &priv->phy->phy_reg1);
		break;
	}

	/* CAS latency (+AL) and CAS write latency */
	writel(params_priv->ddr_timing_t.phy_timing.cl_al,
	       &priv->phy->phy_regb);
	writel(params_priv->ddr_timing_t.pctl_timing.tcwl,
	       &priv->phy->phy_regc);

	/* drive strength / ODT, SoC-specific */
	set_ds_odt(priv, params_priv);

	/* only support x16 memory */
	clrsetbits_le32(&priv->phy->phy_reg0, DQ_16BIT_EN_MASK,
			DQ_16BIT_EN);
}
402 
/*
 * Apply the row/bank/column address-mapping configuration to the memory
 * scheduler.  The controller must be in CONFIG state while the mapping
 * is changed, so bracket the SoC-specific call accordingly.
 */
static void dram_cfg_rbc(struct dram_info *priv,
			 struct sdram_params *params_priv)
{
	move_to_config_state(priv);
	ddr_msch_cfg_rbc(params_priv, priv);
	move_to_access_state(priv);
}
410 
/*
 * Run DQS gate training on CS0.
 *
 * The refresh interval register (trefi) is saved and forced to 0 (with
 * the UPD_REF update bit) so auto refresh is paused while training
 * runs; it is restored at the end.  Training is triggered by toggling
 * the DQS gate training enable in phy_reg2 and is polled for up to
 * ~1000us; on timeout the function proceeds regardless (the caller
 * re-checks the read gate via check_rd_gate()).
 */
static void data_training(struct dram_info *priv)
{
	u32 value;
	u32 tmp = 0;
	u32 tmp1 = 0;
	u32 timeout = 1000;

	/* disable auto refresh */
	value = readl(&priv->pctl->trefi);
	writel(UPD_REF, &priv->pctl->trefi);

	/* preserve the other phy_reg2 bits while toggling training */
	tmp1 = readl(&priv->phy->phy_reg2);

	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_DIS | tmp1,
	       &priv->phy->phy_reg2);
	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_ACT | tmp1,
	       &priv->phy->phy_reg2);

	/* delay until data training done (both byte lanes), or timeout */
	while (tmp != (CHN_A_HIGH_8BIT_TRAINING_DONE |
	       CHN_A_LOW_8BIT_TRAINING_DONE)) {
		udelay(1);
		tmp = (readl(&priv->phy->phy_regff) & CHN_A_TRAINING_DONE_MASK);
		timeout--;
		if (!timeout)
			break;
	}

	/* switch training back off */
	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_DIS | tmp1,
	       &priv->phy->phy_reg2);

	send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);

	/* restore refresh interval and trigger a timing update */
	writel(value | UPD_REF, &priv->pctl->trefi);
}
446 
/*
 * Probe the DRAM geometry (column, bank and row counts) by aliasing.
 *
 * The scheduler is first configured for the maximum geometry; then for
 * each candidate address-bit count a PATTERN is written at the offset
 * that bit addresses.  If the bit exists the pattern reads back and
 * address 0 stays zero; if not, the write aliases onto address 0 and
 * the candidate is rejected.  Results are stored in @params_priv.
 *
 * Return: 0 on success, -EAGAIN if no plausible col/row count is found.
 */
static int sdram_detect(struct dram_info *priv,
			struct sdram_params *params_priv)
{
	u32 row, col, row_max, col_max, bank_max;
	u32 bw = 1;	/* address shift for the 16-bit bus width */
	phys_addr_t test_addr;
	struct ddr_schedule ddr_sch;

	/* if col detect wrong,row needs initial */
	row = 0;

	/* detect col: configure scheduler for max col/bank first */
	move_to_config_state(priv);
	ddr_msch_get_max_col(priv, &ddr_sch);
	col_max = ddr_sch.col;
	bank_max = ddr_sch.bank;
	move_to_access_state(priv);

	for (col = col_max; col >= 10; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
		    (1ul << (col + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	/* loop fell through: col == 9, nothing matched */
	if (col <= 9)
		goto cap_err;
	params_priv->ddr_config_t.col = col;

	/* detect bank count: DDR3 always has 8 banks (2^3) */
	if (params_priv->ddr_config_t.ddr_type == DDR3) {
		params_priv->ddr_config_t.bank = 3;
	} else {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
		    (1ul << (bank_max + col_max +
		    bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			params_priv->ddr_config_t.bank = 3;
		else
			params_priv->ddr_config_t.bank = 2;
	}

	/* detect row */
	move_to_config_state(priv);
	ddr_msch_get_max_row(priv, &ddr_sch);
	move_to_access_state(priv);
	col_max = ddr_sch.col;
	row_max = ddr_sch.row;

	for (row = row_max; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (row + bank_max +
				col_max + bw - 1ul)));

		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row <= 11)
		goto cap_err;
	params_priv->ddr_config_t.cs0_row = row;
	return 0;
cap_err:
	return -EAGAIN;
}
518 
/* version tag stored in os_reg3 for later-stage consumers */
#define DDR_VERSION	0x2

/*
 * Publish the detected DRAM configuration.
 *
 * Encodes type/channels/rank/col/bank/row/width into GRF os_reg2 and
 * os_reg3 (the format read back by later boot stages), prints a summary
 * over the debug UART, and records base/size in priv->info and the
 * ddr_param handoff structure.
 */
static void sdram_all_config(struct dram_info *priv,
			     struct sdram_params *params_priv)
{
	u32 version = DDR_VERSION;
	u32 os_reg = 0;
	u32 row_12 = 0;
	u32 ddr_info = 0;
	/* rk3308,rv1108 only support 1 channel, x16 ddr bus, x16 memory */
	u32 chn_cnt = 0;	/* encoded as channel count - 1 */
	u32 rank = 1;
	u32 bw = 1;
	u32 dbw = 1;
	size_t size = 0;
	struct ddr_param ddr_param;

	/* os_reg2: packed geometry descriptor (fields are biased, see masks) */
	os_reg = (params_priv->ddr_config_t.ddr_type & SYS_REG_DDRTYPE_MASK) <<
		 SYS_REG_DDRTYPE_SHIFT |
		 (chn_cnt & SYS_REG_NUM_CH_MASK) <<
		 SYS_REG_NUM_CH_SHIFT |
		 ((rank - 1) & SYS_REG_RANK_MASK) <<
		 SYS_REG_RANK_SHIFT(0) |
		 ((params_priv->ddr_config_t.col - 9) & SYS_REG_COL_MASK) <<
		 SYS_REG_COL_SHIFT(0) |
		 ((params_priv->ddr_config_t.bank == 3 ? 0 : 1) &
		 SYS_REG_BK_MASK) << SYS_REG_BK_SHIFT(0) |
		 ((params_priv->ddr_config_t.cs0_row - 13) &
		 SYS_REG_CS0_ROW_MASK) << SYS_REG_CS0_ROW_SHIFT(0) |
		 (bw & SYS_REG_BW_MASK) <<
		 SYS_REG_BW_SHIFT(0) |
		 (dbw & SYS_REG_DBW_MASK) <<
		 SYS_REG_DBW_SHIFT(0);

	writel(os_reg, &priv->grf->os_reg2);

	/* os_reg3: version plus the extend flag for a 12-bit row count */
	if (params_priv->ddr_config_t.cs0_row == 12)
		row_12 = 1;
	os_reg = (version & SYS_REG1_VERSION_MASK) <<
		 SYS_REG1_VERSION_SHIFT | (row_12 &
		 SYS_REG1_EXTEND_CS0_ROW_MASK) <<
		 SYS_REG1_EXTEND_CS0_ROW_SHIFT(0);
	writel(os_reg, &priv->grf->os_reg3);

	/* print a human-readable summary over the debug UART */
	printascii("In\n");
	printdec(params_priv->ddr_timing_t.freq);
	printascii("MHz\n");
	switch (params_priv->ddr_config_t.ddr_type & SYS_REG_DDRTYPE_MASK) {
	case 2:
		printascii("DDR2\n");
		break;
	case 5:
		printascii("LPDDR2\n");
		break;
	case 3:
	default:
		printascii("DDR3\n");
		break;
	}
	printascii(" Col=");
	printdec(params_priv->ddr_config_t.col);
	printascii(" Bank=");
	printdec(params_priv->ddr_config_t.bank);
	printascii(" Row=");
	printdec(params_priv->ddr_config_t.cs0_row);

	/* total bytes = 2^(bw + col + row + bank) for the single rank */
	size = 1llu << (bw +
	       params_priv->ddr_config_t.col +
	       params_priv->ddr_config_t.cs0_row +
	       params_priv->ddr_config_t.bank);
	ddr_info = size >> 20;
	printascii(" Size=");
	printdec(ddr_info);
	printascii("MB\n");
	printascii("msch:");
	ddr_info = readl(&priv->service_msch->ddrconf);
	printdec(ddr_info);
	printascii("\n");

	/* hand base/size off to the generic RAM info and next stage */
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = size;
	ddr_param.count = 1;
	ddr_param.para[0] = priv->info.base;
	ddr_param.para[1] = priv->info.size;
	rockchip_setup_ddr_param(&ddr_param);
}
607 
/*
 * Full DRAM initialization entry point.
 *
 * Sequence: enable DDR IO retention, set up the DDR clock, reset the
 * PHY, program DLL bypass for the target frequency, configure the
 * controller and PHY, power up and run the JEDEC memory init, then
 * data training (repeated until check_rd_gate() is satisfied),
 * capacity detection, address-map configuration and low-power setup.
 *
 * Return: 0 on success.  Hangs forever if capacity detection fails —
 * there is no way to continue booting without working DRAM.
 */
int rv1108_sdram_init(struct dram_info *sdram_priv,
		      struct sdram_params *params_priv)
{
	/* pmu enable ddr io retention */
	enable_ddr_io_ret(sdram_priv);
	rkdclk_init(sdram_priv, params_priv);
	phy_pctrl_reset(sdram_priv);
	phy_dll_bypass_set(sdram_priv, params_priv->ddr_timing_t.freq);
	pctl_cfg(sdram_priv, params_priv);
	phy_cfg(sdram_priv, params_priv);
	/* kick controller power-up and wait for completion */
	writel(POWER_UP_START, &sdram_priv->pctl->powctl);
	while (!(readl(&sdram_priv->pctl->powstat) & POWER_UP_DONE))
		;

	memory_init(sdram_priv, params_priv);
re_training:
	move_to_config_state(sdram_priv);
	data_training(sdram_priv);
	move_to_access_state(sdram_priv);
	/* deliberate hang: cannot continue without usable DRAM */
	if (sdram_detect(sdram_priv, params_priv)) {
		while (1)
			;
	}
	/* redo training if the read gate result is not plausible */
	if (check_rd_gate(sdram_priv))
		goto re_training;

	/* workaround data training not in middle */
	modify_data_training(sdram_priv, params_priv);

	dram_cfg_rbc(sdram_priv, params_priv);
	sdram_all_config(sdram_priv, params_priv);
	enable_low_power(sdram_priv, params_priv);

	return 0;
}
643