// SPDX-License-Identifier:     GPL-2.0+
/*
 * Copyright (C) 2020 Rockchip Electronics Co., Ltd
 */

#include <common.h>

#if defined(CONFIG_SPL_BUILD) || defined(CONFIG_TPL_BUILD)
#include <debug_uart.h>
#include <dm.h>
#include <dm/root.h>
#include <dt-structs.h>
#include <ram.h>
#include <regmap.h>
#include <asm/io.h>
#include <asm/types.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sdram_rv1108_pctl_phy.h>
#include <asm/arch/timer.h>
#include <asm/arch/sdram.h>

#if defined(CONFIG_ROCKCHIP_RV1108)
#include <asm/arch/sdram_rv1108.h>
#elif defined(CONFIG_ROCKCHIP_RK3308)
#include <asm/arch/sdram_rk3308.h>
#endif

/*
 * We cannot fit the code to access the device tree in SPL
 * (due to 6K SRAM size limits), so these are hard-coded.
 */

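/*
 * Copy n bytes from src to the memory-mapped registers at dest,
 * one 32-bit word at a time.
 */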
void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

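/*
 * Reset the DDR PHY: trigger the CRU-level reset, then assert and
 * release the analog-logic and digital-core resets in phy_reg0 with
 * short delays in between.
 */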
static void phy_pctrl_reset(struct dram_info *priv)
{
	phy_pctrl_reset_cru(priv);
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_DIGITAL_CORE_MASK | RESET_ANALOG_LOGIC_MASK,
			RESET_DIGITAL_CORE_ACT << RESET_DIGITAL_CORE_SHIFT |
			RESET_ANALOG_LOGIC_ACT << RESET_ANALOG_LOGIC_SHIFT);
	udelay(1);
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_ANALOG_LOGIC_MASK,
			RESET_ANALOG_LOGIC_DIS << RESET_ANALOG_LOGIC_SHIFT);
	udelay(5);
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_DIGITAL_CORE_MASK,
			RESET_DIGITAL_CORE_DIS << RESET_DIGITAL_CORE_SHIFT);
	udelay(1);
}

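/*
 * Set up DLL bypass for the command path and for the left/right DQ byte
 * lanes.  At or below PHY_LOW_SPEED_MHZ the TX DQ and CMD/CK DLLs are
 * bypassed as well; at higher frequencies they stay active.  The DQS RX
 * DLL is then configured for the target frequency.
 */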
static void phy_dll_bypass_set(struct dram_info *priv, unsigned int freq)
{
	clrsetbits_le32(&priv->phy->phy_reg13,
			CMD_DLL_BYPASS_MASK << CMD_DLL_BYPASS_SHIFT,
			CMD_DLL_BYPASS << CMD_DLL_BYPASS_SHIFT);

	writel(CK_DLL_BYPASS_DISABLE << CK_DLL_BYPASS_SHIFT,
	       &priv->phy->phy_reg14);

	clrsetbits_le32(&priv->phy->phy_reg26,
			LEFT_CHN_A_DQ_DLL_BYPASS_MASK << LEFT_CHN_A_DQ_DLL_SHIFT,
			LEFT_CHN_A_DQ_DLL_BYPASS << LEFT_CHN_A_DQ_DLL_SHIFT);
	writel(LEFT_CHN_A_DQS_DLL_BYPASS_DIS << LEFT_CHN_A_DQS_DLL_SHIFT,
	       &priv->phy->phy_reg27);

	clrsetbits_le32(&priv->phy->phy_reg36,
			RIGHT_CHN_A_DQ_DLL_BYPASS_MASK << RIGHT_CHN_A_DQ_DLL_SHIFT,
			RIGHT_CHN_A_DQ_DLL_BYPASS << RIGHT_CHN_A_DQ_DLL_SHIFT);
	writel(RIGHT_CHN_A_DQS_DLL_BYPASS_DIS <<
	       RIGHT_CHN_A_DQS_DLL_SHIFT, &priv->phy->phy_reg37);

	if (freq <= PHY_LOW_SPEED_MHZ) {
		writel(RIGHT_CHN_A_TX_DQ_BYPASS_SET <<
		       RIGHT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       LEFT_CHN_A_TX_DQ_BYPASS_SET <<
		       LEFT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       CMD_CK_DLL_BYPASS_SET << CMD_CK_DLL_BYPASS_SHIFT,
		       &priv->phy->phy_regdll);
	} else {
		writel(RIGHT_CHN_A_TX_DQ_BYPASS_DIS <<
		       RIGHT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       LEFT_CHN_A_TX_DQ_BYPASS_DIS <<
		       LEFT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       CMD_CK_DLL_BYPASS_DIS << CMD_CK_DLL_BYPASS_SHIFT,
		       &priv->phy->phy_regdll);
	}

	ddr_phy_dqs_rx_dll_cfg(priv, freq);
}

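/* Issue one command via the MCMD register and wait for START_CMD to clear */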
static void send_command(struct dram_info *priv,
			 u32 rank, u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << RANK_SEL_SHIFT) | arg | cmd),
	       &priv->pctl->mcmd);
	while (readl(&priv->pctl->mcmd) & START_CMD)
		;
}

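/*
 * Run the DRAM initialization command sequence: precharge-all, mode
 * register writes and ZQ calibration for DDR2/DDR3, or mode-register
 * reset plus per-rank ZQ calibration init for LPDDR2.
 */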
static void memory_init(struct dram_info *priv,
			struct sdram_params *params_priv)
{
	u32 mr0;

	if (params_priv->ddr_config_t.ddr_type == DDR3 ||
	    params_priv->ddr_config_t.ddr_type == DDR2) {
		send_command(priv, RANK_SEL_CS0_CS1, DESELECT_CMD, 0);
		udelay(1);
		send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);
		send_command(priv, RANK_SEL_CS0_CS1, DESELECT_CMD, 0);
		udelay(1);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR2 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[2] &
			     CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR3 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[3] &
			     CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR1 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[1] &
			     CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		mr0 = params_priv->ddr_timing_t.phy_timing.mr[0];
		if (params_priv->ddr_config_t.ddr_type == DDR3) {
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
				     (((mr0 | DDR3_DLL_RESET) &
				       CMD_ADDR_MASK) << CMD_ADDR_SHIFT));

			send_command(priv, RANK_SEL_CS0_CS1, ZQCL_CMD, 0);
		} else {
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
				     (((mr0 | DDR3_DLL_RESET) &
				       CMD_ADDR_MASK) << CMD_ADDR_SHIFT));
			send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, REF_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, REF_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) <<
				     BANK_ADDR_SHIFT |
				     ((mr0 & CMD_ADDR_MASK) <<
				      CMD_ADDR_SHIFT));
		}
	} else {
		/* reset */
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (63 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0 & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tINIT5 */
		udelay(10);
		/* ZQ calibration Init */
		send_command(priv, RANK_SEL_CS0, MRS_CMD,
			     (10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xFF & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tZQINIT */
		udelay(1);
		send_command(priv, RANK_SEL_CS1, MRS_CMD,
			     (10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xFF & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tZQINIT */
		udelay(1);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (1 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[1] &
			     LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (2 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[2] &
			     LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (3 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[3] &
			     LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
	}
}

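/*
 * Move the protocol controller state machine to CONFIG, waking it from
 * low power first if necessary.
 */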
void move_to_config_state(struct dram_info *priv)
{
	unsigned int state;

	while (1) {
		state = readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK)
				!= ACCESS)
				;
			/*
			 * If in the low-power state, wake up first and then
			 * enter config, so fall through.
			 */
		case ACCESS:
		case INIT_MEM:
			writel(CFG_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK)
				!= CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

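/*
 * Move the protocol controller state machine to ACCESS, passing through
 * CONFIG when coming from memory init.
 */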
void move_to_access_state(struct dram_info *priv)
{
	unsigned int state;

	while (1) {
		state = readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) &
				PCTL_CTL_STAT_MASK) != ACCESS)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) &
				PCTL_CTL_STAT_MASK) != CONFIG)
				;
			/* fallthrough */
		case CONFIG:
			writel(GO_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) &
				PCTL_CTL_STAT_MASK) != ACCESS)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

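/*
 * Program the protocol controller: DFI interface timing, ODT and the MCFG
 * memory/low-power configuration for the selected DRAM type, then enable
 * hardware low power and configure ppcfg for x16 memory.
 */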
static void pctl_cfg(struct dram_info *priv,
		     struct sdram_params *params_priv)
{
	u32 reg;
	u32 burstlen;
	u32 bl_mddr_lpddr2;

	/* DFI config */
	writel(DFI_DATA_BYTE_DISABLE_EN << DFI_DATA_BYTE_DISABLE_EN_SHIFT |
	       DFI_INIT_START_EN << DFI_INIT_START_SHIFT,
	       &priv->pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_DISABLE_EN_DPD <<
	       DFI_DRAM_CLK_DISABLE_EN_DPD_SHIFT |
	       DFI_DRAM_CLK_DISABLE_EN << DFI_DRAM_CLK_DISABLE_EN_SHIFT,
	       &priv->pctl->dfistcfg1);
	writel(PARITY_EN << PARITY_EN_SHIFT |
	       PARITY_INTR_EN << PARITY_INTR_EN_SHIFT, &priv->pctl->dfistcfg2);

	writel(TPHYUPD_TYPE0, &priv->pctl->dfitphyupdtype0);
	writel(TPHY_RDLAT, &priv->pctl->dfitphyrdlat);
	writel(TPHY_WRDATA, &priv->pctl->dfitphywrdata);

	writel(DFI_PHYUPD_DISABLE | DFI_CTRLUPD_DISABLE,
	       &priv->pctl->dfiupdcfg);

	copy_to_reg(&priv->pctl->togcnt1u,
		    &params_priv->ddr_timing_t.pctl_timing.togcnt1u,
		    sizeof(struct pctl_timing));
	/*
	 * The rv1108 PHY runs in 1:2 mode and noc_timing.b.burstlen has
	 * already been divided down to the scheduler clock, so multiply
	 * by 4 to get the DRAM burst length.
	 */
	burstlen = params_priv->ddr_timing_t.noc_timing.b.burstlen * 4;

	if (params_priv->ddr_config_t.ddr_type == DDR3 ||
	    params_priv->ddr_config_t.ddr_type == DDR2) {
		writel((RANK0_ODT_WRITE_SEL << RANK0_ODT_WRITE_SEL_SHIFT |
		       RANK1_ODT_WRITE_SEL << RANK1_ODT_WRITE_SEL_SHIFT),
		       &priv->pctl->dfiodtcfg);

		writel(ODT_LEN_BL8_W << ODT_LEN_BL8_W_SHIFT,
		       &priv->pctl->dfiodtcfg1);

		writel(params_priv->ddr_timing_t.pctl_timing.trsth,
		       &priv->pctl->trsth);
		if (params_priv->ddr_config_t.ddr_type == DDR3)
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR3_EN |
			       MEM_BL_8 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		else if (burstlen == 8)
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR2_EN |
			       MEM_BL_8 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		else
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR2_EN |
			       MEM_BL_4 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		writel(DFI_LP_EN_SR << DFI_LP_EN_SR_SHIFT |
		       DFI_LP_WAKEUP_SR_32_CYCLES << DFI_LP_WAKEUP_SR_SHIFT |
		       DFI_TLP_RESP << DFI_TLP_RESP_SHIFT,
		       &priv->pctl->dfilpcfg0);

		reg = readl(&priv->pctl->tcl);
		writel((reg - 1) / 2 - 1, &priv->pctl->dfitrddataen);
		reg = readl(&priv->pctl->tcwl);
		writel((reg - 1) / 2 - 1, &priv->pctl->dfitphywrlat);
	} else {
		if (burstlen == 4)
			bl_mddr_lpddr2 = MDDR_LPDDR2_BL_4;
		else
			bl_mddr_lpddr2 = MDDR_LPDDR2_BL_8;
		writel((RANK0_ODT_WRITE_DIS << RANK0_ODT_WRITE_SEL_SHIFT |
		       RANK1_ODT_WRITE_DIS << RANK1_ODT_WRITE_SEL_SHIFT),
		       &priv->pctl->dfiodtcfg);

		writel(ODT_LEN_BL8_W_0 << ODT_LEN_BL8_W_SHIFT,
		       &priv->pctl->dfiodtcfg1);

		writel(0, &priv->pctl->trsth);
		writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | LPDDR2_EN |
		       LPDDR2_S4 | bl_mddr_lpddr2 |
		       TFAW_CFG_6_TDDR | PD_EXIT_FAST_EXIT_MODE |
		       PD_TYPE_ACT_PD | PD_IDLE_DISABLE, &priv->pctl->mcfg);
		writel(DFI_LP_EN_SR << DFI_LP_EN_SR_SHIFT |
		       DFI_LP_WAKEUP_SR_32_CYCLES << DFI_LP_WAKEUP_SR_SHIFT |
		       DFI_TLP_RESP << DFI_TLP_RESP_SHIFT |
		       DFI_LP_WAKEUP_PD_32_CYCLES << DFI_LP_WAKEUP_PD_SHIFT |
		       DFI_LP_EN_PD,
		       &priv->pctl->dfilpcfg0);

		reg = readl(&priv->pctl->tcl);
		writel(reg / 2 - 1, &priv->pctl->dfitrddataen);
		reg = readl(&priv->pctl->tcwl);
		writel(reg / 2 - 1, &priv->pctl->dfitphywrlat);
	}
	pctl_cfg_grf(priv, params_priv);
	setbits_le32(&priv->pctl->scfg, HW_LOW_POWER_EN);

	/* only support x16 memory */
	clrsetbits_le32(&priv->pctl->ppcfg, PPMEM_EN_MASK, PPMEM_EN);
}

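/*
 * Program the PHY: memory type and burst length, CL/AL and CWL timing,
 * drive strength/ODT, and the 16-bit data width.
 */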
static void phy_cfg(struct dram_info *priv,
		    struct sdram_params *params_priv)
{
	u32 burstlen;

	burstlen = params_priv->ddr_timing_t.noc_timing.b.burstlen * 4;
	burstlen = (burstlen == 4) ? PHY_BL_4 : PHY_BL_8;
	ddr_msch_cfg(priv, params_priv);
	ddr_phy_skew_cfg(priv);
	switch (params_priv->ddr_config_t.ddr_type) {
	case DDR2:
		writel(MEMORY_SELECT_DDR2 | PHY_BL_8, &priv->phy->phy_reg1);
		break;
	case DDR3:
		writel(MEMORY_SELECT_DDR3 | PHY_BL_8, &priv->phy->phy_reg1);
		break;
	case LPDDR2:
	default:
		writel(MEMORY_SELECT_LPDDR2 | burstlen, &priv->phy->phy_reg1);
		break;
	}

	writel(params_priv->ddr_timing_t.phy_timing.cl_al,
	       &priv->phy->phy_regb);
	writel(params_priv->ddr_timing_t.pctl_timing.tcwl,
	       &priv->phy->phy_regc);

	set_ds_odt(priv, params_priv);

	/* only support x16 memory */
	clrsetbits_le32(&priv->phy->phy_reg0, DQ_16BIT_EN_MASK,
			DQ_16BIT_EN);
}

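/*
 * Apply the row/bank/column address mapping in the memory scheduler;
 * the controller must be in CONFIG state while it is changed.
 */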
static void dram_cfg_rbc(struct dram_info *priv,
			 struct sdram_params *params_priv)
{
	move_to_config_state(priv);
	ddr_msch_cfg_rbc(params_priv, priv);
	move_to_access_state(priv);
}

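/*
 * Run DQS gate training on CS0 with auto-refresh temporarily disabled,
 * then precharge all banks and restore the refresh interval.
 */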
static void data_training(struct dram_info *priv)
{
	u32 value;
	u32 tmp = 0;
	u32 tmp1 = 0;
	u32 timeout = 1000;

	/* disable auto refresh */
	value = readl(&priv->pctl->trefi);
	writel(UPD_REF, &priv->pctl->trefi);

	tmp1 = readl(&priv->phy->phy_reg2);

	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_DIS | tmp1,
	       &priv->phy->phy_reg2);
	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_ACT | tmp1,
	       &priv->phy->phy_reg2);

	/* delay until data training done */
	while (tmp != (CHN_A_HIGH_8BIT_TRAINING_DONE |
	       CHN_A_LOW_8BIT_TRAINING_DONE)) {
		udelay(1);
		tmp = (readl(&priv->phy->phy_regff) & CHN_A_TRAINING_DONE_MASK);
		timeout--;
		if (!timeout)
			break;
	}

	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_DIS | tmp1,
	       &priv->phy->phy_reg2);

	send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);

	writel(value | UPD_REF, &priv->pctl->trefi);
}

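/*
 * Detect column, bank and row counts by writing a test pattern at
 * power-of-two address offsets and checking for aliasing; returns
 * -EAGAIN if detection fails.
 */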
static int sdram_detect(struct dram_info *priv,
			struct sdram_params *params_priv)
{
	u32 row, col, row_max, col_max, bank_max;
	u32 bw = 1;
	phys_addr_t test_addr;
	struct ddr_schedule ddr_sch;

	/* if column detection goes wrong, row still needs an initial value */
	row = 0;

	/* detect col */
	move_to_config_state(priv);
	ddr_msch_get_max_col(priv, &ddr_sch);
	col_max = ddr_sch.col;
	bank_max = ddr_sch.bank;
	move_to_access_state(priv);

	for (col = col_max; col >= 10; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
		    (1ul << (col + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (col <= 9)
		goto cap_err;
	params_priv->ddr_config_t.col = col;

	if (params_priv->ddr_config_t.ddr_type == DDR3) {
		params_priv->ddr_config_t.bank = 3;
	} else {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
		    (1ul << (bank_max + col_max +
		    bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			params_priv->ddr_config_t.bank = 3;
		else
			params_priv->ddr_config_t.bank = 2;
	}

	/* detect row */
	move_to_config_state(priv);
	ddr_msch_get_max_row(priv, &ddr_sch);
	move_to_access_state(priv);
	col_max = ddr_sch.col;
	row_max = ddr_sch.row;

	for (row = row_max; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
				(1ul << (row + bank_max +
				col_max + bw - 1ul)));

		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row <= 11)
		goto cap_err;
	params_priv->ddr_config_t.cs0_row = row;
	return 0;
cap_err:
	return -EAGAIN;
}

#define DDR_VERSION	0x2

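/*
 * Record the detected DRAM geometry in GRF os_reg2/os_reg3 for later
 * stages, print a summary on the debug UART and report base/size via
 * rockchip_setup_ddr_param().
 */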
static void sdram_all_config(struct dram_info *priv,
			     struct sdram_params *params_priv)
{
	u32 version = DDR_VERSION;
	u32 os_reg = 0;
	u32 row_12 = 0;
	u32 ddr_info = 0;
	/* rk3308,rv1108 only support 1 channel, x16 ddr bus, x16 memory */
	u32 chn_cnt = 0;
	u32 rank = 1;
	u32 bw = 1;
	u32 dbw = 1;
	size_t size = 0;
	struct ddr_param ddr_param;

	/* os_reg2 */
	os_reg = (params_priv->ddr_config_t.ddr_type & SYS_REG_DDRTYPE_MASK) <<
		 SYS_REG_DDRTYPE_SHIFT |
		 (chn_cnt & SYS_REG_NUM_CH_MASK) <<
		 SYS_REG_NUM_CH_SHIFT |
		 ((rank - 1) & SYS_REG_RANK_MASK) <<
		 SYS_REG_RANK_SHIFT(0) |
		 ((params_priv->ddr_config_t.col - 9) & SYS_REG_COL_MASK) <<
		 SYS_REG_COL_SHIFT(0) |
		 ((params_priv->ddr_config_t.bank == 3 ? 0 : 1) &
		 SYS_REG_BK_MASK) << SYS_REG_BK_SHIFT(0) |
		 ((params_priv->ddr_config_t.cs0_row - 13) &
		 SYS_REG_CS0_ROW_MASK) << SYS_REG_CS0_ROW_SHIFT(0) |
		 (bw & SYS_REG_BW_MASK) <<
		 SYS_REG_BW_SHIFT(0) |
		 (dbw & SYS_REG_DBW_MASK) <<
		 SYS_REG_DBW_SHIFT(0);

	writel(os_reg, &priv->grf->os_reg2);

	/* os_reg3 */
	if (params_priv->ddr_config_t.cs0_row == 12)
		row_12 = 1;
	os_reg = (version & SYS_REG1_VERSION_MASK) <<
		 SYS_REG1_VERSION_SHIFT | (row_12 &
		 SYS_REG1_EXTEND_CS0_ROW_MASK) <<
		 SYS_REG1_EXTEND_CS0_ROW_SHIFT(0);
	writel(os_reg, &priv->grf->os_reg3);

	printascii("In\n");
	printdec(params_priv->ddr_timing_t.freq);
	printascii("MHz\n");
	switch (params_priv->ddr_config_t.ddr_type & SYS_REG_DDRTYPE_MASK) {
	case 2:
		printascii("DDR2\n");
		break;
	case 5:
		printascii("LPDDR2\n");
		break;
	case 3:
	default:
		printascii("DDR3\n");
		break;
	}
	printascii(" Col=");
	printdec(params_priv->ddr_config_t.col);
	printascii(" Bank=");
	printdec(params_priv->ddr_config_t.bank);
	printascii(" Row=");
	printdec(params_priv->ddr_config_t.cs0_row);

	size = 1llu << (bw +
	       params_priv->ddr_config_t.col +
	       params_priv->ddr_config_t.cs0_row +
	       params_priv->ddr_config_t.bank);
	ddr_info = size >> 20;
	printascii(" Size=");
	printdec(ddr_info);
	printascii("MB\n");
	printascii("msch:");
	ddr_info = readl(&priv->service_msch->ddrconf);
	printdec(ddr_info);
	printascii("\n");

	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = size;
	ddr_param.count = 1;
	ddr_param.para[0] = priv->info.base;
	ddr_param.para[1] = priv->info.size;
	rockchip_setup_ddr_param(&ddr_param);
}

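/*
 * Full DRAM bring-up: clock setup and PHY reset, controller and PHY
 * configuration, power-up, memory init, DQS gate training (retried if
 * the read gate check fails), capacity detection, address-map setup and
 * low-power enabling.
 */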
int rv1108_sdram_init(struct dram_info *sdram_priv,
		      struct sdram_params *params_priv)
{
	/* have the PMU enable DDR IO retention */
	enable_ddr_io_ret(sdram_priv);
	rkdclk_init(sdram_priv, params_priv);
	phy_pctrl_reset(sdram_priv);
	phy_dll_bypass_set(sdram_priv, params_priv->ddr_timing_t.freq);
	pctl_cfg(sdram_priv, params_priv);
	phy_cfg(sdram_priv, params_priv);
	writel(POWER_UP_START, &sdram_priv->pctl->powctl);
	while (!(readl(&sdram_priv->pctl->powstat) & POWER_UP_DONE))
		;

	memory_init(sdram_priv, params_priv);
re_training:
	move_to_config_state(sdram_priv);
	data_training(sdram_priv);
	move_to_access_state(sdram_priv);
	if (sdram_detect(sdram_priv, params_priv)) {
		while (1)
			;
	}
	if (check_rd_gate(sdram_priv))
		goto re_training;

	/* workaround for data training results not being centered */
	modify_data_training(sdram_priv, params_priv);

	dram_cfg_rbc(sdram_priv, params_priv);
	sdram_all_config(sdram_priv, params_priv);
	enable_low_power(sdram_priv, params_priv);

	return 0;
}

#endif /* CONFIG_SPL_BUILD || CONFIG_TPL_BUILD */