xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rk3328.c (revision 4e8c225a1d386380171f4be14f3bd22ca85bc0bb)
1 /*
2  * (C) Copyright 2017 Rockchip Electronics Co., Ltd.
3  *
4  * SPDX-License-Identifier:     GPL-2.0
5  */
6 #include <common.h>
7 #include <clk.h>
8 #include <debug_uart.h>
9 #include <dm.h>
10 #include <dt-structs.h>
11 #include <ram.h>
12 #include <regmap.h>
13 #include <syscon.h>
14 #include <asm/io.h>
15 #include <asm/arch/clock.h>
16 #include <asm/arch/cru_rk3328.h>
17 #include <asm/arch/grf_rk3328.h>
18 #include <asm/arch/sdram_common.h>
19 #include <asm/arch/sdram_rk3328.h>
20 #include <asm/arch/uart.h>
21 
DECLARE_GLOBAL_DATA_PTR;
/* Per-device private data; controller/PHY handles only exist in TPL */
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct rk3328_ddr_pctl_regs *pctl;	/* DDR controller (upctl2) */
	struct rk3328_ddr_phy_regs *phy;	/* inno DDR PHY */
	struct clk ddr_clk;
	struct rk3328_cru *cru;			/* clock & reset unit */
	struct rk3328_msch_regs *msch;		/* memory scheduler (NoC) */
	struct rk3328_ddr_grf_regs *ddr_grf;
#endif
	struct ram_info info;		/* base/size reported via get_info */
	struct rk3328_grf_regs *grf;	/* general GRF (os_reg storage) */
};
35 
36 #ifdef CONFIG_TPL_BUILD
37 
/* Active channel geometry: seeded from DT params, refined by detection */
struct rk3328_sdram_channel sdram_ch;
39 
/* Platform data: of-platdata struct or the DT-parsed sdram parameter blob */
struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3328_dmc dtplat;
#else
	struct rk3328_sdram_params sdram_params;
#endif
	/* ranges in "reg" order: phy, pctl, grf, cru, msch, ddr_grf */
	struct regmap *map;
};
48 
49 #if CONFIG_IS_ENABLED(OF_PLATDATA)
50 static int conv_of_platdata(struct udevice *dev)
51 {
52 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
53 	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
54 	int ret;
55 
56 	ret = regmap_init_mem_platdata(dev, dtplat->reg,
57 				       ARRAY_SIZE(dtplat->reg) / 2,
58 				       &plat->map);
59 	if (ret)
60 		return ret;
61 
62 	return 0;
63 }
64 #endif
65 
66 static void rkclk_ddr_reset(struct dram_info *dram,
67 			    u32 ctl_srstn, u32 ctl_psrstn,
68 			    u32 phy_srstn, u32 phy_psrstn)
69 {
70 	writel(ddrctrl_srstn_req(ctl_srstn) | ddrctrl_psrstn_req(ctl_psrstn) |
71 		ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
72 		&dram->cru->softrst_con[5]);
73 	writel(ddrctrl_asrstn_req(ctl_srstn), &dram->cru->softrst_con[9]);
74 }
75 
/*
 * Program the DPLL to @mhz (the PHY clock; callers pass 2x the dram
 * data-rate setting). Output = (24MHz / refdiv) * fbdiv / (postdiv1 *
 * postdiv2); the postdiv pairs per band keep the VCO in range.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int mhz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;

	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	/* solve the feedback divider for the 24MHz reference */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* switch DPLL to slow mode before touching the dividers */
	writel(((0x1 << 4) << 16) | (0 << 4), &dram->cru->mode_con);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->dpll_con[0]);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->dpll_con[1]);

	/* wait up to ~1ms for PLL lock */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->dpll_con[1])))
			break;
		delay--;
	}

	/* back to normal (PLL) mode */
	writel(((0x1 << 4) << 16) | (1 << 4), &dram->cru->mode_con);
}
117 
/* Select the DPLL as the DDR clock source and set it to 2x ddr_freq. */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;

	/* choose DPLL for ddr clk source */
	clrbits_le32(PHY_REG(phy_base, 0xef), 1 << 7);

	/* for inno ddr phy need 2*freq */
	rkclk_set_dpll(dram,  sdram_params->ddr_freq * 2);
}
129 
/*
 * Pulse the PHY resets via register 0: assert analog+digital together,
 * then release analog before digital, with settle delays between steps.
 */
static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET);
	udelay(5);
	setbits_le32(PHY_REG(phy_base, 0), DIGITAL_DERESET);
	udelay(1);
}
141 
/*
 * Load the controller (upctl2) registers from the parameter blob and
 * apply the fixed low-power/ZQ policy. Always returns 0.
 */
static int pctl_cfg(struct dram_info *dram,
		    struct rk3328_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *pctl_base = dram->pctl;

	/* blob is (offset, value) pairs, terminated by offset 0xFFFFFFFF */
	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->pctl_regs.pctl[i][1],
		       pctl_base + sdram_params->pctl_regs.pctl[i][0]);
	}
	/* self-refresh / power-down idle timeouts (policy constants) */
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((SR_IDLE & 0xff) << 16) | (PD_IDLE & 0x1f));
	/*
	 * dfi_lp_en_pd=1,dfi_lp_wakeup_pd=2
	 * hw_lp_idle_x32=1
	 */
	if (sdram_params->dramtype == LPDDR3) {
		setbits_le32(pctl_base + DDR_PCTL2_DFILPCFG0, 1);
		clrsetbits_le32(pctl_base + DDR_PCTL2_DFILPCFG0,
				0xf << 4,
				2 << 4);
	}
	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			1 << 16);
	/* disable zqcs (0x2000: presumably the shadow register set - verify) */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	setbits_le32(pctl_base + 0x2000 + DDR_PCTL2_ZQCTL0, 1u << 31);

	return 0;
}
174 
/* return ddrconfig value
 *       (-1), find ddrconfig fail
 *       other, the ddrconfig value
 * only support cs0_row >= cs1_row
 */
static unsigned int calculate_ddrconfig(
	struct rk3328_sdram_params *sdram_params)
{
	u32 cs, bw, die_bw, col, row, bank;
	u32 i, tmp;
	u32 ddrconf = -1;	/* wraps to 0xffffffff; caught by >20 check */

	cs = sdram_ch.rank;
	bw = sdram_ch.bw;
	die_bw = sdram_ch.dbw;
	col = sdram_ch.col;
	row = sdram_ch.cs0_row;
	bank = sdram_ch.bk;

	if (sdram_params->dramtype == DDR4) {
		/* ddr4 uses table entries 10..16 */
		tmp = ((cs - 1) << 6) | ((row - 13) << 3) | (bw & 0x2) | die_bw;
		for (i = 10; i < 17; i++) {
			/* low bits must match exactly; entry must cover the rest */
			if (((tmp & 0x7) == (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
			    ((tmp & 0x3c) <= (ddr4_cfg_2_rbc[i - 10] & 0x3c)) &&
			    ((tmp & 0x40) <= (ddr4_cfg_2_rbc[i - 10] & 0x40))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* 4-bank (bk==2) ddr3/lpddr3 parts use dedicated config 8 */
		if (bank == 2) {
			ddrconf = 8;
			goto out;
		}

		tmp = ((row - 13) << 4) | (1 << 2) | ((bw + col - 11) << 0);
		for (i = 0; i < 5; i++)
			if (((tmp & 0xf) == (ddr_cfg_2_rbc[i] & 0xf)) &&
			    ((tmp & 0x30) <= (ddr_cfg_2_rbc[i] & 0x30))) {
				ddrconf = i;
				goto out;
			}
	}

out:
	if (ddrconf > 20)
		printf("calculate_ddrconfig error\n");

	return ddrconf;
}
225 
226 /* n: Unit bytes */
227 static void copy_to_reg(u32 *dest, u32 *src, u32 n)
228 {
229 	int i;
230 
231 	for (i = 0; i < n / sizeof(u32); i++) {
232 		writel(*src, dest);
233 		src++;
234 		dest++;
235 	}
236 }
237 
238 /*******
239  * calculate controller dram address map, and setting to register.
240  * argument sdram_ch.ddrconf must be right value before
241  * call this function.
242  *******/
243 static void set_ctl_address_map(struct dram_info *dram,
244 				struct rk3328_sdram_params *sdram_params)
245 {
246 	void __iomem *pctl_base = dram->pctl;
247 
248 	copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
249 		    &addrmap[sdram_ch.ddrconfig][0], 9 * 4);
250 	if ((sdram_params->dramtype == LPDDR3) && (sdram_ch.row_3_4))
251 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
252 	if ((sdram_params->dramtype == DDR4) && (sdram_ch.bw == 0x1))
253 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
254 
255 	if (sdram_ch.rank == 1)
256 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
257 }
258 
/*
 * Set DLL bypass and lane clock mode for the target PHY frequency
 * (@freq in Hz).
 * NOTE(review): bit meanings below are inferred from usage; confirm
 * against the inno PHY documentation.
 */
static void phy_dll_bypass_set(struct dram_info *dram, u32 freq)
{
	u32 tmp;
	void __iomem *phy_base = dram->phy;

	/* 0x13/0x14 are the CA group, then one register pair per byte lane */
	setbits_le32(PHY_REG(phy_base, 0x13), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x14), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x26), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x27), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x36), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x37), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x46), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x47), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x56), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x57), 1 << 3);

	if (freq <= (400 * MHz))
		/* DLL bypass */
		setbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	else
		clrbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	/* per-lane clock setting depends on the frequency band */
	if (freq <= (680 * MHz))
		tmp = 2;
	else
		tmp = 1;
	writel(tmp, PHY_REG(phy_base, 0x28));
	writel(tmp, PHY_REG(phy_base, 0x38));
	writel(tmp, PHY_REG(phy_base, 0x48));
	writel(tmp, PHY_REG(phy_base, 0x58));
}
289 
/*
 * Program output drive strength (DS) and on-die termination (ODT) for
 * command, clock and every DQS/DQ lane, with values per dram type.
 */
static void set_ds_odt(struct dram_info *dram,
		       struct rk3328_sdram_params *sdram_params)
{
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;
	void __iomem *phy_base = dram->phy;

	if (sdram_params->dramtype == DDR3) {
		cmd_drv = PHY_DDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR3_RON_RTT_45ohm;
		dqs_drv = PHY_DDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR3_RON_RTT_225ohm;
	} else {
		/* ddr4 and lpddr3 share one encoding table */
		cmd_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR4_LPDDR3_RON_RTT_43ohm;
		dqs_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_240ohm;
	}
	/* DS */
	writel(cmd_drv, PHY_REG(phy_base, 0x11));
	clrsetbits_le32(PHY_REG(phy_base, 0x12), 0x1f << 3, cmd_drv << 3);
	writel(clk_drv, PHY_REG(phy_base, 0x16));
	writel(clk_drv, PHY_REG(phy_base, 0x18));
	/* two DS registers per byte lane (0x2x..0x5x) */
	writel(dqs_drv, PHY_REG(phy_base, 0x20));
	writel(dqs_drv, PHY_REG(phy_base, 0x2f));
	writel(dqs_drv, PHY_REG(phy_base, 0x30));
	writel(dqs_drv, PHY_REG(phy_base, 0x3f));
	writel(dqs_drv, PHY_REG(phy_base, 0x40));
	writel(dqs_drv, PHY_REG(phy_base, 0x4f));
	writel(dqs_drv, PHY_REG(phy_base, 0x50));
	writel(dqs_drv, PHY_REG(phy_base, 0x5f));
	/* ODT */
	writel(dqs_odt, PHY_REG(phy_base, 0x21));
	writel(dqs_odt, PHY_REG(phy_base, 0x2e));
	writel(dqs_odt, PHY_REG(phy_base, 0x31));
	writel(dqs_odt, PHY_REG(phy_base, 0x3e));
	writel(dqs_odt, PHY_REG(phy_base, 0x41));
	writel(dqs_odt, PHY_REG(phy_base, 0x4e));
	writel(dqs_odt, PHY_REG(phy_base, 0x51));
	writel(dqs_odt, PHY_REG(phy_base, 0x5e));
}
330 
/*
 * Configure the DDR PHY: DLL mode, parameter-blob registers, byte-lane
 * enables, DS/ODT and the deskew tables.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rk3328_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *phy_base = dram->phy;

	phy_dll_bypass_set(dram, sdram_params->ddr_freq);
	/* blob is (offset, value) pairs, terminated by offset 0xFFFFFFFF */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}
	/* enable all four byte lanes for 32-bit bw, two lanes otherwise */
	if (sdram_ch.bw == 2) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 3 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	}
	set_ds_odt(dram, sdram_params);
	/* deskew */
	setbits_le32(PHY_REG(phy_base, 2), 8);
	copy_to_reg(PHY_REG(phy_base, 0xb0),
		    &sdram_params->skew.a0_a1_skew[0], 15 * 4);
	copy_to_reg(PHY_REG(phy_base, 0x70),
		    &sdram_params->skew.cs0_dm0_skew[0], 44 * 4);
	copy_to_reg(PHY_REG(phy_base, 0xc0),
		    &sdram_params->skew.cs0_dm1_skew[0], 44 * 4);
}
360 
361 static int update_refresh_reg(struct dram_info *dram)
362 {
363 	void __iomem *pctl_base = dram->pctl;
364 	u32 ret;
365 
366 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
367 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
368 
369 	return 0;
370 }
371 
372 static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
373 {
374 	u32 ret;
375 	u32 dis_auto_zq = 0;
376 	void __iomem *pctl_base = dram->pctl;
377 	void __iomem *phy_base = dram->phy;
378 
379 	/* disable zqcs */
380 	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
381 		(1ul << 31))) {
382 		dis_auto_zq = 1;
383 		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
384 	}
385 	/* disable auto refresh */
386 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
387 	update_refresh_reg(dram);
388 
389 	if (dramtype == DDR4) {
390 		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0);
391 		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0);
392 		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0);
393 		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0);
394 	}
395 	/* choose training cs */
396 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
397 	/* enable gate training */
398 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
399 	udelay(50);
400 	ret = readl(PHY_REG(phy_base, 0xff));
401 	/* disable gate training */
402 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
403 	/* restore zqcs */
404 	if (dis_auto_zq)
405 		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
406 	/* restore auto refresh */
407 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
408 	update_refresh_reg(dram);
409 
410 	if (dramtype == DDR4) {
411 		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0x2);
412 		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0x2);
413 		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0x2);
414 		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0x2);
415 	}
416 
417 	if (ret & 0x10) {
418 		ret = -1;
419 	} else {
420 		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0)) >> 4);
421 		ret = (ret == 0) ? 0 : -1;
422 	}
423 	return ret;
424 }
425 
/* rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: be careful of keep mr original val
 */
static int write_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 arg,
		    u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait for any in-flight MR operation to finish */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;
	if ((dramtype == DDR3) || (dramtype == DDR4)) {
		/* ddr3/4: MR number in MRCTRL0, full argument in MRCTRL1 */
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		/* lpddr: MR number and 8-bit operand both go in MRCTRL1 */
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	/* trigger the MR write and wait until the controller is idle */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return 0;
}
457 
458 /*
459  * rank : 1:cs0, 2:cs1, 3:cs0&cs1
460  * vrefrate: 4500: 45%,
461  */
462 static int write_vrefdq(struct dram_info *dram, u32 rank, u32 vrefrate,
463 			u32 dramtype)
464 {
465 	u32 tccd_l, value;
466 	u32 dis_auto_zq = 0;
467 	void __iomem *pctl_base = dram->pctl;
468 
469 	if ((dramtype != DDR4) || (vrefrate < 4500) ||
470 	    (vrefrate > 9200))
471 		return -1;
472 
473 	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
474 	tccd_l = (tccd_l - 4) << 10;
475 
476 	if (vrefrate > 7500) {
477 		/* range 1 */
478 		value = ((vrefrate - 6000) / 65) | tccd_l;
479 	} else {
480 		/* range 2 */
481 		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
482 	}
483 
484 	/* disable zqcs */
485 	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
486 		(1ul << 31))) {
487 		dis_auto_zq = 1;
488 		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
489 	}
490 	/* disable auto refresh */
491 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
492 	update_refresh_reg(dram);
493 
494 	/* enable vrefdq calibratin */
495 	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
496 	udelay(1);/* tvrefdqe */
497 	/* write vrefdq value */
498 	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
499 	udelay(1);/* tvref_time */
500 	write_mr(dram, rank, 6, value | (0 << 7), dramtype);
501 	udelay(1);/* tvrefdqx */
502 
503 	/* restore zqcs */
504 	if (dis_auto_zq)
505 		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
506 	/* restore auto refresh */
507 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
508 	update_refresh_reg(dram);
509 
510 	return 0;
511 }
512 
/* Parenthesized args: the old form mis-expanded low-precedence operands
 * (e.g. ternaries). Still evaluates each argument twice - no side effects.
 */
#define _MAX_(x, y) ((x) > (y) ? (x) : (y))
514 
/*
 * Derive the rx deskew switch point from the gate-training results
 * (regs 0xfb..0xfe, one per byte lane): take the maximum, scale by 1/8
 * plus one, clamp to 0x1f, then split the 5-bit value across 0x6e/0x6f.
 */
static void rx_deskew_switch_adjust(struct dram_info *dram)
{
	u32 i, deskew_val;
	u32 gate_val = 0;
	void __iomem *phy_base = dram->phy;

	for (i = 0; i < 4; i++)
		gate_val = _MAX_(readl(PHY_REG(phy_base, 0xfb + i)), gate_val);

	deskew_val = (gate_val >> 3) + 1;
	deskew_val = (deskew_val > 0x1f) ? 0x1f : deskew_val;
	/* bits [1:0] -> 0x6e[3:2], bits [4:2] -> 0x6f[6:4] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xc, (deskew_val & 0x3) << 2);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x7 << 4,
			(deskew_val & 0x1c) << 2);
}
530 
/* Set the tx deskew switch mode field (reg 0x6e bits [1:0]) to 1. */
static void tx_deskew_switch_adjust(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0x3, 1);
}
537 
/* Select the memory scheduler's address-mapping configuration entry. */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->ddrconf);
}
542 
/*
 * Publish the final configuration: select the msch address map, encode
 * the detected geometry into GRF os_reg[2] (later decoded by
 * rockchip_sdram_size() in SPL/U-Boot proper) and program the NoC
 * timing registers from the parameter blob.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rk3328_sdram_params *sdram_params)
{
	u32 sys_reg = 0;

	set_ddrconfig(dram, sdram_ch.ddrconfig);

	/* pack dram type and channel geometry into the os_reg encoding */
	sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
	sys_reg |= SYS_REG_ENC_ROW_3_4(sdram_ch.row_3_4);
	sys_reg |= SYS_REG_ENC_RANK(sdram_ch.rank);
	sys_reg |= SYS_REG_ENC_COL(sdram_ch.col);
	sys_reg |= SYS_REG_ENC_BK(sdram_ch.bk);
	sys_reg |= SYS_REG_ENC_CS0_ROW(sdram_ch.cs0_row);
	if (sdram_ch.cs1_row)
		sys_reg |= SYS_REG_ENC_CS1_ROW(sdram_ch.cs1_row);
	sys_reg |= SYS_REG_ENC_BW(sdram_ch.bw);
	sys_reg |= SYS_REG_ENC_DBW(sdram_ch.dbw);

	writel(sys_reg, &dram->grf->os_reg[2]);

	writel(sdram_ch.noc_timings.ddrtiming.d32, &dram->msch->ddrtiming);

	writel(sdram_ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_ch.noc_timings.readlatency, &dram->msch->readlatency);

	writel(sdram_ch.noc_timings.activate.d32, &dram->msch->activate);
	writel(sdram_ch.noc_timings.devtodev.d32, &dram->msch->devtodev);
	writel(sdram_ch.noc_timings.ddr4timing.d32, &dram->msch->ddr4_timing);
	/* the same aging value is applied to all six aging registers */
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging0);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging1);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging2);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging3);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging4);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging5);
}
578 
/*
 * Enable controller clock auto-gating and hardware self-refresh /
 * power-down entry. SR_IDLE and PD_IDLE are compile-time policy
 * constants, so the if/else pairs resolve at build time.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;

	/* enable upctl2 axi clock auto gating */
	writel(0x00800000, &dram->ddr_grf->ddr_grf_con[0]);
	writel(0x20012001, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable upctl2 core clock auto gating */
	writel(0x001e001a, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL,	1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	/* PWRCTL bit 3: enable dfi dram clock disable */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
600 
/*
 * Bring up the DDR subsystem: reset sequencing, PLL setup, controller
 * and PHY configuration, then gate training.
 * @pre_init: 1 for the first (geometry-probing) pass - skips the dram
 *            type banner and the deskew adjustments.
 * Returns 0 on success, -1 when gate training fails.
 */
static int sdram_init(struct dram_info *dram,
		      struct rk3328_sdram_params *sdram_params, u32 pre_init)
{
	void __iomem *pctl_base = dram->pctl;

	/* hold everything in reset while the clock is reprogrammed */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);
	if (pre_init == 0) {
		switch (sdram_params->dramtype) {
		case DDR3:
			printf("DDR3\n");
			break;
		case DDR4:
			printf("DDR4\n");
			break;
		case LPDDR3:
		default:
			printf("LPDDR3\n");
			break;
		}
	}
	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram, sdram_params);
	sdram_ch.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram, sdram_params);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* do ddr gate training */
	if (data_training(dram, 0, sdram_params->dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}

	/* ddr4: calibrate VrefDQ on both ranks (56.70%) */
	if (sdram_params->dramtype == DDR4)
		write_vrefdq(dram, 0x3, 5670, sdram_params->dramtype);

	if (pre_init == 0) {
		rx_deskew_switch_adjust(dram);
		tx_deskew_switch_adjust(dram);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
666 
667 static u64 dram_detect_cap(struct dram_info *dram,
668 			   struct rk3328_sdram_params *sdram_params,
669 			   unsigned char channel)
670 {
671 	void __iomem *pctl_base = dram->pctl;
672 
673 	/*
674 	 * for ddr3: ddrconf = 3
675 	 * for ddr4: ddrconf = 12
676 	 * for lpddr3: ddrconf = 3
677 	 * default bw = 1
678 	 */
679 	u32 bk, bktmp;
680 	u32 col, coltmp;
681 	u32 row, rowtmp, row_3_4;
682 	void __iomem *test_addr, *test_addr1;
683 	u32 dbw;
684 	u32 cs;
685 	u32 bw = 1;
686 	u64 cap = 0;
687 	u32 dram_type = sdram_params->dramtype;
688 	u32 pwrctl;
689 
690 	if (dram_type != DDR4) {
691 		/* detect col and bk for ddr3/lpddr3 */
692 		coltmp = 12;
693 		bktmp = 3;
694 		rowtmp = 16;
695 
696 		for (col = coltmp; col >= 9; col -= 1) {
697 			writel(0, SDRAM_ADDR);
698 			test_addr = (void __iomem *)(SDRAM_ADDR +
699 					(1ul << (col + bw - 1ul)));
700 			writel(PATTERN, test_addr);
701 			if ((readl(test_addr) == PATTERN) &&
702 			    (readl(SDRAM_ADDR) == 0))
703 				break;
704 		}
705 		if (col == 8) {
706 			printf("col error\n");
707 			goto cap_err;
708 		}
709 
710 		test_addr = (void __iomem *)(SDRAM_ADDR +
711 				(1ul << (coltmp + bktmp + bw - 1ul)));
712 		writel(0, SDRAM_ADDR);
713 		writel(PATTERN, test_addr);
714 		if ((readl(test_addr) == PATTERN) &&
715 		    (readl(SDRAM_ADDR) == 0))
716 			bk = 3;
717 		else
718 			bk = 2;
719 		if (dram_type == LPDDR3)
720 			dbw = 2;
721 		else
722 			dbw = 1;
723 	} else {
724 		/* detect bg for ddr4 */
725 		coltmp = 10;
726 		bktmp = 4;
727 		rowtmp = 17;
728 
729 		col = 10;
730 		bk = 2;
731 		test_addr = (void __iomem *)(SDRAM_ADDR +
732 				(1ul << (coltmp + bw + 1ul)));
733 		writel(0, SDRAM_ADDR);
734 		writel(PATTERN, test_addr);
735 		if ((readl(test_addr) == PATTERN) &&
736 		    (readl(SDRAM_ADDR) == 0))
737 			dbw = 0;
738 		else
739 			dbw = 1;
740 	}
741 	/* detect row */
742 	for (row = rowtmp; row > 12; row--) {
743 		writel(0, SDRAM_ADDR);
744 		test_addr = (void __iomem *)(SDRAM_ADDR +
745 				(1ul << (row + bktmp + coltmp + bw - 1ul)));
746 		writel(PATTERN, test_addr);
747 		if ((readl(test_addr) == PATTERN) &&
748 		    (readl(SDRAM_ADDR) == 0))
749 			break;
750 	}
751 	if (row == 12) {
752 		printf("row error");
753 		goto cap_err;
754 	}
755 	/* detect row_3_4 */
756 	test_addr = SDRAM_ADDR;
757 	test_addr1 = (void __iomem *)(SDRAM_ADDR +
758 			(0x3ul << (row + bktmp + coltmp + bw - 1ul - 1ul)));
759 
760 	writel(0, test_addr);
761 	writel(PATTERN, test_addr1);
762 	if ((readl(test_addr) == 0) &&
763 	    (readl(test_addr1) == PATTERN))
764 		row_3_4 = 0;
765 	else
766 		row_3_4 = 1;
767 
768 	/* disable auto low-power */
769 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
770 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
771 
772 	/* bw and cs detect using phy read gate training */
773 	if (data_training(dram, 1, dram_type) == 0)
774 		cs = 1;
775 	else
776 		cs = 0;
777 
778 	bw = 2;
779 
780 	/* restore auto low-power */
781 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
782 
783 	sdram_ch.rank = cs + 1;
784 	sdram_ch.col = col;
785 	sdram_ch.bk = bk;
786 	sdram_ch.dbw = dbw;
787 	sdram_ch.bw = bw;
788 	sdram_ch.cs0_row = row;
789 	if (cs)
790 		sdram_ch.cs1_row = row;
791 	else
792 		sdram_ch.cs1_row = 0;
793 	sdram_ch.row_3_4 = row_3_4;
794 
795 	if (dram_type == DDR4)
796 		cap = 1llu << (cs + row + bk + col + ((dbw == 0) ? 2 : 1) + bw);
797 	else
798 		cap = 1llu << (cs + row + bk + col + bw);
799 
800 	return cap;
801 
802 cap_err:
803 	return 0;
804 }
805 
/*
 * Patch the MSTR entry (offset 0) of the pctl register blob and the
 * NoC bwratio with the detected device width, rank and bus width so
 * the second sdram_init() pass uses the real geometry. Returns 0.
 * NOTE(review): if no offset-0 entry exists, tmp_adr stays 0 and
 * entry 0 is overwritten - presumably the blob always contains MSTR.
 */
static u32 remodify_sdram_params(struct rk3328_sdram_params *sdram_params)
{
	u32 tmp = 0, tmp_adr = 0, i;

	/* locate the MSTR (offset 0) entry in the register blob */
	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		if (sdram_params->pctl_regs.pctl[i][0] == 0) {
			tmp = sdram_params->pctl_regs.pctl[i][1];/* MSTR */
			tmp_adr = i;
		}
	}

	/* clear device-width, rank and bus-width fields */
	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));

	/* MSTR[31:30]: dram device (die) width */
	switch (sdram_ch.dbw) {
	case 2:
		tmp |= (3ul << 30);
		break;
	case 1:
		tmp |= (2ul << 30);
		break;
	case 0:
	default:
		tmp |= (1ul << 30);
		break;
	}

	/* MSTR[25:24]: active ranks bitmap */
	if (sdram_ch.rank == 2)
		tmp |= 3 << 24;
	else
		tmp |= 1 << 24;

	/* MSTR[13:12]: bus width (0 = full, 1 = half) */
	tmp |= (2 - sdram_ch.bw) << 12;

	sdram_params->pctl_regs.pctl[tmp_adr][1] = tmp;

	/* NoC bandwidth ratio follows the bus width */
	if (sdram_ch.bw == 2)
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 0;
	else
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 1;

	return 0;
}
848 
/*
 * Determine the cs1 row count by probing for address aliasing inside
 * the cs1 address window. Returns the detected row count, or 0 when
 * rank != 2 or no row size matched.
 */
static int dram_detect_cs1_row(struct rk3328_sdram_params *sdram_params,
			       unsigned char channel)
{
	u32 ret = 0;
	u32 cs1_bit;
	void __iomem *test_addr, *cs1_addr;
	u32 row, bktmp, coltmp, bw;
	u32 ddrconf = sdram_ch.ddrconfig;

	if (sdram_ch.rank == 2) {
		/* addrmap[][0] encodes the cs bit position minus 8 */
		cs1_bit = addrmap[ddrconf][0] + 8;

		if (cs1_bit > 31)
			goto out;

		cs1_addr = (void __iomem *)(1ul << cs1_bit);
		/*
		 * cs1_bit is reused below as a +1 bit adjustment for low
		 * cs positions. NOTE(review): inferred from usage - verify.
		 */
		if (cs1_bit < 20)
			cs1_bit = 1;
		else
			cs1_bit = 0;

		if (sdram_params->dramtype == DDR4) {
			/* fold the ddr4 bank-group bits into the bank count */
			if (sdram_ch.dbw == 0)
				bktmp = sdram_ch.bk + 2;
			else
				bktmp = sdram_ch.bk + 1;
		} else {
			bktmp = sdram_ch.bk;
		}
		bw = sdram_ch.bw;
		coltmp = sdram_ch.col;

		/* detect cs1 row */
		for (row = sdram_ch.cs0_row; row > 12; row--) {
			test_addr = (void __iomem *)(SDRAM_ADDR + cs1_addr +
					(1ul << (row + cs1_bit + bktmp +
					 coltmp + bw - 1ul)));
			writel(0, SDRAM_ADDR + cs1_addr);
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(SDRAM_ADDR + cs1_addr) == 0)) {
				ret = row;
				break;
			}
		}
	}

out:
	return ret;
}
899 
/*
 * Full detection flow: init with the DT-provided defaults, probe the
 * real geometry, patch the parameters, re-init with the real values,
 * then size cs1. Always returns 0.
 * NOTE(review): a dram_detect_cap() failure (return 0) is not checked.
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	debug("Starting SDRAM initialization...\n");

	memcpy(&sdram_ch, &sdram_params->ch,
	       sizeof(struct rk3328_sdram_channel));

	/* pre-init pass with the conservative default geometry */
	sdram_init(dram, sdram_params, 1);
	dram_detect_cap(dram, sdram_params, 0);

	/* modify bw, cs related timing */
	remodify_sdram_params(sdram_params);
	/* reinit sdram by real dram cap */
	sdram_init(dram, sdram_params, 0);

	/* redetect cs1 row */
	sdram_ch.cs1_row =
		dram_detect_cs1_row(sdram_params, 0);

	return 0;
}
922 
923 static int rk3328_dmc_init(struct udevice *dev)
924 {
925 	struct dram_info *priv = dev_get_priv(dev);
926 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
927 	int ret;
928 
929 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
930 	struct rk3328_sdram_params *params = &plat->sdram_params;
931 #else
932 	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
933 	struct rk3328_sdram_params *params =
934 					(void *)dtplat->rockchip_sdram_params;
935 
936 	ret = conv_of_platdata(dev);
937 	if (ret)
938 		return ret;
939 #endif
940 	priv->phy = regmap_get_range(plat->map, 0);
941 	priv->pctl = regmap_get_range(plat->map, 1);
942 	priv->grf = regmap_get_range(plat->map, 2);
943 	priv->cru = regmap_get_range(plat->map, 3);
944 	priv->msch = regmap_get_range(plat->map, 4);
945 	priv->ddr_grf = regmap_get_range(plat->map, 5);
946 
947 	debug("%s phy %p pctrl %p grf %p cru %p msch %p ddr_grf %p\n",
948 	      __func__, priv->phy, priv->pctl, priv->grf, priv->cru,
949 	      priv->msch, priv->ddr_grf);
950 	ret = sdram_init_detect(priv, params);
951 	if (ret < 0) {
952 		printf("%s DRAM init failed%d\n", __func__, ret);
953 		return ret;
954 	}
955 
956 	return 0;
957 }
958 
959 static int rk3328_dmc_ofdata_to_platdata(struct udevice *dev)
960 {
961 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
962 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
963 	int ret;
964 
965 	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
966 				 (u32 *)&plat->sdram_params,
967 				 sizeof(plat->sdram_params) / sizeof(u32));
968 	if (ret) {
969 		printf("%s: Cannot read rockchip,sdram-params %d\n",
970 		       __func__, ret);
971 		return ret;
972 	}
973 	ret = regmap_init_mem(dev, &plat->map);
974 	if (ret)
975 		printf("%s: regmap failed %d\n", __func__, ret);
976 #endif
977 	return 0;
978 }
979 
980 #endif
981 
/*
 * Probe: in TPL, run the full DRAM init; in SPL/U-Boot proper, read
 * back the size encoded in GRF os_reg[2] by the TPL stage.
 */
static int rk3328_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_TPL_BUILD
	/*
	 * NOTE(review): a failed init returns 0, so boot continues even
	 * when DRAM setup failed - confirm this is intentional.
	 */
	if (rk3328_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	debug("%s: grf=%p\n", __func__, priv->grf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	/* decode the geometry the TPL stage stored in os_reg[2] */
	priv->info.size = rockchip_sdram_size(
				(phys_addr_t)&priv->grf->os_reg[2]);
#ifdef CONFIG_SPL_BUILD
	rockchip_setup_ddr_param(&priv->info);
#endif
#endif
	return 0;
}
1001 
1002 static int rk3328_dmc_get_info(struct udevice *dev, struct ram_info *info)
1003 {
1004 	struct dram_info *priv = dev_get_priv(dev);
1005 
1006 	*info = priv->info;
1007 
1008 	return 0;
1009 }
1010 
/* UCLASS_RAM operations: only the base/size query is supported */
static struct ram_ops rk3328_dmc_ops = {
	.get_info = rk3328_dmc_get_info,
};
1014 
/* Bound to the rk3328 dmc node in the device tree */
static const struct udevice_id rk3328_dmc_ids[] = {
	{ .compatible = "rockchip,rk3328-dmc" },
	{ }
};
1019 
U_BOOT_DRIVER(dmc_rk3328) = {
	.name = "rockchip_rk3328_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3328_dmc_ids,
	.ops = &rk3328_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	/* DT parsing only needed in TPL, where init actually runs */
	.ofdata_to_platdata = rk3328_dmc_ofdata_to_platdata,
#endif
	.probe = rk3328_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};
1034