xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rk3328.c (revision 8dd9db5d1cd5826638c3cdb5f681300ff2f29f3b)
1 /*
2  * (C) Copyright 2017 Rockchip Electronics Co., Ltd.
3  *
4  * SPDX-License-Identifier:     GPL-2.0
5  */
6 #include <common.h>
7 #include <clk.h>
8 #include <debug_uart.h>
9 #include <dm.h>
10 #include <dt-structs.h>
11 #include <ram.h>
12 #include <regmap.h>
13 #include <syscon.h>
14 #include <asm/io.h>
15 #include <asm/arch/clock.h>
16 #include <asm/arch/cru_rk3328.h>
17 #include <asm/arch/grf_rk3328.h>
18 #include <asm/arch/rockchip_dmc.h>
19 #include <asm/arch/sdram_common.h>
20 #include <asm/arch/sdram_rk3328.h>
21 #include <asm/arch/uart.h>
22 
23 DECLARE_GLOBAL_DATA_PTR;
/*
 * Driver private data.
 *
 * In the TPL build this driver performs the full DRAM initialization and
 * therefore needs handles to the controller, PHY, clock and NoC register
 * blocks.  In later boot stages only the GRF (to read the encoded DRAM
 * layout back from os_reg[2]) and the reported RAM info are used.
 */
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct rk3328_ddr_pctl_regs *pctl;	/* DDR controller (upctl2) */
	struct rk3328_ddr_phy_regs *phy;	/* DDR PHY */
	struct clk ddr_clk;
	struct rk3328_cru *cru;			/* clock & soft-reset unit */
	struct rk3328_msch_regs *msch;		/* memory scheduler (NoC) */
	struct rk3328_ddr_grf_regs *ddr_grf;	/* DDR general register file */
#endif
	struct ram_info info;			/* base/size for the RAM uclass */
	struct rk3328_grf_regs *grf;		/* general register file */
};
36 
37 #ifdef CONFIG_TPL_BUILD
38 
/*
 * Active channel geometry (rank/col/row/bank/bus width...).  Seeded from
 * the device tree and then refined by the capacity-probing code below.
 */
struct rk3328_sdram_channel sdram_ch;

struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3328_dmc dtplat;	/* of-platdata form of the node */
#else
	struct rk3328_sdram_params sdram_params; /* "rockchip,sdram-params" */
#endif
	/* register ranges: 0 phy, 1 pctl, 2 grf, 3 cru, 4 msch, 5 ddr_grf */
	struct regmap *map;
};
49 
50 #if CONFIG_IS_ENABLED(OF_PLATDATA)
51 static int conv_of_platdata(struct udevice *dev)
52 {
53 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
54 	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
55 	int ret;
56 
57 	ret = regmap_init_mem_platdata(dev, dtplat->reg,
58 				       ARRAY_SIZE(dtplat->reg) / 2,
59 				       &plat->map);
60 	if (ret)
61 		return ret;
62 
63 	return 0;
64 }
65 #endif
66 
/*
 * Assert (1) or de-assert (0) the DDR soft resets in the CRU.
 *
 * @ctl_srstn:  controller core reset
 * @ctl_psrstn: controller APB (register interface) reset
 * @phy_srstn:  PHY core reset
 * @phy_psrstn: PHY APB reset
 *
 * The controller AXI reset in softrst_con[9] is driven together with the
 * controller core reset.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(ddrctrl_srstn_req(ctl_srstn) | ddrctrl_psrstn_req(ctl_psrstn) |
		ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
		&dram->cru->softrst_con[5]);
	writel(ddrctrl_asrstn_req(ctl_srstn), &dram->cru->softrst_con[9]);
}
76 
/*
 * Program the DPLL to @mhz (output = 24 MHz * fbdiv / (refdiv * postdiv1 *
 * postdiv2)).  The post-dividers are chosen from fixed bands so that the
 * VCO stays in range; fbdiv is then derived from the requested frequency.
 *
 * The PLL is switched to slow mode while being reconfigured and back to
 * normal mode afterwards.  Lock is polled for at most ~1000us; on timeout
 * we proceed anyway (no error path this early in boot).
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int mhz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;

	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* DPLL to slow mode before touching the dividers */
	writel(((0x1 << 4) << 16) | (0 << 4), &dram->cru->mode_con);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->dpll_con[0]);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->dpll_con[1]);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->dpll_con[1])))
			break;
		delay--;
	}

	/* back to normal (PLL) mode */
	writel(((0x1 << 4) << 16) | (1 << 4), &dram->cru->mode_con);
}
118 
/*
 * Select the DPLL as DDR clock source and program it.  The inno PHY
 * clocks at twice the DRAM data rate frequency, hence ddr_freq * 2.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;

	/* choose DPLL for ddr clk source */
	clrbits_le32(PHY_REG(phy_base, 0xef), 1 << 7);

	/* for inno ddr phy need 2*freq */
	rkclk_set_dpll(dram,  sdram_params->ddr_freq * 2);
}
130 
/*
 * Soft-reset the PHY: pull both the analog and digital reset bits low,
 * then release them in that order with the delays the sequence requires.
 */
static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* assert both analog and digital resets (bits 2..3 of reg 0) */
	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET);
	udelay(5);
	setbits_le32(PHY_REG(phy_base, 0), DIGITAL_DERESET);
	udelay(1);
}
142 
/*
 * Program the DDR controller from the sdram_params register table, then
 * apply the fixed low-power policy: SR/PD idle timeouts, DFI low-power
 * handshake (LPDDR3 only) and hardware low-power idle, and disable
 * automatic ZQ calibration on both controller register sets.
 *
 * Return: always 0.
 */
static int pctl_cfg(struct dram_info *dram,
		    struct rk3328_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *pctl_base = dram->pctl;

	/* table of {offset, value} pairs, terminated by 0xFFFFFFFF */
	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->pctl_regs.pctl[i][1],
		       pctl_base + sdram_params->pctl_regs.pctl[i][0]);
	}
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((SR_IDLE & 0xff) << 16) | (PD_IDLE & 0x1f));
	/*
	 * dfi_lp_en_pd=1,dfi_lp_wakeup_pd=2
	 * hw_lp_idle_x32=1
	 */
	if (sdram_params->dramtype == LPDDR3) {
		setbits_le32(pctl_base + DDR_PCTL2_DFILPCFG0, 1);
		clrsetbits_le32(pctl_base + DDR_PCTL2_DFILPCFG0,
				0xf << 4,
				2 << 4);
	}
	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			1 << 16);
	/* disable zqcs */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	setbits_le32(pctl_base + 0x2000 + DDR_PCTL2_ZQCTL0, 1u << 31);

	return 0;
}
175 
176 /* return ddrconfig value
177  *       (-1), find ddrconfig fail
178  *       other, the ddrconfig value
179  * only support cs0_row >= cs1_row
180  */
/* return ddrconfig value
 *       (-1), find ddrconfig fail
 *       other, the ddrconfig value
 * only support cs0_row >= cs1_row
 */
static unsigned int calculate_ddrconfig(
	struct rk3328_sdram_params *sdram_params)
{
	u32 cs, bw, die_bw, col, row, bank;
	u32 i, tmp;
	u32 ddrconf = -1;	/* unsigned: "not found" is a huge value */

	cs = sdram_ch.rank;
	bw = sdram_ch.bw;
	die_bw = sdram_ch.dbw;
	col = sdram_ch.col;
	row = sdram_ch.cs0_row;
	bank = sdram_ch.bk;

	if (sdram_params->dramtype == DDR4) {
		/*
		 * Pack rank/row/bw/die-bw and match against the DDR4
		 * config table (entries 10..16).  The row/cs fields may
		 * match a larger config ("<="); the low bits must match
		 * exactly.  Tables come from sdram_rk3328.h.
		 */
		tmp = ((cs - 1) << 6) | ((row - 13) << 3) | (bw & 0x2) | die_bw;
		for (i = 10; i < 17; i++) {
			if (((tmp & 0x7) == (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
			    ((tmp & 0x3c) <= (ddr4_cfg_2_rbc[i - 10] & 0x3c)) &&
			    ((tmp & 0x40) <= (ddr4_cfg_2_rbc[i - 10] & 0x40))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* 4-bank DDR3/LPDDR3 parts use the dedicated config 8 */
		if (bank == 2) {
			ddrconf = 8;
			goto out;
		}

		/* pack row and effective column width, match configs 0..4 */
		tmp = ((row - 13) << 4) | (1 << 2) | ((bw + col - 11) << 0);
		for (i = 0; i < 5; i++)
			if (((tmp & 0xf) == (ddr_cfg_2_rbc[i] & 0xf)) &&
			    ((tmp & 0x30) <= (ddr_cfg_2_rbc[i] & 0x30))) {
				ddrconf = i;
				goto out;
			}
	}

out:
	/* still the initial (u32)-1, or out of range: no table entry matched */
	if (ddrconf > 20)
		printf("calculate_ddrconfig error\n");

	return ddrconf;
}
226 
227 /* n: Unit bytes */
228 static void copy_to_reg(u32 *dest, u32 *src, u32 n)
229 {
230 	int i;
231 
232 	for (i = 0; i < n / sizeof(u32); i++) {
233 		writel(*src, dest);
234 		src++;
235 		dest++;
236 	}
237 }
238 
239 /*******
240  * calculate controller dram address map, and setting to register.
241  * argument sdram_ch.ddrconf must be right value before
242  * call this function.
243  *******/
244 static void set_ctl_address_map(struct dram_info *dram,
245 				struct rk3328_sdram_params *sdram_params)
246 {
247 	void __iomem *pctl_base = dram->pctl;
248 
249 	copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
250 		    &addrmap[sdram_ch.ddrconfig][0], 9 * 4);
251 	if ((sdram_params->dramtype == LPDDR3) && (sdram_ch.row_3_4))
252 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
253 	if ((sdram_params->dramtype == DDR4) && (sdram_ch.bw == 0x1))
254 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
255 
256 	if (sdram_ch.rank == 1)
257 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
258 }
259 
/*
 * Configure PHY DLL bypass according to the target frequency.
 *
 * The first block of register writes appears to switch the CMD and the
 * four DQS lanes' TX DLLs to bypass while keeping their RX side active
 * (registers 0x13/0x14 and 0xX6/0xX7 per lane) -- exact bit semantics
 * are from the inno PHY datasheet, not visible here.
 * Below 400 MHz all DLLs in reg 0xa4 are bypassed; the per-lane 0xX8
 * value selects a delay/phase setting that differs above 680 MHz.
 */
static void phy_dll_bypass_set(struct dram_info *dram, u32 freq)
{
	u32 tmp;
	void __iomem *phy_base = dram->phy;

	setbits_le32(PHY_REG(phy_base, 0x13), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x14), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x26), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x27), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x36), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x37), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x46), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x47), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x56), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x57), 1 << 3);

	if (freq <= (400 * MHz))
		/* DLL bypass */
		setbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	else
		clrbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	if (freq <= (680 * MHz))
		tmp = 2;
	else
		tmp = 1;
	writel(tmp, PHY_REG(phy_base, 0x28));
	writel(tmp, PHY_REG(phy_base, 0x38));
	writel(tmp, PHY_REG(phy_base, 0x48));
	writel(tmp, PHY_REG(phy_base, 0x58));
}
290 
/*
 * Program drive strength (DS) and on-die termination (ODT) values for
 * command, clock and the four DQS byte lanes.  DDR3 and DDR4/LPDDR3 use
 * different RON/RTT encodings (PHY_DDR*_RON_RTT_* from the header).
 */
static void set_ds_odt(struct dram_info *dram,
		       struct rk3328_sdram_params *sdram_params)
{
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;
	void __iomem *phy_base = dram->phy;

	if (sdram_params->dramtype == DDR3) {
		cmd_drv = PHY_DDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR3_RON_RTT_45ohm;
		dqs_drv = PHY_DDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR3_RON_RTT_225ohm;
	} else {
		cmd_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR4_LPDDR3_RON_RTT_43ohm;
		dqs_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_240ohm;
	}
	/* DS */
	writel(cmd_drv, PHY_REG(phy_base, 0x11));
	clrsetbits_le32(PHY_REG(phy_base, 0x12), 0x1f << 3, cmd_drv << 3);
	writel(clk_drv, PHY_REG(phy_base, 0x16));
	writel(clk_drv, PHY_REG(phy_base, 0x18));
	writel(dqs_drv, PHY_REG(phy_base, 0x20));
	writel(dqs_drv, PHY_REG(phy_base, 0x2f));
	writel(dqs_drv, PHY_REG(phy_base, 0x30));
	writel(dqs_drv, PHY_REG(phy_base, 0x3f));
	writel(dqs_drv, PHY_REG(phy_base, 0x40));
	writel(dqs_drv, PHY_REG(phy_base, 0x4f));
	writel(dqs_drv, PHY_REG(phy_base, 0x50));
	writel(dqs_drv, PHY_REG(phy_base, 0x5f));
	/* ODT */
	writel(dqs_odt, PHY_REG(phy_base, 0x21));
	writel(dqs_odt, PHY_REG(phy_base, 0x2e));
	writel(dqs_odt, PHY_REG(phy_base, 0x31));
	writel(dqs_odt, PHY_REG(phy_base, 0x3e));
	writel(dqs_odt, PHY_REG(phy_base, 0x41));
	writel(dqs_odt, PHY_REG(phy_base, 0x4e));
	writel(dqs_odt, PHY_REG(phy_base, 0x51));
	writel(dqs_odt, PHY_REG(phy_base, 0x5e));
}
331 
/*
 * Full PHY configuration: DLL bypass setup, the phy_regs table from
 * sdram_params, byte-lane enables based on bus width, DS/ODT values and
 * finally the deskew tables.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rk3328_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *phy_base = dram->phy;

	phy_dll_bypass_set(dram, sdram_params->ddr_freq);
	/* {offset, value} pairs, terminated by 0xFFFFFFFF */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}
	if (sdram_ch.bw == 2) {
		/* 32-bit bus: enable all four byte lanes */
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	} else {
		/* 16-bit bus: enable lanes 0/1 only */
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 3 << 4);
		/* disable DQS2,DQS3 tx dll  for saving power */
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	}
	set_ds_odt(dram, sdram_params);
	/* deskew */
	setbits_le32(PHY_REG(phy_base, 2), 8);
	copy_to_reg(PHY_REG(phy_base, 0xb0),
		    &sdram_params->skew.a0_a1_skew[0], 15 * 4);
	copy_to_reg(PHY_REG(phy_base, 0x70),
		    &sdram_params->skew.cs0_dm0_skew[0], 44 * 4);
	copy_to_reg(PHY_REG(phy_base, 0xc0),
		    &sdram_params->skew.cs0_dm1_skew[0], 44 * 4);
}
361 
362 static int update_refresh_reg(struct dram_info *dram)
363 {
364 	void __iomem *pctl_base = dram->pctl;
365 	u32 ret;
366 
367 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
368 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
369 
370 	return 0;
371 }
372 
373 static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
374 {
375 	u32 ret;
376 	u32 dis_auto_zq = 0;
377 	void __iomem *pctl_base = dram->pctl;
378 	void __iomem *phy_base = dram->phy;
379 
380 	/* disable zqcs */
381 	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
382 		(1ul << 31))) {
383 		dis_auto_zq = 1;
384 		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
385 	}
386 	/* disable auto refresh */
387 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
388 	update_refresh_reg(dram);
389 
390 	if (dramtype == DDR4) {
391 		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0);
392 		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0);
393 		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0);
394 		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0);
395 	}
396 	/* choose training cs */
397 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
398 	/* enable gate training */
399 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
400 	udelay(50);
401 	ret = readl(PHY_REG(phy_base, 0xff));
402 	/* disable gate training */
403 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
404 	/* restore zqcs */
405 	if (dis_auto_zq)
406 		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
407 	/* restore auto refresh */
408 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
409 	update_refresh_reg(dram);
410 
411 	if (dramtype == DDR4) {
412 		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0x2);
413 		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0x2);
414 		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0x2);
415 		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0x2);
416 	}
417 
418 	if (ret & 0x10) {
419 		ret = -1;
420 	} else {
421 		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0)) >> 4);
422 		ret = (ret == 0) ? 0 : -1;
423 	}
424 	return ret;
425 }
426 
427 /* rank = 1: cs0
428  * rank = 2: cs1
429  * rank = 3: cs0 & cs1
430  * note: be careful of keep mr original val
431  */
/* rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: be careful of keep mr original val
 *
 * Issue a mode-register write through the controller's MRCTRL interface:
 * wait until no MR operation is in flight, program MRCTRL0/MRCTRL1
 * (layout differs between DDR3/DDR4 and LPDDR), kick off the write via
 * MRCTRL0 bit 31 and wait for completion.
 *
 * Return: always 0.
 */
static int write_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 arg,
		    u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;
	if ((dramtype == DDR3) || (dramtype == DDR4)) {
		/* MR number in MRCTRL0, full argument in MRCTRL1 */
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		/* LPDDR: MR number and 8-bit operand both in MRCTRL1 */
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	/* trigger the MR write and wait until it has been accepted */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return 0;
}
458 
459 /*
460  * rank : 1:cs0, 2:cs1, 3:cs0&cs1
461  * vrefrate: 4500: 45%,
462  */
463 static int write_vrefdq(struct dram_info *dram, u32 rank, u32 vrefrate,
464 			u32 dramtype)
465 {
466 	u32 tccd_l, value;
467 	u32 dis_auto_zq = 0;
468 	void __iomem *pctl_base = dram->pctl;
469 
470 	if ((dramtype != DDR4) || (vrefrate < 4500) ||
471 	    (vrefrate > 9200))
472 		return -1;
473 
474 	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
475 	tccd_l = (tccd_l - 4) << 10;
476 
477 	if (vrefrate > 7500) {
478 		/* range 1 */
479 		value = ((vrefrate - 6000) / 65) | tccd_l;
480 	} else {
481 		/* range 2 */
482 		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
483 	}
484 
485 	/* disable zqcs */
486 	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
487 		(1ul << 31))) {
488 		dis_auto_zq = 1;
489 		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
490 	}
491 	/* disable auto refresh */
492 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
493 	update_refresh_reg(dram);
494 
495 	/* enable vrefdq calibratin */
496 	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
497 	udelay(1);/* tvrefdqe */
498 	/* write vrefdq value */
499 	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
500 	udelay(1);/* tvref_time */
501 	write_mr(dram, rank, 6, value | (0 << 7), dramtype);
502 	udelay(1);/* tvrefdqx */
503 
504 	/* restore zqcs */
505 	if (dis_auto_zq)
506 		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
507 	/* restore auto refresh */
508 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
509 	update_refresh_reg(dram);
510 
511 	return 0;
512 }
513 
514 #define _MAX_(x, y) (x > y ? x : y)
515 
516 static void rx_deskew_switch_adjust(struct dram_info *dram)
517 {
518 	u32 i, deskew_val;
519 	u32 gate_val = 0;
520 	void __iomem *phy_base = dram->phy;
521 
522 	for (i = 0; i < 4; i++)
523 		gate_val = _MAX_(readl(PHY_REG(phy_base, 0xfb + i)), gate_val);
524 
525 	deskew_val = (gate_val >> 3) + 1;
526 	deskew_val = (deskew_val > 0x1f) ? 0x1f : deskew_val;
527 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xc, (deskew_val & 0x3) << 2);
528 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x7 << 4,
529 			(deskew_val & 0x1c) << 2);
530 }
531 
/* Set the TX deskew selector (reg 0x6e bits 0..1) to mode 1. */
static void tx_deskew_switch_adjust(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0x3, 1);
}
538 
/* Tell the memory scheduler (NoC) which address-map config is in use. */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->ddrconf);
}
543 
/*
 * Publish the final DRAM configuration: encode the channel geometry into
 * GRF os_reg[2] (read back later by rockchip_sdram_size()) and program
 * the NoC/memory-scheduler timing registers from the channel data.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rk3328_sdram_params *sdram_params)
{
	u32 sys_reg = 0;

	set_ddrconfig(dram, sdram_ch.ddrconfig);

	sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
	sys_reg |= SYS_REG_ENC_ROW_3_4(sdram_ch.row_3_4);
	sys_reg |= SYS_REG_ENC_RANK(sdram_ch.rank);
	sys_reg |= SYS_REG_ENC_COL(sdram_ch.col);
	sys_reg |= SYS_REG_ENC_BK(sdram_ch.bk);
	sys_reg |= SYS_REG_ENC_CS0_ROW(sdram_ch.cs0_row);
	if (sdram_ch.cs1_row)
		sys_reg |= SYS_REG_ENC_CS1_ROW(sdram_ch.cs1_row);
	sys_reg |= SYS_REG_ENC_BW(sdram_ch.bw);
	sys_reg |= SYS_REG_ENC_DBW(sdram_ch.dbw);

	writel(sys_reg, &dram->grf->os_reg[2]);

	writel(sdram_ch.noc_timings.ddrtiming.d32, &dram->msch->ddrtiming);

	writel(sdram_ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_ch.noc_timings.readlatency, &dram->msch->readlatency);

	writel(sdram_ch.noc_timings.activate.d32, &dram->msch->activate);
	writel(sdram_ch.noc_timings.devtodev.d32, &dram->msch->devtodev);
	writel(sdram_ch.noc_timings.ddr4timing.d32, &dram->msch->ddr4_timing);
	/* the same aging value is applied to all six aging registers */
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging0);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging1);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging2);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging3);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging4);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging5);
}
579 
/*
 * Enable the controller's automatic low-power features: AXI and core
 * clock auto-gating via the DDR GRF, then self-refresh (bit 0) and
 * power-down (bit 1) in PWRCTL depending on the compile-time SR_IDLE /
 * PD_IDLE timeouts, plus bit 3 (the SR/PD enable).
 */
static void enable_low_power(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;

	/* enable upctl2 axi clock auto gating */
	writel(0x00800000, &dram->ddr_grf->ddr_grf_con[0]);
	writel(0x20012001, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable upctl2 core clock auto gating */
	writel(0x001e001a, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL,	1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
601 
/*
 * Bring up the DRAM: reset choreography, clock setup, controller and PHY
 * configuration, then gate training and (for DDR4) VrefDQ calibration.
 *
 * @pre_init: 1 for the first, geometry-probing pass (skips the banner
 *            print and the deskew adjustments), 0 for the real init.
 *
 * The reset ordering is mandated by the hardware: PHY presetn must be
 * released before the PLL is configured, PHY srstn before the controller
 * gets its clock, and controller presetn before its registers are
 * written.
 *
 * Return: 0 on success, -1 if gate training failed.
 */
static int sdram_init(struct dram_info *dram,
		      struct rk3328_sdram_params *sdram_params, u32 pre_init)
{
	void __iomem *pctl_base = dram->pctl;

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);
	if (pre_init == 0) {
		switch (sdram_params->dramtype) {
		case DDR3:
			printf("DDR3\n");
			break;
		case DDR4:
			printf("DDR4\n");
			break;
		case LPDDR3:
		default:
			printf("LPDDR3\n");
			break;
		}
	}
	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram, sdram_params);
	sdram_ch.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram, sdram_params);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* do ddr gate training */
	if (data_training(dram, 0, sdram_params->dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}

	if (sdram_params->dramtype == DDR4)
		write_vrefdq(dram, 0x3, 5670, sdram_params->dramtype);

	if (pre_init == 0) {
		rx_deskew_switch_adjust(dram);
		tx_deskew_switch_adjust(dram);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
667 
/*
 * Probe the real DRAM geometry by writing a pattern at candidate address
 * bits and checking for aliasing: if a write at SDRAM_ADDR + (1 << bit)
 * does not clobber SDRAM_ADDR, that address bit is wired.  Detects
 * column count, bank bits (or DDR4 bank-group width), row count, 3/4-row
 * parts, and -- via a second gate training on cs1 -- the rank count.
 * Results are stored into the global sdram_ch.
 *
 * @channel: unused on this SoC (single channel).
 *
 * Return: detected capacity in bytes, or 0 on detection failure.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rk3328_sdram_params *sdram_params,
			   unsigned char channel)
{
	void __iomem *pctl_base = dram->pctl;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 row, rowtmp, row_3_4;
	void __iomem *test_addr, *test_addr1;
	u32 dbw;
	u32 cs;
	u32 bw = 1;
	u64 cap = 0;
	u32 dram_type = sdram_params->dramtype;
	u32 pwrctl;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		/* walk the column bit down until the write stops aliasing */
		for (col = coltmp; col >= 9; col -= 1) {
			writel(0, SDRAM_ADDR);
			test_addr = (void __iomem *)(SDRAM_ADDR +
					(1ul << (col + bw - 1ul)));
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(SDRAM_ADDR) == 0))
				break;
		}
		/* loop ran to completion without a hit */
		if (col == 8) {
			printf("col error\n");
			goto cap_err;
		}

		/* 8 banks (bk=3) vs 4 banks (bk=2) */
		test_addr = (void __iomem *)(SDRAM_ADDR +
				(1ul << (coltmp + bktmp + bw - 1ul)));
		writel(0, SDRAM_ADDR);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(SDRAM_ADDR) == 0))
			bk = 3;
		else
			bk = 2;
		if (dram_type == LPDDR3)
			dbw = 2;
		else
			dbw = 1;
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		test_addr = (void __iomem *)(SDRAM_ADDR +
				(1ul << (coltmp + bw + 1ul)));
		writel(0, SDRAM_ADDR);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(SDRAM_ADDR) == 0))
			dbw = 0;	/* x8 devices: 2 bank-group bits */
		else
			dbw = 1;	/* x16 devices: 1 bank-group bit */
	}
	/* detect row */
	for (row = rowtmp; row > 12; row--) {
		writel(0, SDRAM_ADDR);
		test_addr = (void __iomem *)(SDRAM_ADDR +
				(1ul << (row + bktmp + coltmp + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(SDRAM_ADDR) == 0))
			break;
	}
	if (row == 12) {
		printf("row error");
		goto cap_err;
	}
	/* detect row_3_4: check whether the top quarter of rows aliases */
	test_addr = SDRAM_ADDR;
	test_addr1 = (void __iomem *)(SDRAM_ADDR +
			(0x3ul << (row + bktmp + coltmp + bw - 1ul - 1ul)));

	writel(0, test_addr);
	writel(PATTERN, test_addr1);
	if ((readl(test_addr) == 0) &&
	    (readl(test_addr1) == PATTERN))
		row_3_4 = 0;
	else
		row_3_4 = 1;

	/* disable auto low-power (would interfere with gate training) */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* bw and cs detect using phy read gate training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;	/* cs1 trained OK -> second rank present */
	else
		cs = 0;

	bw = 2;

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	sdram_ch.rank = cs + 1;
	sdram_ch.col = col;
	sdram_ch.bk = bk;
	sdram_ch.dbw = dbw;
	sdram_ch.bw = bw;
	sdram_ch.cs0_row = row;
	if (cs)
		sdram_ch.cs1_row = row;	/* provisional; refined later */
	else
		sdram_ch.cs1_row = 0;
	sdram_ch.row_3_4 = row_3_4;

	if (dram_type == DDR4)
		cap = 1llu << (cs + row + bk + col + ((dbw == 0) ? 2 : 1) + bw);
	else
		cap = 1llu << (cs + row + bk + col + bw);

	return cap;

cap_err:
	return 0;
}
806 
/*
 * Patch the MSTR entry (offset 0) of the pctl register table with the
 * geometry detected by dram_detect_cap(): device width (bits 30..31),
 * active ranks (bits 24..25) and bus-width mode (bits 12..13), and set
 * the NoC bwratio accordingly.  The table is rewritten so the next
 * sdram_init() pass programs the controller with real values.
 *
 * Return: always 0.
 */
static u32 remodify_sdram_params(struct rk3328_sdram_params *sdram_params)
{
	u32 tmp = 0, tmp_adr = 0, i;

	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		if (sdram_params->pctl_regs.pctl[i][0] == 0) {
			tmp = sdram_params->pctl_regs.pctl[i][1];/* MSTR */
			tmp_adr = i;
		}
	}

	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));

	/* device (die) width field */
	switch (sdram_ch.dbw) {
	case 2:
		tmp |= (3ul << 30);
		break;
	case 1:
		tmp |= (2ul << 30);
		break;
	case 0:
	default:
		tmp |= (1ul << 30);
		break;
	}

	/* active_ranks: 3 = both cs, 1 = cs0 only */
	if (sdram_ch.rank == 2)
		tmp |= 3 << 24;
	else
		tmp |= 1 << 24;

	/* data_bus_width: 0 = full (32-bit), 1 = half */
	tmp |= (2 - sdram_ch.bw) << 12;

	sdram_params->pctl_regs.pctl[tmp_adr][1] = tmp;

	if (sdram_ch.bw == 2)
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 0;
	else
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 1;

	return 0;
}
849 
/*
 * Detect the row count of the second rank (cs1) by the same write/alias
 * probe as dram_detect_cap(), but offset to the cs1 address window.  The
 * cs1 base bit comes from the address-map table for the active ddrconf;
 * bases below bit 20 get one extra address bit of headroom (hedged: this
 * mirrors the vendor code, exact rationale not visible here).
 *
 * @channel: unused on this SoC (single channel).
 *
 * Return: detected cs1 row count, or 0 if single-rank / not detectable.
 */
static int dram_detect_cs1_row(struct rk3328_sdram_params *sdram_params,
			       unsigned char channel)
{
	u32 ret = 0;
	u32 cs1_bit;
	void __iomem *test_addr, *cs1_addr;
	u32 row, bktmp, coltmp, bw;
	u32 ddrconf = sdram_ch.ddrconfig;

	if (sdram_ch.rank == 2) {
		/* addrmap[..][0] holds the cs bit position minus 8 */
		cs1_bit = addrmap[ddrconf][0] + 8;

		if (cs1_bit > 31)
			goto out;

		cs1_addr = (void __iomem *)(1ul << cs1_bit);
		if (cs1_bit < 20)
			cs1_bit = 1;
		else
			cs1_bit = 0;

		if (sdram_params->dramtype == DDR4) {
			/* include bank-group bits in the bank count */
			if (sdram_ch.dbw == 0)
				bktmp = sdram_ch.bk + 2;
			else
				bktmp = sdram_ch.bk + 1;
		} else {
			bktmp = sdram_ch.bk;
		}
		bw = sdram_ch.bw;
		coltmp = sdram_ch.col;

		/* detect cs1 row */
		for (row = sdram_ch.cs0_row; row > 12; row--) {
			test_addr = (void __iomem *)(SDRAM_ADDR + cs1_addr +
					(1ul << (row + cs1_bit + bktmp +
					 coltmp + bw - 1ul)));
			writel(0, SDRAM_ADDR + cs1_addr);
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(SDRAM_ADDR + cs1_addr) == 0)) {
				ret = row;
				break;
			}
		}
	}

out:
	return ret;
}
900 
901 static int sdram_init_detect(struct dram_info *dram,
902 			     struct rk3328_sdram_params *sdram_params)
903 {
904 	debug("Starting SDRAM initialization...\n");
905 
906 	memcpy(&sdram_ch, &sdram_params->ch,
907 	       sizeof(struct rk3328_sdram_channel));
908 
909 	sdram_init(dram, sdram_params, 1);
910 	dram_detect_cap(dram, sdram_params, 0);
911 
912 	/* modify bw, cs related timing */
913 	remodify_sdram_params(sdram_params);
914 	/* reinit sdram by real dram cap */
915 	sdram_init(dram, sdram_params, 0);
916 
917 	/* redetect cs1 row */
918 	sdram_ch.cs1_row =
919 		dram_detect_cs1_row(sdram_params, 0);
920 
921 	return 0;
922 }
923 
924 static int rk3328_dmc_init(struct udevice *dev)
925 {
926 	struct dram_info *priv = dev_get_priv(dev);
927 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
928 	int ret;
929 
930 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
931 	struct rk3328_sdram_params *params = &plat->sdram_params;
932 #else
933 	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
934 	struct rk3328_sdram_params *params =
935 					(void *)dtplat->rockchip_sdram_params;
936 
937 	ret = conv_of_platdata(dev);
938 	if (ret)
939 		return ret;
940 #endif
941 	priv->phy = regmap_get_range(plat->map, 0);
942 	priv->pctl = regmap_get_range(plat->map, 1);
943 	priv->grf = regmap_get_range(plat->map, 2);
944 	priv->cru = regmap_get_range(plat->map, 3);
945 	priv->msch = regmap_get_range(plat->map, 4);
946 	priv->ddr_grf = regmap_get_range(plat->map, 5);
947 
948 	debug("%s phy %p pctrl %p grf %p cru %p msch %p ddr_grf %p\n",
949 	      __func__, priv->phy, priv->pctl, priv->grf, priv->cru,
950 	      priv->msch, priv->ddr_grf);
951 	ret = sdram_init_detect(priv, params);
952 	if (ret < 0) {
953 		printf("%s DRAM init failed%d\n", __func__, ret);
954 		return ret;
955 	}
956 
957 	return 0;
958 }
959 
960 static int rk3328_dmc_ofdata_to_platdata(struct udevice *dev)
961 {
962 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
963 	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
964 	int ret;
965 
966 	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
967 				 (u32 *)&plat->sdram_params,
968 				 sizeof(plat->sdram_params) / sizeof(u32));
969 	if (ret) {
970 		printf("%s: Cannot read rockchip,sdram-params %d\n",
971 		       __func__, ret);
972 		return ret;
973 	}
974 	ret = regmap_init_mem(dev, &plat->map);
975 	if (ret)
976 		printf("%s: regmap failed %d\n", __func__, ret);
977 #endif
978 	return 0;
979 }
980 
981 #endif
982 
/*
 * Probe for every boot stage:
 *  - TPL: run the full DRAM init (failure is deliberately mapped to 0
 *    here; the init function already printed the error).
 *  - SPL/U-Boot pre-relocation: read the DRAM size back from the GRF
 *    os_reg[2] encoding and (SPL only) hand base/size to the param area.
 *  - post-relocation: hook up the optional DMC frequency driver.
 */
static int rk3328_dmc_probe(struct udevice *dev)
{
	int ret = 0;
#ifdef CONFIG_TPL_BUILD
	if (rk3328_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv;

	if (!(gd->flags & GD_FLG_RELOC)) {
		priv = dev_get_priv(dev);
		priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
		debug("%s: grf=%p\n", __func__, priv->grf);
		priv->info.base = CONFIG_SYS_SDRAM_BASE;
		priv->info.size =
			rockchip_sdram_size((phys_addr_t)&priv->grf->os_reg[2]);
#ifdef CONFIG_SPL_BUILD
	/* pass base/size on to the next stage via the ddr param area */
	struct ddr_param ddr_parem;

	ddr_parem.count = 1;
	ddr_parem.para[0] = priv->info.base;
	ddr_parem.para[1] = priv->info.size;
	rockchip_setup_ddr_param(&ddr_parem);
#endif
	} else {
#if !defined(CONFIG_SPL_BUILD) && defined(CONFIG_ROCKCHIP_DMC)
		ret = rockchip_dmcfreq_probe(dev);
#endif
	}
#endif
	return ret;
}
1015 
/* RAM uclass .get_info: report the base/size captured during probe. */
static int rk3328_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}
1024 
static struct ram_ops rk3328_dmc_ops = {
	.get_info = rk3328_dmc_get_info,
};

static const struct udevice_id rk3328_dmc_ids[] = {
	{ .compatible = "rockchip,rk3328-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3328) = {
	.name = "rockchip_rk3328_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3328_dmc_ids,
	.ops = &rk3328_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	/* DT parsing/regmap setup is only needed when we really init DRAM */
	.ofdata_to_platdata = rk3328_dmc_ofdata_to_platdata,
#endif
	.probe = rk3328_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};
1048