/* xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 0367dfefd7dceefdbde5f0517ac8e0aa6f34e4cd) */
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
/*
 * Training-step selection flags: bit-mask values OR'ed together to pick
 * which DRAM training steps to run; FULL_TRAINING selects everything.
 */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* signal-group selector for de-skew programming (RX / TX / CA path) */
#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

/* de-skew modify mode: write absolute value vs apply a signed delta */
#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/* Per-controller state bundle passed to every init/config helper below. */
struct dram_info {
	void __iomem *pctl;		/* DDR protocol controller (uMCTL2) base */
	void __iomem *phy;		/* DDR PHY register base */
	struct rv1126_cru *cru;		/* clock/reset unit */
	struct msch_regs *msch;		/* memory scheduler (server MSCH) */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system GRF */
	struct ram_info info;		/* detected size/base reported upward */
	struct rv1126_pmugrf *pmugrf;	/* PMU GRF */
	u32 sr_idle;			/* self-refresh idle timeout */
	u32 pd_idle;			/* power-down idle timeout */
};
55 
/* Fixed RV1126 physical base addresses of the blocks touched during init. */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* secure GRF offsets holding the controller reset request bits */
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* single global instance; TPL runs single-threaded with no malloc pool */
struct dram_info dram_info;
69 
/*
 * Per-frequency timing parameter sets, generated offline and included as
 * .inc initializers.  Exactly one table is built, selected by the DRAM
 * type Kconfig value (3 = DDR3, 0 = DDR4, 6 = LPDDR3, 7 = LPDDR4); the
 * entries cover 328 MHz up to 1056 MHz.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
111 
/*
 * Loader parameter blob shared with the boot loader; parsed below through
 * struct sdram_head_info_index_v2 (offsets inside are in 32-bit words).
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};
115 
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training results exported to the ddr_test_tool command */
static struct rw_trn_result rw_trn_result;
#endif

/* per-frequency-set-point parameters, indexed 0..MAX_IDX-1 */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* cached LPDDR3 MR ODT value */
static u8 lp3_odt_value;

/* write-leveling results: [rank][byte lane] */
static s8 wrlvl_result[2][4];
126 /* DDR configuration 0-9 */
127 u16 ddr_cfg_2_rbc[] = {
128 	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
129 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
130 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
131 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
132 	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
133 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
134 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
135 	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
136 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
137 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
138 };
139 
140 /* DDR configuration 10-21 */
141 u8 ddr4_cfg_2_rbc[] = {
142 	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
143 	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
144 	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
145 	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
146 	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
147 	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
148 	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
149 	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
150 	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
151 	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
152 	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
153 	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
154 };
155 
156 /* DDR configuration 22-28 */
157 u16 ddr_cfg_2_rbc_p2[] = {
158 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
159 	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
160 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
161 	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
162 	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
163 	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
164 	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
165 };
166 
/*
 * {DDR4 config index, equivalent DDR3-style config index} pairs.
 * calculate_ddrconfig() maps forward (col 0 -> col 1) and
 * set_ctl_address_map() maps back (col 1 -> col 0).
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
181 
/*
 * uMCTL2 ADDRMAP0..ADDRMAP8 register values per ddrconfig, copied verbatim
 * into the controller by set_ctl_address_map().  Row index = ddrconfig
 * (DDR4 configs are translated via d4_rbc_2_d3_rbc first).
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
232 
/*
 * DQ line mapping table used by the de-skew code; each row pairs one
 * selector value (col 0) with two register indexes (cols 1-2).
 * NOTE(review): exact column semantics are defined by the consumer, which
 * is outside this chunk — confirm against the de-skew routines.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
257 
/* address-group base offsets, ordered CS0-A, CS0-B, CS1-A, CS1-B */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
264 
/* PHY register offsets holding write-leveling results: [rank][byte lane] */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
269 
/*
 * PHY skew-register base offsets per DQS group, ordered RX CS0/CS1 then
 * TX CS0/CS1, four byte-lanes each (see per-entry comments).
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
288 
/*
 * Assert/de-assert the DDR controller and PHY resets.
 * Each argument is the requested reset state for that reset line
 * (controller core/APB via the secure GRF, PHY core/APB via the CRU).
 * Write order is controller first, then PHY — keep it.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	/* controller core + AXI reset share ctl_srstn; APB uses ctl_psrstn */
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
300 
/*
 * Program the DPLL to output @hz and switch the DDR clock onto it.
 * Sequence: park the mux on the 24 MHz crystal, program the dividers
 * (optionally enabling spread spectrum from the loader parameter blob),
 * wait up to 1 ms for lock, then switch back to the PLL.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock-poll budget, 1 us per iteration */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* integer mode unless SSMOD is enabled */

	/* offsets in the blob are in 32-bit words */
	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	/* NOTE(review): SSMOD settings are packed in the info_2t word */
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* pick postdivs so the VCO stays in range for the target band */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz crystal reference */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* fractional mode required for SSMOD */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait for lock; falls through after 1 ms even if never locked */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
366 
/*
 * Set the DDR clock from the selected parameter set.
 * The Inno PHY internally doubles the clock, so the DPLL runs at half
 * the DRAM data rate.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
373 
/*
 * Derive the ddrconfig index (row into addrmap[]) from the detected DRAM
 * geometry by matching an encoded rank/row/bank/column word against the
 * ddr_cfg_2_rbc / ddr4_cfg_2_rbc / ddr_cfg_2_rbc_p2 tables.  DDR4 matches
 * are translated to their DDR3-style equivalent via d4_rbc_2_d3_rbc.
 * Returns the config index; prints an error (but still returns the bogus
 * value, 0xffffffff if nothing matched) when no table entry fits.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* sentinel: u32 wrap makes the >28 check fire */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank: try the dedicated configs 17-20 first */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			/* width bits must match; row/rank may be smaller */
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8 banks: try configs 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* part-2 table covers configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-ditch fallback for single-rank 8-bank parts */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		/* translate the DDR4 config to its DDR3-style addrmap row */
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
478 
/*
 * Open a quasi-dynamic register programming window in the uMCTL2
 * (clear sw_done); pair with sw_set_ack() to commit the writes.
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
486 
487 static void sw_set_ack(struct dram_info *dram)
488 {
489 	void __iomem *pctl_base = dram->pctl;
490 
491 	/* set sw_done=1 */
492 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
493 	while (1) {
494 		/* wait programming done */
495 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
496 				PCTL2_SW_DONE_ACK)
497 			break;
498 	}
499 }
500 
501 static void set_ctl_address_map(struct dram_info *dram,
502 				struct rv1126_sdram_params *sdram_params)
503 {
504 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
505 	void __iomem *pctl_base = dram->pctl;
506 	u32 ddrconf = cap_info->ddrconfig;
507 	u32 i, row;
508 
509 	row = cap_info->cs0_row;
510 	if (sdram_params->base.dramtype == DDR4) {
511 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
512 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
513 				ddrconf = d4_rbc_2_d3_rbc[i][0];
514 				break;
515 			}
516 		}
517 	}
518 
519 	if (ddrconf > ARRAY_SIZE(addrmap)) {
520 		printascii("set ctl address map fail\n");
521 		return;
522 	}
523 
524 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
525 			  &addrmap[ddrconf][0], 9 * 4);
526 
527 	/* unused row set to 0xf */
528 	for (i = 17; i >= row; i--)
529 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
530 			((i - 12) * 8 / 32) * 4,
531 			0xf << ((i - 12) * 8 % 32));
532 
533 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
534 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
535 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
536 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
537 
538 	if (cap_info->rank == 1)
539 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
540 }
541 
/*
 * Program (wait == 0) or power up and lock-wait (wait != 0) the PHY PLL.
 * Callers first program the dividers for @freq, then call again with
 * wait set to release power-down and spin until PLL_LOCK is reported.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		/* release PLL power-down, then poll the lock flag */
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* divider selection by frequency band */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		/* fbdiv bit 9 lives in reg 0x51 alongside the postdiv enable */
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
580 
/*
 * {PHY register drive-strength code, resistance in ohms} for DDR3,
 * sorted by strictly descending ohms — set_ds_odt() relies on the
 * descending order for its "<= ohm" reverse search.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
606 
/*
 * {PHY ODT code, resistance in ohms} for DDR3, descending by ohms
 * (entry 0 = ODT disabled); searched in reverse by set_ds_odt().
 */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};
633 
/*
 * {PHY drive-strength code, resistance in ohms} shared by DDR4 and
 * LPDDR3, descending by ohms; searched in reverse by set_ds_odt().
 */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};
659 
660 static u16 d4lp3_phy_odt_2_ohm[][2] = {
661 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
662 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
663 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
664 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
665 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
666 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
667 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
668 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
669 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
670 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
671 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
672 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
673 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
674 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
675 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
676 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
677 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
678 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
679 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
680 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
681 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
682 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
683 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
684 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
685 };
686 
/*
 * {PHY drive-strength code, resistance in ohms} for LPDDR4, descending
 * by ohms; searched in reverse by set_ds_odt().
 */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};
712 
/*
 * {PHY ODT code, resistance in ohms} for LPDDR4, descending by ohms
 * (entry 0 = ODT disabled); searched in reverse by set_ds_odt().
 */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm,	87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};
739 
740 static u32 lp4_odt_calc(u32 odt_ohm)
741 {
742 	u32 odt;
743 
744 	if (odt_ohm == 0)
745 		odt = LPDDR4_DQODT_DIS;
746 	else if (odt_ohm <= 40)
747 		odt = LPDDR4_DQODT_40;
748 	else if (odt_ohm <= 48)
749 		odt = LPDDR4_DQODT_48;
750 	else if (odt_ohm <= 60)
751 		odt = LPDDR4_DQODT_60;
752 	else if (odt_ohm <= 80)
753 		odt = LPDDR4_DQODT_80;
754 	else if (odt_ohm <= 120)
755 		odt = LPDDR4_DQODT_120;
756 	else
757 		odt = LPDDR4_DQODT_240;
758 
759 	return odt;
760 }
761 
762 static void *get_ddr_drv_odt_info(u32 dramtype)
763 {
764 	struct sdram_head_info_index_v2 *index =
765 		(struct sdram_head_info_index_v2 *)common_info;
766 	void *ddr_info = 0;
767 
768 	if (dramtype == DDR4)
769 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
770 	else if (dramtype == DDR3)
771 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
772 	else if (dramtype == LPDDR3)
773 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
774 	else if (dramtype == LPDDR4)
775 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
776 	else
777 		printascii("unsupported dram type\n");
778 	return ddr_info;
779 }
780 
781 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
782 			 u32 freq_mhz, u32 dst_fsp)
783 {
784 	void __iomem *pctl_base = dram->pctl;
785 	u32 ca_vref, dq_vref;
786 
787 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
788 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
789 	else
790 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
791 
792 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
793 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
794 	else
795 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
796 
797 	if (ca_vref < 100)
798 		ca_vref = 100;
799 	if (ca_vref > 420)
800 		ca_vref = 420;
801 
802 	if (ca_vref <= 300)
803 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
804 	else
805 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
806 
807 	if (dq_vref < 100)
808 		dq_vref = 100;
809 	if (dq_vref > 420)
810 		dq_vref = 420;
811 
812 	if (dq_vref <= 300)
813 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
814 	else
815 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
816 
817 	sw_set_req(dram);
818 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
819 			DDR_PCTL2_INIT6,
820 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
821 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
822 
823 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
824 			DDR_PCTL2_INIT7,
825 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
826 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
827 	sw_set_ack(dram);
828 }
829 
830 static void set_ds_odt(struct dram_info *dram,
831 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
832 {
833 	void __iomem *phy_base = dram->phy;
834 	void __iomem *pctl_base = dram->pctl;
835 	u32 dramtype = sdram_params->base.dramtype;
836 	struct ddr2_3_4_lp2_3_info *ddr_info;
837 	struct lp4_info *lp4_info;
838 	u32 i, j, tmp;
839 	const u16 (*p_drv)[2];
840 	const u16 (*p_odt)[2];
841 	u32 drv_info, sr_info;
842 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
843 	u32 phy_odt_ohm, dram_odt_ohm;
844 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
845 	u32 phy_odt_up_en, phy_odt_dn_en;
846 	u32 sr_dq, sr_clk;
847 	u32 freq = sdram_params->base.ddr_freq;
848 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
849 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
850 	u32 phy_dq_drv = 0;
851 	u32 phy_odt_up = 0, phy_odt_dn = 0;
852 
853 	ddr_info = get_ddr_drv_odt_info(dramtype);
854 	lp4_info = (void *)ddr_info;
855 
856 	if (!ddr_info)
857 		return;
858 
859 	/* dram odt en freq control phy drv, dram odt and phy sr */
860 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
861 		drv_info = ddr_info->drv_when_odtoff;
862 		dram_odt_ohm = 0;
863 		sr_info = ddr_info->sr_when_odtoff;
864 		phy_lp4_drv_pd_en =
865 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
866 	} else {
867 		drv_info = ddr_info->drv_when_odten;
868 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
869 		sr_info = ddr_info->sr_when_odten;
870 		phy_lp4_drv_pd_en =
871 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
872 	}
873 	phy_dq_drv_ohm =
874 		DRV_INFO_PHY_DQ_DRV(drv_info);
875 	phy_clk_drv_ohm =
876 		DRV_INFO_PHY_CLK_DRV(drv_info);
877 	phy_ca_drv_ohm =
878 		DRV_INFO_PHY_CA_DRV(drv_info);
879 
880 	sr_dq = DQ_SR_INFO(sr_info);
881 	sr_clk = CLK_SR_INFO(sr_info);
882 
883 	/* phy odt en freq control dram drv and phy odt */
884 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
885 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
886 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
887 		phy_odt_ohm = 0;
888 		phy_odt_up_en = 0;
889 		phy_odt_dn_en = 0;
890 	} else {
891 		dram_drv_ohm =
892 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
893 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
894 		phy_odt_up_en =
895 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
896 		phy_odt_dn_en =
897 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
898 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
899 	}
900 
901 	if (dramtype == LPDDR4) {
902 		if (phy_odt_ohm) {
903 			phy_odt_up_en = 0;
904 			phy_odt_dn_en = 1;
905 		}
906 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
907 			dram_caodt_ohm = 0;
908 		else
909 			dram_caodt_ohm =
910 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
911 	}
912 
913 	if (dramtype == DDR3) {
914 		p_drv = d3_phy_drv_2_ohm;
915 		p_odt = d3_phy_odt_2_ohm;
916 	} else if (dramtype == LPDDR4) {
917 		p_drv = lp4_phy_drv_2_ohm;
918 		p_odt = lp4_phy_odt_2_ohm;
919 	} else {
920 		p_drv = d4lp3_phy_drv_2_ohm;
921 		p_odt = d4lp3_phy_odt_2_ohm;
922 	}
923 
924 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
925 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
926 			phy_dq_drv = **(p_drv + i);
927 			break;
928 		}
929 		if (i == 0)
930 			break;
931 	}
932 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
933 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
934 			phy_clk_drv = **(p_drv + i);
935 			break;
936 		}
937 		if (i == 0)
938 			break;
939 	}
940 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
941 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
942 			phy_ca_drv = **(p_drv + i);
943 			break;
944 		}
945 		if (i == 0)
946 			break;
947 	}
948 	if (!phy_odt_ohm)
949 		phy_odt = 0;
950 	else
951 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
952 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
953 				phy_odt = **(p_odt + i);
954 				break;
955 			}
956 			if (i == 0)
957 				break;
958 		}
959 
960 	if (dramtype != LPDDR4) {
961 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
962 			vref_inner = 0x80;
963 		else if (phy_odt_up_en)
964 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
965 				     (dram_drv_ohm + phy_odt_ohm);
966 		else
967 			vref_inner = phy_odt_ohm * 128 /
968 				(phy_odt_ohm + dram_drv_ohm);
969 
970 		if (dramtype != DDR3 && dram_odt_ohm)
971 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
972 				   (phy_dq_drv_ohm + dram_odt_ohm);
973 		else
974 			vref_out = 0x80;
975 	} else {
976 		/* for lp4 */
977 		if (phy_odt_ohm)
978 			vref_inner =
979 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
980 				 256) / 1000;
981 		else
982 			vref_inner =
983 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
984 				 256) / 1000;
985 
986 		vref_out = 0x80;
987 	}
988 
989 	/* default ZQCALIB bypass mode */
990 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
991 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
992 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
993 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
994 	if (dramtype == LPDDR4) {
995 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
996 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
997 	} else {
998 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
999 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1000 	}
1001 	/* clk / cmd slew rate */
1002 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1003 
1004 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1005 	if (phy_odt_up_en)
1006 		phy_odt_up = phy_odt;
1007 	if (phy_odt_dn_en)
1008 		phy_odt_dn = phy_odt;
1009 
1010 	for (i = 0; i < 4; i++) {
1011 		j = 0x110 + i * 0x10;
1012 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1013 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1014 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1015 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1016 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1017 
1018 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1019 				1 << 3, phy_lp4_drv_pd_en << 3);
1020 		/* dq slew rate */
1021 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1022 				0x1f, sr_dq);
1023 	}
1024 
1025 	/* reg_rx_vref_value_update */
1026 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1027 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1028 
1029 	/* RAM VREF */
1030 	writel(vref_out, PHY_REG(phy_base, 0x105));
1031 	if (dramtype == LPDDR3)
1032 		udelay(100);
1033 
1034 	if (dramtype == LPDDR4)
1035 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1036 
1037 	if (dramtype == DDR3 || dramtype == DDR4) {
1038 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1039 				DDR_PCTL2_INIT3);
1040 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1041 	} else {
1042 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1043 				DDR_PCTL2_INIT4);
1044 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1045 	}
1046 
1047 	if (dramtype == DDR3) {
1048 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1049 		if (dram_drv_ohm == 34)
1050 			mr1_mr3 |= DDR3_DS_34;
1051 
1052 		if (dram_odt_ohm == 0)
1053 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1054 		else if (dram_odt_ohm <= 40)
1055 			mr1_mr3 |= DDR3_RTT_NOM_40;
1056 		else if (dram_odt_ohm <= 60)
1057 			mr1_mr3 |= DDR3_RTT_NOM_60;
1058 		else
1059 			mr1_mr3 |= DDR3_RTT_NOM_120;
1060 
1061 	} else if (dramtype == DDR4) {
1062 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1063 		if (dram_drv_ohm == 48)
1064 			mr1_mr3 |= DDR4_DS_48;
1065 
1066 		if (dram_odt_ohm == 0)
1067 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1068 		else if (dram_odt_ohm <= 34)
1069 			mr1_mr3 |= DDR4_RTT_NOM_34;
1070 		else if (dram_odt_ohm <= 40)
1071 			mr1_mr3 |= DDR4_RTT_NOM_40;
1072 		else if (dram_odt_ohm <= 48)
1073 			mr1_mr3 |= DDR4_RTT_NOM_48;
1074 		else if (dram_odt_ohm <= 60)
1075 			mr1_mr3 |= DDR4_RTT_NOM_60;
1076 		else
1077 			mr1_mr3 |= DDR4_RTT_NOM_120;
1078 
1079 	} else if (dramtype == LPDDR3) {
1080 		if (dram_drv_ohm <= 34)
1081 			mr1_mr3 |= LPDDR3_DS_34;
1082 		else if (dram_drv_ohm <= 40)
1083 			mr1_mr3 |= LPDDR3_DS_40;
1084 		else if (dram_drv_ohm <= 48)
1085 			mr1_mr3 |= LPDDR3_DS_48;
1086 		else if (dram_drv_ohm <= 60)
1087 			mr1_mr3 |= LPDDR3_DS_60;
1088 		else if (dram_drv_ohm <= 80)
1089 			mr1_mr3 |= LPDDR3_DS_80;
1090 
1091 		if (dram_odt_ohm == 0)
1092 			lp3_odt_value = LPDDR3_ODT_DIS;
1093 		else if (dram_odt_ohm <= 60)
1094 			lp3_odt_value = LPDDR3_ODT_60;
1095 		else if (dram_odt_ohm <= 120)
1096 			lp3_odt_value = LPDDR3_ODT_120;
1097 		else
1098 			lp3_odt_value = LPDDR3_ODT_240;
1099 	} else {/* for lpddr4 */
1100 		/* MR3 for lp4 PU-CAL and PDDS */
1101 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1102 		mr1_mr3 |= lp4_pu_cal;
1103 
1104 		tmp = lp4_odt_calc(dram_drv_ohm);
1105 		if (!tmp)
1106 			tmp = LPDDR4_PDDS_240;
1107 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1108 
1109 		/* MR11 for lp4 ca odt, dq odt set */
1110 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1111 			     DDR_PCTL2_INIT6);
1112 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1113 
1114 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1115 
1116 		tmp = lp4_odt_calc(dram_odt_ohm);
1117 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1118 
1119 		tmp = lp4_odt_calc(dram_caodt_ohm);
1120 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1121 		sw_set_req(dram);
1122 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1123 				DDR_PCTL2_INIT6,
1124 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1125 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1126 		sw_set_ack(dram);
1127 
1128 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1129 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1130 			     DDR_PCTL2_INIT7);
1131 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1132 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1133 
1134 		tmp = lp4_odt_calc(phy_odt_ohm);
1135 		mr22 |= tmp;
1136 		mr22 = mr22 |
1137 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1138 			LPDDR4_ODTE_CK_SHIFT) |
1139 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1140 			LPDDR4_ODTE_CS_SHIFT) |
1141 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1142 			LPDDR4_ODTD_CA_SHIFT);
1143 
1144 		sw_set_req(dram);
1145 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1146 				DDR_PCTL2_INIT7,
1147 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1148 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1149 		sw_set_ack(dram);
1150 	}
1151 
1152 	if (dramtype == DDR4 || dramtype == DDR3) {
1153 		sw_set_req(dram);
1154 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1155 				DDR_PCTL2_INIT3,
1156 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1157 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1158 		sw_set_ack(dram);
1159 	} else {
1160 		sw_set_req(dram);
1161 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1162 				DDR_PCTL2_INIT4,
1163 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1164 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1165 		sw_set_ack(dram);
1166 	}
1167 }
1168 
1169 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1170 				   struct rv1126_sdram_params *sdram_params)
1171 {
1172 	void __iomem *phy_base = dram->phy;
1173 	u32 dramtype = sdram_params->base.dramtype;
1174 	struct sdram_head_info_index_v2 *index =
1175 		(struct sdram_head_info_index_v2 *)common_info;
1176 	struct dq_map_info *map_info;
1177 
1178 	map_info = (struct dq_map_info *)((void *)common_info +
1179 		index->dq_map_index.offset * 4);
1180 
1181 	if (dramtype <= LPDDR4)
1182 		writel((map_info->byte_map[dramtype / 4] >>
1183 			((dramtype % 4) * 8)) & 0xff,
1184 		       PHY_REG(phy_base, 0x4f));
1185 
1186 	return 0;
1187 }
1188 
/*
 * Apply the static PHY configuration for the detected SDRAM: DQ path
 * remap, PHY PLL frequency, the board's PHY register table, byte-lane
 * (bus width) enables, and per-type ODT/vref training setup.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	/* set the PHY PLL, then replay the board-specific PHY register
	 * table (terminated by an address of 0xFFFFFFFF)
	 */
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes reg 0x4f mapped to logical bytes 0/1
	 * (2 bits per lane in the remap value)
	 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* PHY reg 0xf low bits enable the byte lanes actually in use:
	 * bw == 2 -> all four lanes, bw == 1 -> the two mapped lanes,
	 * otherwise only the byte0 lane
	 */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1234 
1235 static int update_refresh_reg(struct dram_info *dram)
1236 {
1237 	void __iomem *pctl_base = dram->pctl;
1238 	u32 ret;
1239 
1240 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1241 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1242 
1243 	return 0;
1244 }
1245 
1246 /*
1247  * rank = 1: cs0
1248  * rank = 2: cs1
1249  */
1250 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1251 {
1252 	u32 ret;
1253 	u32 i, temp;
1254 	u32 dqmap;
1255 
1256 	void __iomem *pctl_base = dram->pctl;
1257 	struct sdram_head_info_index_v2 *index =
1258 		(struct sdram_head_info_index_v2 *)common_info;
1259 	struct dq_map_info *map_info;
1260 
1261 	map_info = (struct dq_map_info *)((void *)common_info +
1262 		index->dq_map_index.offset * 4);
1263 
1264 	if (dramtype == LPDDR2)
1265 		dqmap = map_info->lp2_dq0_7_map;
1266 	else
1267 		dqmap = map_info->lp3_dq0_7_map;
1268 
1269 	pctl_read_mr(pctl_base, rank, mr_num);
1270 
1271 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1272 
1273 	if (dramtype != LPDDR4) {
1274 		temp = 0;
1275 		for (i = 0; i < 8; i++) {
1276 			temp = temp | (((ret >> i) & 0x1) <<
1277 				       ((dqmap >> (i * 4)) & 0xf));
1278 		}
1279 	} else {
1280 		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1281 	}
1282 
1283 	return temp;
1284 }
1285 
/* before call this function autorefresh should be disabled */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait until no previous debug command is still outstanding
	 * (DBGSTAT low bits; presumably one per rank — confirm vs umctl2)
	 */
	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	/* issue one manual refresh command via DBGCMD */
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}
1295 
/*
 * Software-controlled self-refresh entry/exit.
 * @en: 1 = request SR and spin until the controller reports a
 *	software-triggered SR in operating mode SR;
 *	0 = release SR and spin until the controller leaves SR mode.
 * Both directions busy-wait with no timeout.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			/* must be SW-requested SR (not auto) AND mode == SR */
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}
1318 
/*
 * For every byte group and every dq_sel[] entry, read the left/right
 * loop inverse-delay sample out of the PHY and store it into the
 * corresponding per-signal register of that group.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel: select signal via +0x2c,
			 * read the sample at +0x2e, store at dq_sel[i][1]
			 */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel: same flow via +0x2d / +0x2f,
			 * stored at dq_sel[i][2]
			 */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
1342 
/*
 * Latch updated RX (read path) per-bit de-skew settings by pulsing
 * PHY_0x70[4] (bits 1 and 6 are cleared at the same time).
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1352 
/*
 * Latch updated TX (write path) per-bit de-skew settings: turn off
 * write-train auto mode (PHY_0x7a[0]), then pulse PHY_0xc[6] with
 * PHY_0x2[3] set.
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1363 
/*
 * Latch updated CA de-skew settings: clear PHY_0x25[2], then pulse
 * PHY_0x22[6].
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1373 
1374 /*
1375  * dir: 0: de-skew = delta_*
1376  *	1: de-skew = reg val - delta_*
1377  * delta_dir: value for differential signal: clk/
1378  * delta_sig: value for single signal: ca/cmd
1379  */
1380 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1381 			     int delta_sig, u32 cs, u32 dramtype)
1382 {
1383 	void __iomem *phy_base = dram->phy;
1384 	u32 i, cs_en, tmp;
1385 	u32 dfi_lp_stat = 0;
1386 
1387 	if (cs == 0)
1388 		cs_en = 1;
1389 	else if (cs == 2)
1390 		cs_en = 2;
1391 	else
1392 		cs_en = 3;
1393 
1394 	if (dramtype == LPDDR4 &&
1395 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1396 		dfi_lp_stat = 1;
1397 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1398 	}
1399 	enter_sr(dram, 1);
1400 
1401 	for (i = 0; i < 0x20; i++) {
1402 		if (dir == DESKEW_MDF_ABS_VAL)
1403 			tmp = delta_sig;
1404 		else
1405 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1406 			      delta_sig;
1407 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1408 	}
1409 
1410 	if (dir == DESKEW_MDF_ABS_VAL)
1411 		tmp = delta_dif;
1412 	else
1413 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1414 		       delta_sig + delta_dif;
1415 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1416 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1417 	if (dramtype == LPDDR4) {
1418 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1419 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1420 
1421 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1422 		update_ca_prebit(dram);
1423 	}
1424 	enter_sr(dram, 0);
1425 
1426 	if (dfi_lp_stat)
1427 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1428 
1429 }
1430 
1431 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1432 {
1433 	u32 i, j, offset = 0;
1434 	u32 min = 0x3f;
1435 	void __iomem *phy_base = dram->phy;
1436 	u32 byte_en;
1437 
1438 	if (signal == SKEW_TX_SIGNAL)
1439 		offset = 8;
1440 
1441 	if (signal == SKEW_CA_SIGNAL) {
1442 		for (i = 0; i < 0x20; i++)
1443 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1444 	} else {
1445 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1446 		for (j = offset; j < offset + rank * 4; j++) {
1447 			if (!((byte_en >> (j % 4)) & 1))
1448 				continue;
1449 			for (i = 0; i < 11; i++)
1450 				min = MIN(min,
1451 					  readl(PHY_REG(phy_base,
1452 							dqs_dq_skew_adr[j] +
1453 							i)));
1454 		}
1455 	}
1456 
1457 	return min;
1458 }
1459 
1460 static u32 low_power_update(struct dram_info *dram, u32 en)
1461 {
1462 	void __iomem *pctl_base = dram->pctl;
1463 	u32 lp_stat = 0;
1464 
1465 	if (en) {
1466 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1467 	} else {
1468 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1469 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1470 	}
1471 
1472 	return lp_stat;
1473 }
1474 
1475 /*
1476  * signal:
1477  * dir: 0: de-skew = delta_*
1478  *	1: de-skew = reg val - delta_*
1479  * delta_dir: value for differential signal: dqs
1480  * delta_sig: value for single signal: dq/dm
1481  */
1482 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1483 			     int delta_dif, int delta_sig, u32 rank)
1484 {
1485 	void __iomem *phy_base = dram->phy;
1486 	u32 i, j, tmp, offset;
1487 	u32 byte_en;
1488 
1489 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1490 
1491 	if (signal == SKEW_RX_SIGNAL)
1492 		offset = 0;
1493 	else
1494 		offset = 8;
1495 
1496 	for (j = offset; j < (offset + rank * 4); j++) {
1497 		if (!((byte_en >> (j % 4)) & 1))
1498 			continue;
1499 		for (i = 0; i < 0x9; i++) {
1500 			if (dir == DESKEW_MDF_ABS_VAL)
1501 				tmp = delta_sig;
1502 			else
1503 				tmp = delta_sig + readl(PHY_REG(phy_base,
1504 							dqs_dq_skew_adr[j] +
1505 							i));
1506 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1507 		}
1508 		if (dir == DESKEW_MDF_ABS_VAL)
1509 			tmp = delta_dif;
1510 		else
1511 			tmp = delta_dif + readl(PHY_REG(phy_base,
1512 						dqs_dq_skew_adr[j] + 9));
1513 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1514 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1515 	}
1516 	if (signal == SKEW_RX_SIGNAL)
1517 		update_dq_rx_prebit(dram);
1518 	else
1519 		update_dq_tx_prebit(dram);
1520 }
1521 
/*
 * Read-gate training for one chip-select.
 * Returns 0 on success, -1 when the PHY reports a gate-training error
 * (0x91[5]), or a non-zero bitmask of byte lanes whose done flag
 * (0x91[3:0]) does not match the enabled-lane mask (0xf[3:0]).
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save current per-lane ODT so it can be restored afterwards */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	/* non-LPDDR4: force a fixed ODT (pull-down 294 ohm, pull-up off)
	 * on all four lanes for the duration of the training
	 */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* DDR4 keeps PHY_0xc[1] set instead */
	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* 0x91[5] = error flag; otherwise compare done-lanes vs enabled */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	/* restore the saved ODT values on all four lanes */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1574 
/*
 * Write-leveling training for one chip-select.
 * Feeds the current MR1 value (from INIT3 of the active frequency set)
 * to the PHY, runs the leveling engine, and polls until every enabled
 * byte lane reports done. Hangs deliberately on timeout (fatal in TPL).
 * Returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch MR1 for the current frequency setpoint and hand its low
	 * byte to the PHY (reg 0x3), upper bits to reg 0x4 below
	 */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output (MR1 bit 12 — presumably Qoff for
	 * DDR3/DDR4; confirm vs JEDEC MR1 definition)
	 */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until done flags (0x92[3:0]) cover all enabled lanes */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1637 
/* alternating 0xaa/0x55 data pattern; deliberately non-static —
 * NOTE(review): presumably referenced by training/test code outside
 * this chunk, so the linkage must stay external
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1644 
/*
 * Auto read training for one chip-select (cs must be 0 or 1).
 * Temporarily lowers the DDR3 RX vref (when it is at the 0x80 default),
 * programs refresh timing into the PHY, runs the PHY's automatic
 * read-train engine and checks the per-channel error flags, then
 * restores the vref. Returns 0 on success, -1 on timeout or error.
 * (mhz is currently unused here.)
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* DDR3 with default vref: drop it by 0xa during training */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the DDR3 RX vref lowered above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1761 
/*
 * Auto write training for one chip-select.
 * LPDDR3 at <= 400MHz: CL/CWL are temporarily overridden in the PHY
 * (and MR2 rewritten) for the training, then restored at the end.
 * On success for LPDDR4, the resulting write vref (plus its range bit)
 * is saved into fsp_param[dst_fsp] for later DFS use.
 * Returns 0 on success, -1 on training error; hangs (deliberate trap)
 * on timeout.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* save current CL/CWL for the active PHY fsp, then force the
	 * low-speed training values (CL=0x8, CWL=0x4, MR2=0x6)
	 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* training target address: bank 0, row 0, column 0 */
	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait for the training-done flag (0x92[7]) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		/* average the two channel vref results (0x384/0x385) */
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the LPDDR3 low-speed CL/CWL/MR2 overrides */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1874 
1875 static int data_training(struct dram_info *dram, u32 cs,
1876 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1877 			 u32 training_flag)
1878 {
1879 	u32 ret = 0;
1880 
1881 	if (training_flag == FULL_TRAINING)
1882 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1883 				WRITE_TRAINING | READ_TRAINING;
1884 
1885 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1886 		ret = data_training_wl(dram, cs,
1887 				       sdram_params->base.dramtype,
1888 				       sdram_params->ch.cap_info.rank);
1889 		if (ret != 0)
1890 			goto out;
1891 	}
1892 
1893 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1894 		ret = data_training_rg(dram, cs,
1895 				       sdram_params->base.dramtype);
1896 		if (ret != 0)
1897 			goto out;
1898 	}
1899 
1900 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1901 		ret = data_training_rd(dram, cs,
1902 				       sdram_params->base.dramtype,
1903 				       sdram_params->base.ddr_freq);
1904 		if (ret != 0)
1905 			goto out;
1906 	}
1907 
1908 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1909 		ret = data_training_wr(dram, cs,
1910 				       sdram_params->base.dramtype,
1911 				       sdram_params->base.ddr_freq, dst_fsp);
1912 		if (ret != 0)
1913 			goto out;
1914 	}
1915 
1916 out:
1917 	return ret;
1918 }
1919 
/*
 * Run write leveling with a known absolute CA/clk de-skew (0x1f) and
 * record the per-byte results — minus the injected clk skew — into
 * wrlvl_result[rank][byte]. Low-power features are suspended for the
 * duration and restored afterwards.
 * Returns the (OR-combined, for 2-rank parts) data_training() result;
 * 0 on success.
 */
static int get_wrlvl_val(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params)
{
	u32 i, j, clk_skew;
	void __iomem *phy_base = dram->phy;
	u32 lp_stat;
	int ret;

	lp_stat = low_power_update(dram, 0);

	clk_skew = 0x1f;
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
			 sdram_params->base.dramtype);

	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);

	/* subtract the injected skew to get the raw leveling value */
	for (j = 0; j < 2; j++)
		for (i = 0; i < 4; i++)
			wrlvl_result[j][i] =
				readl(PHY_REG(phy_base,
					      wrlvl_result_offset[j][i])) -
				clk_skew;

	low_power_update(dram, lp_stat);

	return ret;
}
1949 
1950 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1951 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1952 				      void __iomem *phy_base, u8 cs_num)
1953 {
1954 	int i;
1955 
1956 	result->cs_num = cs_num;
1957 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1958 			  PHY_DQ_WIDTH_MASK;
1959 	for (i = 0; i < FSP_NUM; i++)
1960 		result->fsp_mhz[i] = 0;
1961 }
1962 
1963 static void save_rw_trn_min_max(void __iomem *phy_base,
1964 				struct cs_rw_trn_result *rd_result,
1965 				struct cs_rw_trn_result *wr_result,
1966 				u8 byte_en)
1967 {
1968 	u16 phy_ofs;
1969 	u8 dqs;
1970 	u8 dq;
1971 
1972 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
1973 		if ((byte_en & BIT(dqs)) == 0)
1974 			continue;
1975 
1976 		/* Channel A or B (low or high 16 bit) */
1977 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
1978 		/* low or high 8 bit */
1979 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
1980 		for (dq = 0; dq < 8; dq++) {
1981 			rd_result->dqs[dqs].dq_min[dq] =
1982 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
1983 			rd_result->dqs[dqs].dq_max[dq] =
1984 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
1985 			wr_result->dqs[dqs].dq_min[dq] =
1986 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
1987 			wr_result->dqs[dqs].dq_max[dq] =
1988 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
1989 		}
1990 	}
1991 }
1992 
1993 static void save_rw_trn_deskew(void __iomem *phy_base,
1994 			       struct fsp_rw_trn_result *result, u8 cs_num,
1995 			       int min_val, bool rw)
1996 {
1997 	u16 phy_ofs;
1998 	u8 cs;
1999 	u8 dq;
2000 
2001 	result->min_val = min_val;
2002 
2003 	for (cs = 0; cs < cs_num; cs++) {
2004 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2005 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2006 		for (dq = 0; dq < 8; dq++) {
2007 			result->cs[cs].dqs[0].dq_deskew[dq] =
2008 				readb(PHY_REG(phy_base, phy_ofs + dq));
2009 			result->cs[cs].dqs[1].dq_deskew[dq] =
2010 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2011 			result->cs[cs].dqs[2].dq_deskew[dq] =
2012 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2013 			result->cs[cs].dqs[3].dq_deskew[dq] =
2014 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2015 		}
2016 
2017 		result->cs[cs].dqs[0].dqs_deskew =
2018 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2019 		result->cs[cs].dqs[1].dqs_deskew =
2020 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2021 		result->cs[cs].dqs[2].dqs_deskew =
2022 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2023 		result->cs[cs].dqs[3].dqs_deskew =
2024 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2025 	}
2026 }
2027 
/*
 * Publish the training result blob at a fixed DRAM address so the
 * ddr test tool can pick it up later.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	/* Magic flag lets the consumer validate the blob */
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2033 #endif
2034 
/*
 * Training sequence for a high-frequency set point (fsp): derive the
 * average DQS skew from the write-leveling results in wrlvl_result[],
 * re-center the clk/CA deskew around it, run gate/read/write training
 * per rank, then normalize the trained RX/TX per-bit deskews so their
 * minimum becomes zero.
 *
 * Returns 0 on success, non-zero if any training step fails.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	/* Average the write-leveling skew over active bytes and ranks */
	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       (1 << sdram_params->ch.cap_info.bw));

	/* Move clk so the average DQS skew sits at mid-scale (0x20) */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* Load the rank-0 write-leveling values into the per-byte regs */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* Repeat with the rank-1 write-leveling values */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* Shift all RX per-bit deskews down so the minimum becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* Same normalization for TX, bounded by the CA minimum as well */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* Deskew changed the eye position: redo gate training */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2137 
2138 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2139 {
2140 	writel(ddrconfig, &dram->msch->deviceconf);
2141 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2142 }
2143 
/*
 * Program the service-msch (NoC) timing registers from sdram_params,
 * after adjusting burstsize/burstpenalty for the current data-bus
 * width and the controller burst length read back from MSTR.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* bw in bits; bl from MSTR[19:16] (field encodes BL/2) */
	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	/* burstsize keyed by bytes transferred per burst (bw/8 * bl) */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4) {
		/* LPDDR4 masked-write size depends on the bus width */
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	/* Push the (possibly adjusted) timings into the msch registers */
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
2185 
/*
 * Apply the final system-level configuration: msch ddrconfig, pmugrf
 * OS registers describing the detected geometry, per-rank device size
 * for the msch, and the NoC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	/* Encode the detected geometry into os_reg2/3 for later stages */
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* Bit position where the chip-select decode starts */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		/*
		 * NOTE(review): rank-0 size is forced up to the cs boundary
		 * when the cs bit is above bit 28; ddr_set_atags() uses
		 * "> 27" for a similar check — confirm the intended
		 * boundary value.
		 */
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize: per-rank capacity in 64 MiB units, 8 bits per rank */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2218 
/*
 * Enable the automatic hardware low-power features: DDR GRF low-power
 * control plus the controller PWRCTL self-refresh / power-down /
 * clock-stop enables, gated by dram->sr_idle and dram->pd_idle.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	/* NOTE(review): vendor magic idle/low-power value — see RV1126 TRM */
	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/*
	 * Select the dram-type specific low-power request bit; the upper
	 * halfword is the GRF write-enable mask for bits [2:0].
	 */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	/* PWRCTL bit 1: auto power-down, controlled by pd_idle */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	/* PWRCTL bit 0: auto self-refresh, controlled by sr_idle */
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	/*
	 * PWRCTL bit 3: presumably en_dfi_dram_clk_disable — confirm
	 * against the uMCTL2 databook.
	 */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2249 
2250 static void ddr_set_atags(struct dram_info *dram,
2251 			  struct rv1126_sdram_params *sdram_params)
2252 {
2253 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2254 	u32 dram_type = sdram_params->base.dramtype;
2255 	void __iomem *pctl_base = dram->pctl;
2256 	struct tag_serial t_serial;
2257 	struct tag_ddr_mem t_ddrmem;
2258 	struct tag_soc_info t_socinfo;
2259 	u64 cs_cap[2];
2260 	u32 cs_pst = 0;
2261 
2262 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2263 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2264 
2265 	memset(&t_serial, 0, sizeof(struct tag_serial));
2266 
2267 	t_serial.version = 0;
2268 	t_serial.enable = 1;
2269 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2270 	t_serial.baudrate = CONFIG_BAUDRATE;
2271 	t_serial.m_mode = SERIAL_M_MODE_M0;
2272 	t_serial.id = 2;
2273 
2274 	atags_destroy();
2275 	atags_set_tag(ATAG_SERIAL,  &t_serial);
2276 
2277 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2278 	if (cap_info->row_3_4) {
2279 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2280 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2281 	}
2282 	t_ddrmem.version = 0;
2283 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2284 	if (cs_cap[1]) {
2285 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2286 			6 + 2;
2287 	}
2288 
2289 	if (cs_cap[1] && cs_pst > 27) {
2290 		t_ddrmem.count = 2;
2291 		t_ddrmem.bank[1] = 1 << cs_pst;
2292 		t_ddrmem.bank[2] = cs_cap[0];
2293 		t_ddrmem.bank[3] = cs_cap[1];
2294 	} else {
2295 		t_ddrmem.count = 1;
2296 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
2297 	}
2298 
2299 	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);
2300 
2301 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2302 	t_socinfo.version = 0;
2303 	t_socinfo.name = 0x1126;
2304 }
2305 
2306 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2307 {
2308 	u32 split;
2309 
2310 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2311 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2312 		split = 0;
2313 	else
2314 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2315 			SPLIT_SIZE_MASK;
2316 
2317 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2318 			     &sdram_params->base, split);
2319 }
2320 
/*
 * Bring the DDR subsystem up at the initial frequency: configure the
 * clocks, release the resets stage by stage while programming the PHY
 * and the uMCTL2 controller, issue the LPDDR3/4 mode-register writes,
 * run read-gate training and finally program the msch and low-power
 * settings.
 *
 * @post_init: 0 for the first probe pass (quiet failure, geometry not
 *             yet known); non-zero for the final pass (errors printed,
 *             cs1 also trained).
 *
 * Returns 0 on success, -1 on training failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* Assert all resets, then release them stage by stage below */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* Halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	u32 tmp, trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	/* Release the remaining resets and let the controller initialize */
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* Wait until the controller leaves the init state (STAT[2:0] != 0) */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* Issue dram-type specific mode-register writes after init */
	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/* cs1 is only trained on the final pass, once the rank is known */
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* Propagate the PHY VREF (reg 0x105, 39 uV steps?) — confirm */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2427 
/*
 * Detect the attached SDRAM geometry: column/bank/row organisation (via
 * the sdram_detect_* probe helpers), rank count and data-bus width (via
 * read-gate training).  LPDDR4 geometry is read from MR8 instead of
 * being probed.
 *
 * Returns 0 on success, (u64)-1 on detection failure (callers only test
 * for non-zero).
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;
	u32 i, dq_map;
	u32 byte1 = 0, byte0 = 0;

	/* Initial probe width: bw=0 for DDR3, bw=1 otherwise */
	cap_info->bw = dram_type == DDR3 ? 0 : 1;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* Upper bounds handed to the col/bank/row probes */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe the bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: geometry follows the MR8 density field */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* Suppress low-power transitions while probing */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* Rank 1 exists iff gate training on cs1 passes */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/* Try the full 32-bit width first, then narrow down */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0) {
			cap_info->bw = 2;
		} else {
			/*
			 * 32 bit failed: find the two lanes mapped to DQ
			 * bytes 0/1 (PHY reg 0x4f) and retry at 16 bit.
			 */
			dq_map = readl(PHY_REG(phy_base, 0x4f));
			for (i = 0; i < 4; i++) {
				if (((dq_map >> (i * 2)) & 0x3) == 0)
					byte0 = i;
				if (((dq_map >> (i * 2)) & 0x3) == 1)
					byte1 = i;
			}
			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
					BIT(byte0) | BIT(byte1));
			if (data_training(dram, 0, sdram_params, 0,
					  READ_GATE_TRAINING) == 0)
				cap_info->bw = 1;
			else
				cap_info->bw = 0;
		}
		if (cap_info->bw > 0)
			cap_info->dbw = 1;
	}

	/* Restore the saved low-power configuration */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* Assume cs1 mirrors cs0 until dram_detect_cs1_row() refines it */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2538 
2539 static int dram_detect_cs1_row(struct dram_info *dram,
2540 			       struct rv1126_sdram_params *sdram_params,
2541 			       unsigned char channel)
2542 {
2543 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2544 	void __iomem *pctl_base = dram->pctl;
2545 	u32 ret = 0;
2546 	void __iomem *test_addr;
2547 	u32 row, bktmp, coltmp, bw;
2548 	u64 cs0_cap;
2549 	u32 byte_mask;
2550 	u32 cs_pst;
2551 	u32 cs_add = 0;
2552 	u32 max_row;
2553 
2554 	if (cap_info->rank == 2) {
2555 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2556 			6 + 2;
2557 		if (cs_pst < 28)
2558 			cs_add = 1;
2559 
2560 		cs0_cap = 1 << cs_pst;
2561 
2562 		if (sdram_params->base.dramtype == DDR4) {
2563 			if (cap_info->dbw == 0)
2564 				bktmp = cap_info->bk + 2;
2565 			else
2566 				bktmp = cap_info->bk + 1;
2567 		} else {
2568 			bktmp = cap_info->bk;
2569 		}
2570 		bw = cap_info->bw;
2571 		coltmp = cap_info->col;
2572 
2573 		if (bw == 2)
2574 			byte_mask = 0xFFFF;
2575 		else
2576 			byte_mask = 0xFF;
2577 
2578 		max_row = (cs_pst == 31) ? 30 : 31;
2579 
2580 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2581 
2582 		row = (cap_info->cs0_row > max_row) ? max_row :
2583 			cap_info->cs0_row;
2584 
2585 		for (; row > 12; row--) {
2586 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2587 				    (u32)cs0_cap +
2588 				    (1ul << (row + bktmp + coltmp +
2589 					     cs_add + bw - 1ul)));
2590 
2591 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2592 			writel(PATTERN, test_addr);
2593 
2594 			if (((readl(test_addr) & byte_mask) ==
2595 			     (PATTERN & byte_mask)) &&
2596 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2597 			      byte_mask) == 0)) {
2598 				ret = row;
2599 				break;
2600 			}
2601 		}
2602 	}
2603 
2604 	return ret;
2605 }
2606 
/* return: 0 = success, other = fail */
/*
 * Probe-and-initialize entry point: bring the DRAM up with the default
 * parameters, detect the real capacity/geometry, then re-initialize
 * with the detected values and record the cs1 row count in the pmugrf
 * OS registers for later boot stages.
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (sdram_init_(dram, sdram_params, 0)) {
		if (sdram_params->base.dramtype == DDR3) {
			/*
			 * DDR3 first-pass failure: retry once with an
			 * alternate DQ byte mapping in byte_map[0].
			 */
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					(0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	/* Basic write/read sanity check on the first DRAM word */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* Regenerate the controller timings for the detected geometry */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* Publish the detected cs1 row count via os_reg2/3 */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info);

out:
	return ret;
}
2665 
2666 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2667 {
2668 	u32 i;
2669 	u32 offset = 0;
2670 	struct ddr2_3_4_lp2_3_info *ddr_info;
2671 
2672 	if (!freq_mhz) {
2673 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2674 		if (ddr_info)
2675 			freq_mhz =
2676 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2677 				DDR_FREQ_MASK;
2678 		else
2679 			freq_mhz = 0;
2680 	}
2681 
2682 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2683 		if (sdram_configs[i].base.ddr_freq == 0 ||
2684 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2685 			break;
2686 	}
2687 	offset = i == 0 ? 0 : i - 1;
2688 
2689 	return &sdram_configs[offset];
2690 }
2691 
/*
 * uMCTL2 registers holding frequency-dependent timings; on a frequency
 * switch each one present in pctl_regs is copied into the destination
 * FSP's register copy (see pre_set_rate()).
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2715 
/* PHY timing registers (cl/cwl/al) refreshed on a frequency switch */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2721 
/*
 * Prepare a target frequency set point (dst_fsp) ahead of the actual
 * rate switch: copy the frequency-dependent controller timings into the
 * dst_fsp register copy, update the PHY cl/cwl/al registers, refresh
 * drive-strength/ODT and, for LPDDR4, rewrite the mode registers (and
 * their PHY shadow copies) for the target FSP.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	/* Quasi-dynamic registers: open the sw programming window */
	sw_set_req(dram);
	/* pctl timing update */
	/*
	 * Each scan resumes at the previous match ("find") — assumes
	 * pctl_regs and pctl_need_update_reg share the same ordering.
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* Halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/*
		 * Rewrite the LPDDR4 mode registers for the target FSP and
		 * mirror each value into the PHY's MR shadow registers
		 * (PHY regs 0x17..0x1d).
		 */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		/* Select FSP-OP (bit 7) or FSP-WR (bit 6) via dst_fsp_lp4 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2847 
2848 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
2849 			   struct rv1126_sdram_params *sdram_params)
2850 {
2851 	void __iomem *pctl_base = dram->pctl;
2852 	void __iomem *phy_base = dram->phy;
2853 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
2854 	u32 temp, temp1;
2855 	struct ddr2_3_4_lp2_3_info *ddr_info;
2856 
2857 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
2858 
2859 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
2860 
2861 	if (sdram_params->base.dramtype == LPDDR4) {
2862 		p_fsp_param->rd_odt_up_en = 0;
2863 		p_fsp_param->rd_odt_down_en = 1;
2864 	} else {
2865 		p_fsp_param->rd_odt_up_en =
2866 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
2867 		p_fsp_param->rd_odt_down_en =
2868 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
2869 	}
2870 
2871 	if (p_fsp_param->rd_odt_up_en)
2872 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
2873 	else if (p_fsp_param->rd_odt_down_en)
2874 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
2875 	else
2876 		p_fsp_param->rd_odt = 0;
2877 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
2878 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
2879 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
2880 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
2881 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
2882 
2883 	if (sdram_params->base.dramtype == DDR3) {
2884 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2885 			     DDR_PCTL2_INIT3);
2886 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2887 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
2888 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
2889 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2890 	} else if (sdram_params->base.dramtype == DDR4) {
2891 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2892 			     DDR_PCTL2_INIT3);
2893 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2894 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
2895 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
2896 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2897 	} else if (sdram_params->base.dramtype == LPDDR3) {
2898 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2899 			     DDR_PCTL2_INIT4);
2900 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2901 		p_fsp_param->ds_pdds = temp & 0xf;
2902 
2903 		p_fsp_param->dq_odt = lp3_odt_value;
2904 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2905 	} else if (sdram_params->base.dramtype == LPDDR4) {
2906 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2907 			     DDR_PCTL2_INIT4);
2908 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2909 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
2910 
2911 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2912 			     DDR_PCTL2_INIT6);
2913 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
2914 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
2915 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
2916 
2917 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
2918 			   readl(PHY_REG(phy_base, 0x3ce)));
2919 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
2920 			    readl(PHY_REG(phy_base, 0x3de)));
2921 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
2922 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
2923 			   readl(PHY_REG(phy_base, 0x3cf)));
2924 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
2925 			    readl(PHY_REG(phy_base, 0x3df)));
2926 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
2927 		p_fsp_param->vref_ca[0] |=
2928 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2929 		p_fsp_param->vref_ca[1] |=
2930 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2931 
2932 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
2933 					      3) & 0x1;
2934 	}
2935 
2936 	p_fsp_param->noc_timings.ddrtiminga0 =
2937 		sdram_params->ch.noc_timings.ddrtiminga0;
2938 	p_fsp_param->noc_timings.ddrtimingb0 =
2939 		sdram_params->ch.noc_timings.ddrtimingb0;
2940 	p_fsp_param->noc_timings.ddrtimingc0 =
2941 		sdram_params->ch.noc_timings.ddrtimingc0;
2942 	p_fsp_param->noc_timings.devtodev0 =
2943 		sdram_params->ch.noc_timings.devtodev0;
2944 	p_fsp_param->noc_timings.ddrmode =
2945 		sdram_params->ch.noc_timings.ddrmode;
2946 	p_fsp_param->noc_timings.ddr4timing =
2947 		sdram_params->ch.noc_timings.ddr4timing;
2948 	p_fsp_param->noc_timings.agingx0 =
2949 		sdram_params->ch.noc_timings.agingx0;
2950 	p_fsp_param->noc_timings.aging0 =
2951 		sdram_params->ch.noc_timings.aging0;
2952 	p_fsp_param->noc_timings.aging1 =
2953 		sdram_params->ch.noc_timings.aging1;
2954 	p_fsp_param->noc_timings.aging2 =
2955 		sdram_params->ch.noc_timings.aging2;
2956 	p_fsp_param->noc_timings.aging3 =
2957 		sdram_params->ch.noc_timings.aging3;
2958 
2959 	p_fsp_param->flag = FSP_FLAG;
2960 }
2961 
2962 #ifndef CONFIG_SPL_KERNEL_BOOT
2963 static void copy_fsp_param_to_ddr(void)
2964 {
2965 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
2966 	       sizeof(fsp_param));
2967 }
2968 #endif
2969 
2970 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
2971 			     struct sdram_cap_info *cap_info, u32 dram_type,
2972 			     u32 freq)
2973 {
2974 	u64 cs0_cap;
2975 	u32 die_cap;
2976 	u32 trfc_ns, trfc4_ns;
2977 	u32 trfc, txsnr;
2978 	u32 txs_abort_fast = 0;
2979 	u32 tmp;
2980 
2981 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
2982 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
2983 
2984 	switch (dram_type) {
2985 	case DDR3:
2986 		if (die_cap <= DIE_CAP_512MBIT)
2987 			trfc_ns = 90;
2988 		else if (die_cap <= DIE_CAP_1GBIT)
2989 			trfc_ns = 110;
2990 		else if (die_cap <= DIE_CAP_2GBIT)
2991 			trfc_ns = 160;
2992 		else if (die_cap <= DIE_CAP_4GBIT)
2993 			trfc_ns = 260;
2994 		else
2995 			trfc_ns = 350;
2996 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
2997 		break;
2998 
2999 	case DDR4:
3000 		if (die_cap <= DIE_CAP_2GBIT) {
3001 			trfc_ns = 160;
3002 			trfc4_ns = 90;
3003 		} else if (die_cap <= DIE_CAP_4GBIT) {
3004 			trfc_ns = 260;
3005 			trfc4_ns = 110;
3006 		} else if (die_cap <= DIE_CAP_8GBIT) {
3007 			trfc_ns = 350;
3008 			trfc4_ns = 160;
3009 		} else {
3010 			trfc_ns = 550;
3011 			trfc4_ns = 260;
3012 		}
3013 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3014 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3015 		break;
3016 
3017 	case LPDDR3:
3018 		if (die_cap <= DIE_CAP_4GBIT)
3019 			trfc_ns = 130;
3020 		else
3021 			trfc_ns = 210;
3022 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3023 		break;
3024 
3025 	case LPDDR4:
3026 	case LPDDR4X:
3027 		if (die_cap <= DIE_CAP_4GBIT)
3028 			trfc_ns = 130;
3029 		else if (die_cap <= DIE_CAP_8GBIT)
3030 			trfc_ns = 180;
3031 		else if (die_cap <= DIE_CAP_16GBIT)
3032 			trfc_ns = 280;
3033 		else
3034 			trfc_ns = 380;
3035 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3036 		break;
3037 
3038 	default:
3039 		return;
3040 	}
3041 	trfc = (trfc_ns * freq + 999) / 1000;
3042 
3043 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3044 		switch (pctl_regs->pctl[i][0]) {
3045 		case DDR_PCTL2_RFSHTMG:
3046 			tmp = pctl_regs->pctl[i][1];
3047 			/* t_rfc_min */
3048 			tmp &= ~((u32)0x3ff);
3049 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3050 			pctl_regs->pctl[i][1] = tmp;
3051 			break;
3052 
3053 		case DDR_PCTL2_DRAMTMG8:
3054 			if (dram_type == DDR3 || dram_type == DDR4) {
3055 				tmp = pctl_regs->pctl[i][1];
3056 				/* t_xs_x32 */
3057 				tmp &= ~((u32)0x7f);
3058 				tmp |= ((txsnr + 63) / 64) & 0x7f;
3059 
3060 				if (dram_type == DDR4) {
3061 					/* t_xs_abort_x32 */
3062 					tmp &= ~((u32)(0x7f << 16));
3063 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3064 					/* t_xs_fast_x32 */
3065 					tmp &= ~((u32)(0x7f << 24));
3066 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3067 				}
3068 
3069 				pctl_regs->pctl[i][1] = tmp;
3070 			}
3071 			break;
3072 
3073 		case DDR_PCTL2_DRAMTMG14:
3074 			if (dram_type == LPDDR3 ||
3075 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3076 				tmp = pctl_regs->pctl[i][1];
3077 				/* t_xsr */
3078 				tmp &= ~((u32)0xfff);
3079 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3080 				pctl_regs->pctl[i][1] = tmp;
3081 			}
3082 			break;
3083 
3084 		default:
3085 			break;
3086 		}
3087 	}
3088 }
3089 
/*
 * Switch the DDR subsystem to a new frequency/FSP.
 *
 * @dram:        driver state (pctl/phy/cru/grf register bases)
 * @sdram_params: currently-applied config; rank/bw are carried over
 * @freq:        target frequency in MHz
 * @cur_freq:    previous frequency (accepted but not referenced here)
 * @dst_fsp:     destination frequency-set-point index (controller reg set)
 * @dst_fsp_lp4: LPDDR4 FSP select written into MR13 (OP[7])
 * @training_en: accepted but not referenced in this function
 *
 * Sequence: disable low-power, build timings for @freq, flush pending
 * low-power state, put the DRAM in self-refresh, gate controller clocks,
 * re-lock DPLL/PHY PLL at the new rate, ungate, switch the controller to
 * @dst_fsp, exit self-refresh, re-issue mode registers, then run
 * high-frequency training and save per-FSP parameters.
 * The ordering of these steps is hardware-mandated; do not reorder.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* Disable low-power features; remember previous state for restore. */
	lp_stat = low_power_update(dram, 0);
	/* Timing set for the target frequency; keep detected rank/width. */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* Wait until the controller has left self-refresh. */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * Decide whether the destination FSP runs with the DRAM DLL off
	 * (DDR3: MR1 bit0 = 1 disables DLL; DDR4: MR1 bit0 = 0 disables it).
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	/* Same check for the currently active FSP's MR1. */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* If the DLL is currently on, turn it off via MR1 before switching. */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	/* Block auto-refresh while the clocks are being retuned. */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* Hold PHY outputs stable while the DFI handshake is disabled. */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	/* Program DLL-off mode and disable SRX ZQCL for both FSPs. */
	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* Gate MSCH and upctl clocks while the PLLs are re-locked. */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* Reset the PHY around the DPLL/PHY-PLL frequency change. */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* Release buffers and ungate the clocks again. */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* Wait for the DFI init handshake to complete. */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* Switch controller and PHY to the destination FSP. */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* Pulse PHY reg 0x71 bit5 (presumably a FIFO/pointer reset — confirm). */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/*
	 * Re-issue mode registers for the new FSP from the INIT3/INIT4/...
	 * shadow registers.
	 * NOTE(review): LPDDR4X takes none of these branches — confirm
	 * whether it should follow the LPDDR4 (MR13) path.
	 */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			/* DLL stays on: request a DLL reset via MR0. */
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: keep INIT4 value but select the FSP via OP[7]. */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	/* Re-enable auto-refresh. */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3272 
/*
 * Walk the configured frequency set points and train each one.
 *
 * Reads f0..f3 from the drv/odt info blob. In the normal TPL flow the rate
 * is stepped through f1, f2, f3 (populating FSPs 1..3) and finally f0 as
 * the runtime frequency in FSP 0; with CONFIG_SPL_KERNEL_BOOT only f0 is
 * applied. Per-FSP parameters are accumulated into fsp_param by
 * ddr_set_rate()/save_fsp_param().
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	/* f0 is the final runtime frequency. */
	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Clear both the DDR-resident copy and the local fsp_param table. */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	/* Failure is reported but not fatal; training continues. */
	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Train FSP1..FSP3 in turn, each starting from the previous rate. */
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3329 
3330 int get_uart_config(void)
3331 {
3332 	struct sdram_head_info_index_v2 *index =
3333 		(struct sdram_head_info_index_v2 *)common_info;
3334 	struct global_info *gbl_info;
3335 
3336 	gbl_info = (struct global_info *)((void *)common_info +
3337 		index->global_index.offset * 4);
3338 
3339 	return gbl_info->uart_info;
3340 }
3341 
/*
 * TPL DRAM initialization entry point.
 *
 * Fills in the register bases, validates the v2 common-info parameter blob,
 * applies the 2T-timing option for DDR3/DDR4, detects the DRAM
 * (sdram_init_detect), trains all frequency set points, and publishes the
 * results (FSP params and ATAGs) for later boot stages.
 *
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* Fixed peripheral bases for this SoC (see #defines above). */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Sanity-check the blob: version must be 2, every per-type record
	 * must have the expected size (in words) and a non-zero offset.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	/* global_index.offset is counted in 32-bit words. */
	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];

	/* Bit 10 of the first pctl entry selects 2T timing for DDR3/DDR4. */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		/* Report the type/frequency that failed detection. */
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	/* Train all FSPs, then publish parameters for later stages. */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3427 #endif /* CONFIG_TPL_BUILD */
3428