xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 77bac292f4ebd0ec3e4e2e49c2af5551cbc57f2d)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/*
 * Runtime state for the RV1126 DDR bring-up in TPL.
 * Register-block pointers are filled in before the init helpers
 * in this file are called.
 */
struct dram_info {
	void __iomem *pctl;		/* DDR controller (UMCTL2) registers */
	void __iomem *phy;		/* DDR PHY registers */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* general register file */
	struct ram_info info;		/* reported RAM base/size */
	struct rv1126_pmugrf *pmugrf;	/* PMU general register file */
	u32 sr_idle;	/* NOTE(review): presumably self-refresh idle count — set elsewhere */
	u32 pd_idle;	/* NOTE(review): presumably power-down idle count — set elsewhere */
};
55 
/* Fixed peripheral base addresses from the RV1126 memory map */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* SGRF offsets; CON13 carries the DDR controller reset requests */
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34
67 
/* Single shared DRAM state instance used throughout the TPL init path. */
struct dram_info dram_info;
69 
/*
 * Per-frequency parameter sets for the configured DRAM type
 * (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE: 3 = DDR3, 0 = DDR4,
 * 6 = LPDDR3, 7 = LPDDR4).  Each .inc expands to one struct
 * initializer; entries are ordered from 328 MHz up to 1056 MHz.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
111 
/*
 * Loader parameter blob; interpreted as a struct sdram_head_info_index_v2
 * header followed by per-DRAM-type drive/ODT tables (see
 * get_ddr_drv_odt_info(), which indexes into it by word offsets).
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};
115 
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* Read/write training results exported to the DDR test tool. */
static struct rw_trn_result rw_trn_result;
#endif

/* Per-frequency-set-point controller/PHY parameters (filled during init). */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* NOTE(review): cached LPDDR3 ODT setting — consumer not in this chunk. */
static u8 lp3_odt_value;

/* Write-leveling results, presumably indexed [rank][byte lane] — matches
 * the shape of wrlvl_result_offset below; confirm against readers.
 */
static s8 wrlvl_result[2][4];
125 
/*
 * DDR configuration 0-9 (non-DDR4 types).
 * Encoding matched against in calculate_ddrconfig():
 *   bit  8   : rank - 1
 *   bits 7:5 : cs0_row - 13
 *   bit  4   : see calculate_ddrconfig() masks (not compared directly)
 *   bit  3   : set when bank count is 8 (bk == 3)
 *   bits 2:0 : bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
139 
/*
 * DDR configuration 10-21 (DDR4 only).
 * Encoding matched against in calculate_ddrconfig():
 *   bit  7   : rank - 1
 *   bits 6:4 : cs0_row - 13
 *   bit  3   : dual-rank symmetric flag (cs == 2 && cs0_row == cs1_row)
 *   bits 2:1 : bus width code
 *   bit  0   : die bus width code
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
155 
/*
 * DDR configuration 22-28: second-pass table for non-DDR4 types,
 * same bit encoding as ddr_cfg_2_rbc above; searched only when the
 * first table has no match (ddrconf = table index + 22).
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
166 
/*
 * {DDR4 config index, equivalent DDR3-style config index} pairs.
 * calculate_ddrconfig() maps forward ([0] -> [1]) and
 * set_ctl_address_map() maps backward ([1] -> [0]).
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
181 
/*
 * Controller address-map register values, one row per ddrconfig
 * (valid indices 0..22).  The 9 words of a row are written as a
 * block to ADDRMAP0..ADDRMAP8 by set_ctl_address_map().
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
232 
/*
 * NOTE(review): per-DQ-signal selection/index triples; the consumers
 * are not in this chunk — confirm meaning of the three columns
 * against the deskew/training code later in the file.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
257 
/* Skew group base offsets, ordered CS0-A, CS0-B, CS1-A, CS1-B. */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
264 
/*
 * PHY register offsets holding write-leveling results, presumably
 * indexed [rank][byte lane] (mirrors wrlvl_result's shape) — the
 * reader is not in this chunk; confirm against it.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
269 
/*
 * PHY skew-update register offsets, one per DQS slice:
 * first 8 entries RX (CS0 then CS1), last 8 TX (CS0 then CS1).
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
288 
289 static void rkclk_ddr_reset(struct dram_info *dram,
290 			    u32 ctl_srstn, u32 ctl_psrstn,
291 			    u32 phy_srstn, u32 phy_psrstn)
292 {
293 	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
294 	       UPCTL2_ASRSTN_REQ(ctl_srstn),
295 	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
296 
297 	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
298 	       &dram->cru->softrst_con[12]);
299 }
300 
301 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
302 {
303 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
304 	int delay = 1000;
305 	u32 mhz = hz / MHz;
306 
307 	refdiv = 1;
308 	if (mhz <= 100) {
309 		postdiv1 = 6;
310 		postdiv2 = 4;
311 	} else if (mhz <= 150) {
312 		postdiv1 = 4;
313 		postdiv2 = 4;
314 	} else if (mhz <= 200) {
315 		postdiv1 = 6;
316 		postdiv2 = 2;
317 	} else if (mhz <= 300) {
318 		postdiv1 = 4;
319 		postdiv2 = 2;
320 	} else if (mhz <= 400) {
321 		postdiv1 = 6;
322 		postdiv2 = 1;
323 	} else {
324 		postdiv1 = 4;
325 		postdiv2 = 1;
326 	}
327 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
328 
329 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
330 
331 	writel(0x1f000000, &dram->cru->clksel_con[64]);
332 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
333 	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
334 	       &dram->cru->pll[1].con1);
335 
336 	while (delay > 0) {
337 		udelay(1);
338 		if (LOCK(readl(&dram->cru->pll[1].con1)))
339 			break;
340 		delay--;
341 	}
342 
343 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
344 }
345 
346 static void rkclk_configure_ddr(struct dram_info *dram,
347 				struct rv1126_sdram_params *sdram_params)
348 {
349 	/* for inno ddr phy need freq / 2 */
350 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
351 }
352 
/*
 * Pick the ddrconfig index (row/bank/column address-map selection)
 * matching the detected DRAM geometry.
 *
 * Each candidate table entry encodes rank/row/bank/column fields (see
 * the table comments above); an entry matches when the exact-match
 * fields are equal and the range fields (row bits, rank bit) are
 * large enough.  DDR4 hits are finally translated to their DDR3-style
 * equivalent through d4_rbc_2_d3_rbc.
 *
 * Returns the config index, or (u32)-1 (after printing an error,
 * since 0xffffffff > 28) if nothing matched.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank first: entries 17..20 only */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* general DDR4 search over entries 10..20 */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8 banks: prefer entries 5..7 */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/* first-pass table: configs 0..8 */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* second-pass table: configs 22..28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort fallback for small single-rank parts */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* DDR4 indices map to DDR3-style equivalents for the addrmap table */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
457 
458 static void sw_set_req(struct dram_info *dram)
459 {
460 	void __iomem *pctl_base = dram->pctl;
461 
462 	/* clear sw_done=0 */
463 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
464 }
465 
466 static void sw_set_ack(struct dram_info *dram)
467 {
468 	void __iomem *pctl_base = dram->pctl;
469 
470 	/* set sw_done=1 */
471 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
472 	while (1) {
473 		/* wait programming done */
474 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
475 				PCTL2_SW_DONE_ACK)
476 			break;
477 	}
478 }
479 
480 static void set_ctl_address_map(struct dram_info *dram,
481 				struct rv1126_sdram_params *sdram_params)
482 {
483 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
484 	void __iomem *pctl_base = dram->pctl;
485 	u32 ddrconf = cap_info->ddrconfig;
486 	u32 i, row;
487 
488 	row = cap_info->cs0_row;
489 	if (sdram_params->base.dramtype == DDR4) {
490 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
491 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
492 				ddrconf = d4_rbc_2_d3_rbc[i][0];
493 				break;
494 			}
495 		}
496 	}
497 
498 	if (ddrconf > ARRAY_SIZE(addrmap)) {
499 		printascii("set ctl address map fail\n");
500 		return;
501 	}
502 
503 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
504 			  &addrmap[ddrconf][0], 9 * 4);
505 
506 	/* unused row set to 0xf */
507 	for (i = 17; i >= row; i--)
508 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
509 			((i - 12) * 8 / 32) * 4,
510 			0xf << ((i - 12) * 8 % 32));
511 
512 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
513 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
514 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
515 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
516 
517 	if (cap_info->rank == 1)
518 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
519 }
520 
521 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
522 {
523 	void __iomem *phy_base = dram->phy;
524 	u32 fbdiv, prediv, postdiv, postdiv_en;
525 
526 	if (wait) {
527 		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
528 		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
529 			continue;
530 	} else {
531 		freq /= MHz;
532 		prediv = 1;
533 		if (freq <= 200) {
534 			fbdiv = 16;
535 			postdiv = 2;
536 			postdiv_en = 1;
537 		} else if (freq <= 456) {
538 			fbdiv = 8;
539 			postdiv = 1;
540 			postdiv_en = 1;
541 		} else {
542 			fbdiv = 4;
543 			postdiv = 0;
544 			postdiv_en = 0;
545 		}
546 		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
547 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
548 				(fbdiv >> 8) & 1);
549 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
550 				postdiv_en << PHY_POSTDIV_EN_SHIFT);
551 
552 		clrsetbits_le32(PHY_REG(phy_base, 0x52),
553 				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
554 		clrsetbits_le32(PHY_REG(phy_base, 0x53),
555 				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
556 				postdiv << PHY_POSTDIV_SHIFT);
557 	}
558 }
559 
/*
 * DDR3 PHY output drive strength: {register setting, ohms}, sorted by
 * descending resistance.  The descending order is required by the
 * "<=" search loops in set_ds_odt().
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
585 
/*
 * DDR3 PHY termination: {register setting, ohms}, entry 0 disables
 * ODT, remaining entries sorted by descending resistance (required
 * by the "<=" search in set_ds_odt()).
 */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};
612 
/*
 * DDR4/LPDDR3 PHY output drive strength: {register setting, ohms},
 * sorted by descending resistance (required by the "<=" search in
 * set_ds_odt()).
 */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};
638 
639 static u16 d4lp3_phy_odt_2_ohm[][2] = {
640 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
641 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
642 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
643 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
644 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
645 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
646 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
647 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
648 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
649 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
650 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
651 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
652 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
653 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
654 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
655 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
656 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
657 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
658 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
659 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
660 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
661 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
662 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
663 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
664 };
665 
/*
 * LPDDR4 PHY output drive strength: {register setting, ohms}, sorted
 * by descending resistance (required by the "<=" search in
 * set_ds_odt()).
 */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};
691 
/*
 * LPDDR4 PHY termination: {register setting, ohms}, entry 0 disables
 * ODT, remaining entries sorted by descending resistance (required
 * by the "<=" search in set_ds_odt()).
 */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm,	87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};
718 
719 static u32 lp4_odt_calc(u32 odt_ohm)
720 {
721 	u32 odt;
722 
723 	if (odt_ohm == 0)
724 		odt = LPDDR4_DQODT_DIS;
725 	else if (odt_ohm <= 40)
726 		odt = LPDDR4_DQODT_40;
727 	else if (odt_ohm <= 48)
728 		odt = LPDDR4_DQODT_48;
729 	else if (odt_ohm <= 60)
730 		odt = LPDDR4_DQODT_60;
731 	else if (odt_ohm <= 80)
732 		odt = LPDDR4_DQODT_80;
733 	else if (odt_ohm <= 120)
734 		odt = LPDDR4_DQODT_120;
735 	else
736 		odt = LPDDR4_DQODT_240;
737 
738 	return odt;
739 }
740 
741 static void *get_ddr_drv_odt_info(u32 dramtype)
742 {
743 	struct sdram_head_info_index_v2 *index =
744 		(struct sdram_head_info_index_v2 *)common_info;
745 	void *ddr_info = 0;
746 
747 	if (dramtype == DDR4)
748 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
749 	else if (dramtype == DDR3)
750 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
751 	else if (dramtype == LPDDR3)
752 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
753 	else if (dramtype == LPDDR4)
754 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
755 	else
756 		printascii("unsupported dram type\n");
757 	return ddr_info;
758 }
759 
760 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
761 			 u32 freq_mhz, u32 dst_fsp)
762 {
763 	void __iomem *pctl_base = dram->pctl;
764 	u32 ca_vref, dq_vref;
765 
766 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
767 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
768 	else
769 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
770 
771 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
772 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
773 	else
774 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
775 
776 	if (ca_vref < 100)
777 		ca_vref = 100;
778 	if (ca_vref > 420)
779 		ca_vref = 420;
780 
781 	if (ca_vref <= 300)
782 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
783 	else
784 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
785 
786 	if (dq_vref < 100)
787 		dq_vref = 100;
788 	if (dq_vref > 420)
789 		dq_vref = 420;
790 
791 	if (dq_vref <= 300)
792 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
793 	else
794 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
795 
796 	sw_set_req(dram);
797 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
798 			DDR_PCTL2_INIT6,
799 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
800 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
801 
802 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
803 			DDR_PCTL2_INIT7,
804 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
805 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
806 	sw_set_ack(dram);
807 }
808 
809 static void set_ds_odt(struct dram_info *dram,
810 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
811 {
812 	void __iomem *phy_base = dram->phy;
813 	void __iomem *pctl_base = dram->pctl;
814 	u32 dramtype = sdram_params->base.dramtype;
815 	struct ddr2_3_4_lp2_3_info *ddr_info;
816 	struct lp4_info *lp4_info;
817 	u32 i, j, tmp;
818 	const u16 (*p_drv)[2];
819 	const u16 (*p_odt)[2];
820 	u32 drv_info, sr_info;
821 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
822 	u32 phy_odt_ohm, dram_odt_ohm;
823 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
824 	u32 phy_odt_up_en, phy_odt_dn_en;
825 	u32 sr_dq, sr_clk;
826 	u32 freq = sdram_params->base.ddr_freq;
827 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
828 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
829 	u32 phy_dq_drv = 0;
830 	u32 phy_odt_up = 0, phy_odt_dn = 0;
831 
832 	ddr_info = get_ddr_drv_odt_info(dramtype);
833 	lp4_info = (void *)ddr_info;
834 
835 	if (!ddr_info)
836 		return;
837 
838 	/* dram odt en freq control phy drv, dram odt and phy sr */
839 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
840 		drv_info = ddr_info->drv_when_odtoff;
841 		dram_odt_ohm = 0;
842 		sr_info = ddr_info->sr_when_odtoff;
843 		phy_lp4_drv_pd_en =
844 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
845 	} else {
846 		drv_info = ddr_info->drv_when_odten;
847 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
848 		sr_info = ddr_info->sr_when_odten;
849 		phy_lp4_drv_pd_en =
850 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
851 	}
852 	phy_dq_drv_ohm =
853 		DRV_INFO_PHY_DQ_DRV(drv_info);
854 	phy_clk_drv_ohm =
855 		DRV_INFO_PHY_CLK_DRV(drv_info);
856 	phy_ca_drv_ohm =
857 		DRV_INFO_PHY_CA_DRV(drv_info);
858 
859 	sr_dq = DQ_SR_INFO(sr_info);
860 	sr_clk = CLK_SR_INFO(sr_info);
861 
862 	/* phy odt en freq control dram drv and phy odt */
863 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
864 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
865 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
866 		phy_odt_ohm = 0;
867 		phy_odt_up_en = 0;
868 		phy_odt_dn_en = 0;
869 	} else {
870 		dram_drv_ohm =
871 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
872 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
873 		phy_odt_up_en =
874 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
875 		phy_odt_dn_en =
876 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
877 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
878 	}
879 
880 	if (dramtype == LPDDR4) {
881 		if (phy_odt_ohm) {
882 			phy_odt_up_en = 0;
883 			phy_odt_dn_en = 1;
884 		}
885 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
886 			dram_caodt_ohm = 0;
887 		else
888 			dram_caodt_ohm =
889 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
890 	}
891 
892 	if (dramtype == DDR3) {
893 		p_drv = d3_phy_drv_2_ohm;
894 		p_odt = d3_phy_odt_2_ohm;
895 	} else if (dramtype == LPDDR4) {
896 		p_drv = lp4_phy_drv_2_ohm;
897 		p_odt = lp4_phy_odt_2_ohm;
898 	} else {
899 		p_drv = d4lp3_phy_drv_2_ohm;
900 		p_odt = d4lp3_phy_odt_2_ohm;
901 	}
902 
903 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
904 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
905 			phy_dq_drv = **(p_drv + i);
906 			break;
907 		}
908 		if (i == 0)
909 			break;
910 	}
911 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
912 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
913 			phy_clk_drv = **(p_drv + i);
914 			break;
915 		}
916 		if (i == 0)
917 			break;
918 	}
919 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
920 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
921 			phy_ca_drv = **(p_drv + i);
922 			break;
923 		}
924 		if (i == 0)
925 			break;
926 	}
927 	if (!phy_odt_ohm)
928 		phy_odt = 0;
929 	else
930 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
931 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
932 				phy_odt = **(p_odt + i);
933 				break;
934 			}
935 			if (i == 0)
936 				break;
937 		}
938 
939 	if (dramtype != LPDDR4) {
940 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
941 			vref_inner = 0x80;
942 		else if (phy_odt_up_en)
943 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
944 				     (dram_drv_ohm + phy_odt_ohm);
945 		else
946 			vref_inner = phy_odt_ohm * 128 /
947 				(phy_odt_ohm + dram_drv_ohm);
948 
949 		if (dramtype != DDR3 && dram_odt_ohm)
950 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
951 				   (phy_dq_drv_ohm + dram_odt_ohm);
952 		else
953 			vref_out = 0x80;
954 	} else {
955 		/* for lp4 */
956 		if (phy_odt_ohm)
957 			vref_inner =
958 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
959 				 256) / 1000;
960 		else
961 			vref_inner =
962 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
963 				 256) / 1000;
964 
965 		vref_out = 0x80;
966 	}
967 
968 	/* default ZQCALIB bypass mode */
969 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
970 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
971 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
972 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
973 	if (dramtype == LPDDR4) {
974 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
975 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
976 	} else {
977 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
978 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
979 	}
980 	/* clk / cmd slew rate */
981 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
982 
983 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
984 	if (phy_odt_up_en)
985 		phy_odt_up = phy_odt;
986 	if (phy_odt_dn_en)
987 		phy_odt_dn = phy_odt;
988 
989 	for (i = 0; i < 4; i++) {
990 		j = 0x110 + i * 0x10;
991 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
992 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
993 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
994 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
995 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
996 
997 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
998 				1 << 3, phy_lp4_drv_pd_en << 3);
999 		/* dq slew rate */
1000 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1001 				0x1f, sr_dq);
1002 	}
1003 
1004 	/* reg_rx_vref_value_update */
1005 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1006 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1007 
1008 	/* RAM VREF */
1009 	writel(vref_out, PHY_REG(phy_base, 0x105));
1010 	if (dramtype == LPDDR3)
1011 		udelay(100);
1012 
1013 	if (dramtype == LPDDR4)
1014 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1015 
1016 	if (dramtype == DDR3 || dramtype == DDR4) {
1017 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1018 				DDR_PCTL2_INIT3);
1019 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1020 	} else {
1021 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1022 				DDR_PCTL2_INIT4);
1023 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1024 	}
1025 
1026 	if (dramtype == DDR3) {
1027 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1028 		if (dram_drv_ohm == 34)
1029 			mr1_mr3 |= DDR3_DS_34;
1030 
1031 		if (dram_odt_ohm == 0)
1032 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1033 		else if (dram_odt_ohm <= 40)
1034 			mr1_mr3 |= DDR3_RTT_NOM_40;
1035 		else if (dram_odt_ohm <= 60)
1036 			mr1_mr3 |= DDR3_RTT_NOM_60;
1037 		else
1038 			mr1_mr3 |= DDR3_RTT_NOM_120;
1039 
1040 	} else if (dramtype == DDR4) {
1041 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1042 		if (dram_drv_ohm == 48)
1043 			mr1_mr3 |= DDR4_DS_48;
1044 
1045 		if (dram_odt_ohm == 0)
1046 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1047 		else if (dram_odt_ohm <= 34)
1048 			mr1_mr3 |= DDR4_RTT_NOM_34;
1049 		else if (dram_odt_ohm <= 40)
1050 			mr1_mr3 |= DDR4_RTT_NOM_40;
1051 		else if (dram_odt_ohm <= 48)
1052 			mr1_mr3 |= DDR4_RTT_NOM_48;
1053 		else if (dram_odt_ohm <= 60)
1054 			mr1_mr3 |= DDR4_RTT_NOM_60;
1055 		else
1056 			mr1_mr3 |= DDR4_RTT_NOM_120;
1057 
1058 	} else if (dramtype == LPDDR3) {
1059 		if (dram_drv_ohm <= 34)
1060 			mr1_mr3 |= LPDDR3_DS_34;
1061 		else if (dram_drv_ohm <= 40)
1062 			mr1_mr3 |= LPDDR3_DS_40;
1063 		else if (dram_drv_ohm <= 48)
1064 			mr1_mr3 |= LPDDR3_DS_48;
1065 		else if (dram_drv_ohm <= 60)
1066 			mr1_mr3 |= LPDDR3_DS_60;
1067 		else if (dram_drv_ohm <= 80)
1068 			mr1_mr3 |= LPDDR3_DS_80;
1069 
1070 		if (dram_odt_ohm == 0)
1071 			lp3_odt_value = LPDDR3_ODT_DIS;
1072 		else if (dram_odt_ohm <= 60)
1073 			lp3_odt_value = LPDDR3_ODT_60;
1074 		else if (dram_odt_ohm <= 120)
1075 			lp3_odt_value = LPDDR3_ODT_120;
1076 		else
1077 			lp3_odt_value = LPDDR3_ODT_240;
1078 	} else {/* for lpddr4 */
1079 		/* MR3 for lp4 PU-CAL and PDDS */
1080 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1081 		mr1_mr3 |= lp4_pu_cal;
1082 
1083 		tmp = lp4_odt_calc(dram_drv_ohm);
1084 		if (!tmp)
1085 			tmp = LPDDR4_PDDS_240;
1086 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1087 
1088 		/* MR11 for lp4 ca odt, dq odt set */
1089 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1090 			     DDR_PCTL2_INIT6);
1091 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1092 
1093 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1094 
1095 		tmp = lp4_odt_calc(dram_odt_ohm);
1096 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1097 
1098 		tmp = lp4_odt_calc(dram_caodt_ohm);
1099 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1100 		sw_set_req(dram);
1101 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1102 				DDR_PCTL2_INIT6,
1103 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1104 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1105 		sw_set_ack(dram);
1106 
1107 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1108 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1109 			     DDR_PCTL2_INIT7);
1110 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1111 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1112 
1113 		tmp = lp4_odt_calc(phy_odt_ohm);
1114 		mr22 |= tmp;
1115 		mr22 = mr22 |
1116 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1117 			LPDDR4_ODTE_CK_SHIFT) |
1118 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1119 			LPDDR4_ODTE_CS_SHIFT) |
1120 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1121 			LPDDR4_ODTD_CA_SHIFT);
1122 
1123 		sw_set_req(dram);
1124 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1125 				DDR_PCTL2_INIT7,
1126 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1127 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1128 		sw_set_ack(dram);
1129 	}
1130 
1131 	if (dramtype == DDR4 || dramtype == DDR3) {
1132 		sw_set_req(dram);
1133 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1134 				DDR_PCTL2_INIT3,
1135 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1136 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1137 		sw_set_ack(dram);
1138 	} else {
1139 		sw_set_req(dram);
1140 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1141 				DDR_PCTL2_INIT4,
1142 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1143 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1144 		sw_set_ack(dram);
1145 	}
1146 }
1147 
1148 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1149 				   struct rv1126_sdram_params *sdram_params)
1150 {
1151 	void __iomem *phy_base = dram->phy;
1152 	u32 dramtype = sdram_params->base.dramtype;
1153 	struct sdram_head_info_index_v2 *index =
1154 		(struct sdram_head_info_index_v2 *)common_info;
1155 	struct dq_map_info *map_info;
1156 
1157 	map_info = (struct dq_map_info *)((void *)common_info +
1158 		index->dq_map_index.offset * 4);
1159 
1160 	if (dramtype <= LPDDR4)
1161 		writel((map_info->byte_map[dramtype / 4] >>
1162 			((dramtype % 4) * 8)) & 0xff,
1163 		       PHY_REG(phy_base, 0x4f));
1164 
1165 	return 0;
1166 }
1167 
1168 static void phy_cfg(struct dram_info *dram,
1169 		    struct rv1126_sdram_params *sdram_params)
1170 {
1171 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1172 	void __iomem *phy_base = dram->phy;
1173 	u32 i, dq_map, tmp;
1174 	u32 byte1 = 0, byte0 = 0;
1175 
1176 	sdram_cmd_dq_path_remap(dram, sdram_params);
1177 
1178 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
1179 	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
1180 		writel(sdram_params->phy_regs.phy[i][1],
1181 		       phy_base + sdram_params->phy_regs.phy[i][0]);
1182 	}
1183 
1184 	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
1185 	dq_map = readl(PHY_REG(phy_base, 0x4f));
1186 	for (i = 0; i < 4; i++) {
1187 		if (((dq_map >> (i * 2)) & 0x3) == 0)
1188 			byte0 = i;
1189 		if (((dq_map >> (i * 2)) & 0x3) == 1)
1190 			byte1 = i;
1191 	}
1192 
1193 	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
1194 	if (cap_info->bw == 2)
1195 		tmp |= 0xf;
1196 	else if (cap_info->bw == 1)
1197 		tmp |= ((1 << byte0) | (1 << byte1));
1198 	else
1199 		tmp |= (1 << byte0);
1200 
1201 	writel(tmp, PHY_REG(phy_base, 0xf));
1202 
1203 	/* lpddr4 odt control by phy, enable cs0 odt */
1204 	if (sdram_params->base.dramtype == LPDDR4)
1205 		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
1206 				(1 << 6) | (1 << 4));
1207 	/* for ca training ca vref choose range1 */
1208 	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
1209 	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
1210 	/* for wr training PHY_0x7c[5], choose range0 */
1211 	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
1212 }
1213 
1214 static int update_refresh_reg(struct dram_info *dram)
1215 {
1216 	void __iomem *pctl_base = dram->pctl;
1217 	u32 ret;
1218 
1219 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1220 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1221 
1222 	return 0;
1223 }
1224 
1225 /*
1226  * rank = 1: cs0
1227  * rank = 2: cs1
1228  */
1229 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1230 {
1231 	u32 ret;
1232 	u32 i, temp;
1233 	u32 dqmap;
1234 
1235 	void __iomem *pctl_base = dram->pctl;
1236 	struct sdram_head_info_index_v2 *index =
1237 		(struct sdram_head_info_index_v2 *)common_info;
1238 	struct dq_map_info *map_info;
1239 
1240 	map_info = (struct dq_map_info *)((void *)common_info +
1241 		index->dq_map_index.offset * 4);
1242 
1243 	if (dramtype == LPDDR2)
1244 		dqmap = map_info->lp2_dq0_7_map;
1245 	else
1246 		dqmap = map_info->lp3_dq0_7_map;
1247 
1248 	pctl_read_mr(pctl_base, rank, mr_num);
1249 
1250 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1251 
1252 	if (dramtype != LPDDR4) {
1253 		temp = 0;
1254 		for (i = 0; i < 8; i++) {
1255 			temp = temp | (((ret >> i) & 0x1) <<
1256 				       ((dqmap >> (i * 4)) & 0xf));
1257 		}
1258 	} else {
1259 		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1260 	}
1261 
1262 	return temp;
1263 }
1264 
1265 /* before call this function autorefresh should be disabled */
1266 void send_a_refresh(struct dram_info *dram)
1267 {
1268 	void __iomem *pctl_base = dram->pctl;
1269 
1270 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1271 		continue;
1272 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1273 }
1274 
1275 static void enter_sr(struct dram_info *dram, u32 en)
1276 {
1277 	void __iomem *pctl_base = dram->pctl;
1278 
1279 	if (en) {
1280 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1281 		while (1) {
1282 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1283 			      PCTL2_SELFREF_TYPE_MASK) ==
1284 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1285 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1286 			      PCTL2_OPERATING_MODE_MASK) ==
1287 			     PCTL2_OPERATING_MODE_SR))
1288 				break;
1289 		}
1290 	} else {
1291 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1292 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1293 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1294 			continue;
1295 	}
1296 }
1297 
1298 void record_dq_prebit(struct dram_info *dram)
1299 {
1300 	u32 group, i, tmp;
1301 	void __iomem *phy_base = dram->phy;
1302 
1303 	for (group = 0; group < 4; group++) {
1304 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1305 			/* l_loop_invdelaysel */
1306 			writel(dq_sel[i][0], PHY_REG(phy_base,
1307 						     grp_addr[group] + 0x2c));
1308 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1309 			writel(tmp, PHY_REG(phy_base,
1310 					    grp_addr[group] + dq_sel[i][1]));
1311 
1312 			/* r_loop_invdelaysel */
1313 			writel(dq_sel[i][0], PHY_REG(phy_base,
1314 						     grp_addr[group] + 0x2d));
1315 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1316 			writel(tmp, PHY_REG(phy_base,
1317 					    grp_addr[group] + dq_sel[i][2]));
1318 		}
1319 	}
1320 }
1321 
/*
 * Latch updated RX DQ de-skew values into the PHY: with 0x70 bits 1 and
 * 6 cleared, pulse bit 4 for ~1us (mirrors update_dq_tx_prebit /
 * update_ca_prebit).
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1331 
/*
 * Latch updated TX DQ de-skew values into the PHY: disable write
 * training (0x7a[0] cleared... here bit 1), set 0x2[3], then pulse
 * 0xc[6] for ~1us.
 * NOTE(review): bit meanings inferred from the sibling helpers and the
 * write-training code below — confirm against the PHY datasheet.
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1342 
/*
 * Latch updated CA de-skew values into the PHY: with 0x25[2] cleared,
 * pulse 0x22[6] for ~1us (same strobe pattern as the DQ prebit update
 * helpers above).
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1352 
1353 /*
1354  * dir: 0: de-skew = delta_*
1355  *	1: de-skew = reg val - delta_*
1356  * delta_dir: value for differential signal: clk/
1357  * delta_sig: value for single signal: ca/cmd
1358  */
1359 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1360 			     int delta_sig, u32 cs, u32 dramtype)
1361 {
1362 	void __iomem *phy_base = dram->phy;
1363 	u32 i, cs_en, tmp;
1364 	u32 dfi_lp_stat = 0;
1365 
1366 	if (cs == 0)
1367 		cs_en = 1;
1368 	else if (cs == 2)
1369 		cs_en = 2;
1370 	else
1371 		cs_en = 3;
1372 
1373 	if (dramtype == LPDDR4 &&
1374 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1375 		dfi_lp_stat = 1;
1376 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1377 	}
1378 	enter_sr(dram, 1);
1379 
1380 	for (i = 0; i < 0x20; i++) {
1381 		if (dir == DESKEW_MDF_ABS_VAL)
1382 			tmp = delta_sig;
1383 		else
1384 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1385 			      delta_sig;
1386 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1387 	}
1388 
1389 	if (dir == DESKEW_MDF_ABS_VAL)
1390 		tmp = delta_dif;
1391 	else
1392 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1393 		       delta_sig + delta_dif;
1394 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1395 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1396 	if (dramtype == LPDDR4) {
1397 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1398 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1399 
1400 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1401 		update_ca_prebit(dram);
1402 	}
1403 	enter_sr(dram, 0);
1404 
1405 	if (dfi_lp_stat)
1406 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1407 
1408 }
1409 
1410 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1411 {
1412 	u32 i, j, offset = 0;
1413 	u32 min = 0x3f;
1414 	void __iomem *phy_base = dram->phy;
1415 	u32 byte_en;
1416 
1417 	if (signal == SKEW_TX_SIGNAL)
1418 		offset = 8;
1419 
1420 	if (signal == SKEW_CA_SIGNAL) {
1421 		for (i = 0; i < 0x20; i++)
1422 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1423 	} else {
1424 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1425 		for (j = offset; j < offset + rank * 4; j++) {
1426 			if (!((byte_en >> (j % 4)) & 1))
1427 				continue;
1428 			for (i = 0; i < 11; i++)
1429 				min = MIN(min,
1430 					  readl(PHY_REG(phy_base,
1431 							dqs_dq_skew_adr[j] +
1432 							i)));
1433 		}
1434 	}
1435 
1436 	return min;
1437 }
1438 
1439 static u32 low_power_update(struct dram_info *dram, u32 en)
1440 {
1441 	void __iomem *pctl_base = dram->pctl;
1442 	u32 lp_stat = 0;
1443 
1444 	if (en) {
1445 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1446 	} else {
1447 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1448 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1449 	}
1450 
1451 	return lp_stat;
1452 }
1453 
1454 /*
1455  * signal:
1456  * dir: 0: de-skew = delta_*
1457  *	1: de-skew = reg val - delta_*
1458  * delta_dir: value for differential signal: dqs
1459  * delta_sig: value for single signal: dq/dm
1460  */
1461 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1462 			     int delta_dif, int delta_sig, u32 rank)
1463 {
1464 	void __iomem *phy_base = dram->phy;
1465 	u32 i, j, tmp, offset;
1466 	u32 byte_en;
1467 
1468 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1469 
1470 	if (signal == SKEW_RX_SIGNAL)
1471 		offset = 0;
1472 	else
1473 		offset = 8;
1474 
1475 	for (j = offset; j < (offset + rank * 4); j++) {
1476 		if (!((byte_en >> (j % 4)) & 1))
1477 			continue;
1478 		for (i = 0; i < 0x9; i++) {
1479 			if (dir == DESKEW_MDF_ABS_VAL)
1480 				tmp = delta_sig;
1481 			else
1482 				tmp = delta_sig + readl(PHY_REG(phy_base,
1483 							dqs_dq_skew_adr[j] +
1484 							i));
1485 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1486 		}
1487 		if (dir == DESKEW_MDF_ABS_VAL)
1488 			tmp = delta_dif;
1489 		else
1490 			tmp = delta_dif + readl(PHY_REG(phy_base,
1491 						dqs_dq_skew_adr[j] + 9));
1492 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1493 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1494 	}
1495 	if (signal == SKEW_RX_SIGNAL)
1496 		update_dq_rx_prebit(dram);
1497 	else
1498 		update_dq_tx_prebit(dram);
1499 }
1500 
/*
 * Read gate training for one chip-select.
 *
 * Returns 0 on success, -1 when the PHY flags a training error
 * (0x91[5]), or a non-zero lane bitmask when the set of lanes that
 * completed does not match the enabled lanes (0xf register).
 *
 * For non-LPDDR4 types the per-lane PHY ODT is temporarily forced to
 * 294 ohm (pull-down) / disabled (pull-up) and restored afterwards.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save the current ODT settings so they can be restored */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* 0x91[5] = error; else XOR done lanes against enabled lanes */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1553 
/*
 * Write leveling for one chip-select.
 *
 * The MR1 value of the current fsp is handed to the PHY (regs 0x3/0x4)
 * so it can issue the required MRS itself; on dual-rank DDR3/DDR4 the
 * other rank's output is disabled via MR1 bit 12 for the duration.
 * Returns 0 on success; on timeout (~1ms) it prints a message and hangs
 * forever (deliberate: DRAM is unusable at this point).
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch MR1 for the current fsp and pass it to the PHY */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until all enabled byte lanes (0xf) report done (0x92) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1616 
/*
 * 32-byte pattern of alternating 0xaa/0x55 bytes.
 * NOTE(review): defined with external linkage and not referenced within
 * this chunk; presumably used as a training/test data pattern elsewhere
 * in the file — confirm before changing.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1623 
/*
 * Automatic read training for one chip-select (cs must be 0 or 1).
 *
 * Mirrors the controller's refresh timing (RFSHTMG of the current fsp)
 * into the PHY, programs the DDR4 DQ map from common_info, runs the
 * PHY's auto read train and checks the result registers.
 * Returns 0 on success, -1 on invalid cs, timeout or training error.
 *
 * On DDR3 with the default 0x80 rx vref, the vref is lowered by 0xa
 * during training and restored afterwards.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the DDR3 rx vref that was lowered above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1740 
/*
 * Automatic write training for one chip-select.
 *
 * Mirrors the controller's refresh timing into the PHY, configures the
 * training address registers, then runs the PHY's auto write train.
 * For LPDDR4 the trained write vref (average of regs 0x384/0x385, plus
 * the range bit from 0x7c[5]) is saved into fsp_param[dst_fsp] for
 * later dfs use.
 *
 * On LPDDR3 at <= 400MHz the PHY CL/CWL are temporarily forced to 8/4
 * (with MR2 set to 0x6) for training and restored afterwards.
 *
 * Returns 0 on success, -1 on training error; on timeout it prints a
 * message and hangs forever (deliberate: DRAM is unusable).
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	if (dramtype == LPDDR3 && mhz <= 400) {
		/* save CL/CWL of the active phy fsp, then force 8/4 */
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait for the train-done flag (0x92[7]) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the LPDDR3 CL/CWL and MR2 saved above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1853 
1854 static int data_training(struct dram_info *dram, u32 cs,
1855 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1856 			 u32 training_flag)
1857 {
1858 	u32 ret = 0;
1859 
1860 	if (training_flag == FULL_TRAINING)
1861 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1862 				WRITE_TRAINING | READ_TRAINING;
1863 
1864 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1865 		ret = data_training_wl(dram, cs,
1866 				       sdram_params->base.dramtype,
1867 				       sdram_params->ch.cap_info.rank);
1868 		if (ret != 0)
1869 			goto out;
1870 	}
1871 
1872 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1873 		ret = data_training_rg(dram, cs,
1874 				       sdram_params->base.dramtype);
1875 		if (ret != 0)
1876 			goto out;
1877 	}
1878 
1879 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1880 		ret = data_training_rd(dram, cs,
1881 				       sdram_params->base.dramtype,
1882 				       sdram_params->base.ddr_freq);
1883 		if (ret != 0)
1884 			goto out;
1885 	}
1886 
1887 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1888 		ret = data_training_wr(dram, cs,
1889 				       sdram_params->base.dramtype,
1890 				       sdram_params->base.ddr_freq, dst_fsp);
1891 		if (ret != 0)
1892 			goto out;
1893 	}
1894 
1895 out:
1896 	return ret;
1897 }
1898 
1899 static int get_wrlvl_val(struct dram_info *dram,
1900 			 struct rv1126_sdram_params *sdram_params)
1901 {
1902 	u32 i, j, clk_skew;
1903 	void __iomem *phy_base = dram->phy;
1904 	u32 lp_stat;
1905 	int ret;
1906 
1907 	lp_stat = low_power_update(dram, 0);
1908 
1909 	clk_skew = 0x1f;
1910 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1911 			 sdram_params->base.dramtype);
1912 
1913 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1914 	if (sdram_params->ch.cap_info.rank == 2)
1915 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1916 
1917 	for (j = 0; j < 2; j++)
1918 		for (i = 0; i < 4; i++)
1919 			wrlvl_result[j][i] =
1920 				readl(PHY_REG(phy_base,
1921 					      wrlvl_result_offset[j][i])) -
1922 				clk_skew;
1923 
1924 	low_power_update(dram, lp_stat);
1925 
1926 	return ret;
1927 }
1928 
1929 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1930 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1931 				      void __iomem *phy_base, u8 cs_num)
1932 {
1933 	int i;
1934 
1935 	result->cs_num = cs_num;
1936 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1937 			  PHY_DQ_WIDTH_MASK;
1938 	for (i = 0; i < FSP_NUM; i++)
1939 		result->fsp_mhz[i] = 0;
1940 }
1941 
/*
 * Snapshot the per-bit read/write training min/max values from the PHY
 * for one rank, skipping byte lanes that are not enabled.
 *
 * @phy_base:  PHY register base
 * @rd_result: destination for read-training per-DQ min/max
 * @wr_result: destination for write-training per-DQ min/max
 * @byte_en:   bitmask of active byte lanes (bit n = lane n)
 */
static void save_rw_trn_min_max(void __iomem *phy_base,
				struct cs_rw_trn_result *rd_result,
				struct cs_rw_trn_result *wr_result,
				u8 byte_en)
{
	u16 phy_ofs;
	u8 dqs;
	u8 dq;

	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
		if ((byte_en & BIT(dqs)) == 0)
			continue;

		/* Channel A or B (low or high 16 bit) */
		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
		/* low or high 8 bit */
		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
		/*
		 * Per-bit result registers relative to the lane base:
		 * +0x15/+0x27 read min/max, +0x3d/+0x4f write min/max.
		 */
		for (dq = 0; dq < 8; dq++) {
			rd_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
			rd_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
			wr_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
			wr_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
		}
	}
}
1971 
/*
 * Save the per-bit DQ deskew and per-lane DQS deskew values currently
 * programmed in the PHY, for every chip select.
 *
 * @phy_base: PHY register base
 * @result:   destination for the deskew snapshot
 * @cs_num:   number of chip selects to read
 * @min_val:  normalization offset that was applied, recorded alongside
 * @rw:       SKEW_RX_SIGNAL (0) for the RX set, otherwise the TX set
 */
static void save_rw_trn_deskew(void __iomem *phy_base,
			       struct fsp_rw_trn_result *result, u8 cs_num,
			       int min_val, bool rw)
{
	u16 phy_ofs;
	u8 cs;
	u8 dq;

	result->min_val = min_val;

	for (cs = 0; cs < cs_num; cs++) {
		/* CS0 block at 0x170, CS1 block at 0x1a0 */
		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
		/* RX deskew registers at +0x1, TX at +0x17 */
		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
		/* Lanes 0/1 at +0x0/+0xb; lanes 2/3 mirrored at +0x60 */
		for (dq = 0; dq < 8; dq++) {
			result->cs[cs].dqs[0].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + dq));
			result->cs[cs].dqs[1].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
			result->cs[cs].dqs[2].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
			result->cs[cs].dqs[3].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
		}

		/* DQS deskew sits at offset +0x8 within each lane block */
		result->cs[cs].dqs[0].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x8));
		result->cs[cs].dqs[1].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
		result->cs[cs].dqs[2].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
		result->cs[cs].dqs[3].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
	}
}
2006 
/*
 * Publish the training results: mark the structure valid with the
 * DDR_DQ_EYE_FLAG magic, then copy it to the fixed DRAM address where
 * the ddr test tool expects to find it.  The flag is set before the
 * copy so the in-DRAM copy is self-describing.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2012 #endif
2013 
/*
 * Train the PHY at the high-frequency set point @fsp.
 *
 * Uses the write-leveling results captured earlier at low speed
 * (wrlvl_result[][]) to derive an average DQS skew, recenters the
 * CLK/CA deskew around it, replays the per-byte write-leveling values,
 * then runs gate/read/write training for each rank.  Afterwards the
 * per-bit RX and TX deskew values are shifted so the smallest trained
 * value becomes the new zero reference, and gate training is re-run
 * to account for the shifted timing.
 *
 * Returns 0 on success, non-zero if any training step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	/* Average the write-leveling result over active ranks and lanes */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       (1 << sdram_params->ch.cap_info.bw));

	/* Recenter: absorb the average DQS skew into the clock deskew */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* Replay rank-0 write-leveling values (one PHY register per byte) */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* Same sequence again with the rank-1 leveling values */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* Shift RX deskew down so the minimum trained value becomes 0 */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* Same normalization for TX, bounded by the CA minimum as well */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* Deskew changed the timing window: redo gate training */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2116 
2117 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2118 {
2119 	writel(ddrconfig, &dram->msch->deviceconf);
2120 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2121 }
2122 
/*
 * Write the NOC/scheduler timing registers.  The burstsize, burst
 * penalty and (LPDDR4) masked-write size depend on the data-bus width
 * and the controller burst length, so those fields are recomputed here
 * before the precomputed register images are written out.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* bw: bus width in bits; bl: burst length from MSTR[19:16] * 2 */
	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4) {
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
2164 
/*
 * Final system-level configuration after detection: program the
 * ddrconfig into the scheduler, encode the detected geometry into
 * pmugrf os_reg2/3 for later boot stages, set the per-CS device size
 * in the scheduler and refresh the NOC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/*
		 * cs_pst: bit position of the chip-select in the address
		 * map.  If CS sits above bit 28, the scheduler needs CS0
		 * reported as the full region below the CS bit.
		 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize: per-CS capacity scaled to 64MB units, CS1 in [15:8] */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2197 
/*
 * Enable the automatic low-power features: select the hardware
 * low-power request type for the DRAM type in the DDR GRF, and enable
 * controller self-refresh / power-down entry according to the
 * configured idle counters.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* Bits [2:0] pick the low-power request mode per DRAM type
	 * (bit 1: DDR4, bit 0: DDR3, bit 2: LPDDR2/3/4); the upper
	 * halfword is the write-enable mask for those bits.
	 */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2228 
2229 static void ddr_set_atags(struct dram_info *dram,
2230 			  struct rv1126_sdram_params *sdram_params)
2231 {
2232 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2233 	u32 dram_type = sdram_params->base.dramtype;
2234 	void __iomem *pctl_base = dram->pctl;
2235 	struct tag_serial t_serial;
2236 	struct tag_ddr_mem t_ddrmem;
2237 	struct tag_soc_info t_socinfo;
2238 	u64 cs_cap[2];
2239 	u32 cs_pst = 0;
2240 
2241 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2242 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2243 
2244 	memset(&t_serial, 0, sizeof(struct tag_serial));
2245 
2246 	t_serial.version = 0;
2247 	t_serial.enable = 1;
2248 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2249 	t_serial.baudrate = CONFIG_BAUDRATE;
2250 	t_serial.m_mode = SERIAL_M_MODE_M0;
2251 	t_serial.id = 2;
2252 
2253 	atags_destroy();
2254 	atags_set_tag(ATAG_SERIAL,  &t_serial);
2255 
2256 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2257 	if (cap_info->row_3_4) {
2258 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2259 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2260 	}
2261 	t_ddrmem.version = 0;
2262 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2263 	if (cs_cap[1]) {
2264 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2265 			6 + 2;
2266 	}
2267 
2268 	if (cs_cap[1] && cs_pst > 27) {
2269 		t_ddrmem.count = 2;
2270 		t_ddrmem.bank[1] = 1 << cs_pst;
2271 		t_ddrmem.bank[2] = cs_cap[0];
2272 		t_ddrmem.bank[3] = cs_cap[1];
2273 	} else {
2274 		t_ddrmem.count = 1;
2275 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
2276 	}
2277 
2278 	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);
2279 
2280 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2281 	t_socinfo.version = 0;
2282 	t_socinfo.name = 0x1126;
2283 }
2284 
2285 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2286 {
2287 	u32 split;
2288 
2289 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2290 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2291 		split = 0;
2292 	else
2293 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2294 			SPLIT_SIZE_MASK;
2295 
2296 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2297 			     &sdram_params->base, split);
2298 }
2299 
/*
 * Bring the DDR subsystem up with the given parameter set: clocks,
 * staged de-assertion of the resets, PHY and controller programming,
 * mode-register writes and initial gate training.
 *
 * @post_init: 0 for the first (blind) init pass; non-zero once the
 *             real geometry is known — enables error prints and the
 *             rank-1 training check.
 * Returns 0 on success, -1 if gate training failed.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* Staged reset release: assert all, then configure each block
	 * while the downstream resets are still held.
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2)
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* Extended temperature range: halve the refresh interval */
	u32 tmp, trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* Wait for the controller to leave the init state */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* Re-issue the ODT/Vref mode registers from INIT6/INIT7 */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* Propagate the trained Vref (PHY 0x105) to the DRAM side */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2403 
/*
 * Detect the DRAM geometry (column/bank/row counts, bus width, rank)
 * by probing with read-gate training under progressively narrowed
 * settings.  LPDDR4 geometry is decoded from MR8 instead of probed.
 *
 * Returns 0 on success, (u64)-1 (non-zero) on detection failure.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;
	u32 i, dq_map;
	u32 byte1 = 0, byte0 = 0;

	/* Start with the widest assumption that fits the DRAM type */
	cap_info->bw = dram_type == DDR3 ? 0 : 1;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed column/bank layout, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: density is reported by the device in MR8[5:2] */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* Disable auto low-power while probing; restore afterwards */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* Rank detect: training succeeds on CS1 only if it exists */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/* Bus-width detect: try 32-bit first, then find which two
		 * byte lanes respond and retry as 16-bit, else 8-bit.
		 */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0) {
			cap_info->bw = 2;
		} else {
			dq_map = readl(PHY_REG(phy_base, 0x4f));
			for (i = 0; i < 4; i++) {
				if (((dq_map >> (i * 2)) & 0x3) == 0)
					byte0 = i;
				if (((dq_map >> (i * 2)) & 0x3) == 1)
					byte1 = i;
			}
			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
					BIT(byte0) | BIT(byte1));
			if (data_training(dram, 0, sdram_params, 0,
					  READ_GATE_TRAINING) == 0)
				cap_info->bw = 1;
			else
				cap_info->bw = 0;
		}
		if (cap_info->bw > 0)
			cap_info->dbw = 1;
	}

	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* Assume CS1 mirrors CS0; refined later by dram_detect_cs1_row() */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2514 
2515 static int dram_detect_cs1_row(struct dram_info *dram,
2516 			       struct rv1126_sdram_params *sdram_params,
2517 			       unsigned char channel)
2518 {
2519 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2520 	void __iomem *pctl_base = dram->pctl;
2521 	u32 ret = 0;
2522 	void __iomem *test_addr;
2523 	u32 row, bktmp, coltmp, bw;
2524 	u64 cs0_cap;
2525 	u32 byte_mask;
2526 	u32 cs_pst;
2527 	u32 cs_add = 0;
2528 	u32 max_row;
2529 
2530 	if (cap_info->rank == 2) {
2531 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2532 			6 + 2;
2533 		if (cs_pst < 28)
2534 			cs_add = 1;
2535 
2536 		cs0_cap = 1 << cs_pst;
2537 
2538 		if (sdram_params->base.dramtype == DDR4) {
2539 			if (cap_info->dbw == 0)
2540 				bktmp = cap_info->bk + 2;
2541 			else
2542 				bktmp = cap_info->bk + 1;
2543 		} else {
2544 			bktmp = cap_info->bk;
2545 		}
2546 		bw = cap_info->bw;
2547 		coltmp = cap_info->col;
2548 
2549 		if (bw == 2)
2550 			byte_mask = 0xFFFF;
2551 		else
2552 			byte_mask = 0xFF;
2553 
2554 		max_row = (cs_pst == 31) ? 30 : 31;
2555 
2556 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2557 
2558 		row = (cap_info->cs0_row > max_row) ? max_row :
2559 			cap_info->cs0_row;
2560 
2561 		for (; row > 12; row--) {
2562 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2563 				    (u32)cs0_cap +
2564 				    (1ul << (row + bktmp + coltmp +
2565 					     cs_add + bw - 1ul)));
2566 
2567 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2568 			writel(PATTERN, test_addr);
2569 
2570 			if (((readl(test_addr) & byte_mask) ==
2571 			     (PATTERN & byte_mask)) &&
2572 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2573 			      byte_mask) == 0)) {
2574 				ret = row;
2575 				break;
2576 			}
2577 		}
2578 	}
2579 
2580 	return ret;
2581 }
2582 
/*
 * Full init-and-detect flow: bring the controller up with the default
 * geometry, detect the real capacity, re-initialize with the detected
 * parameters, then refine the CS1 row count and record it in the
 * pmugrf os_reg encoding.
 *
 * return: 0 = success, other = fail
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (sdram_init_(dram, sdram_params, 0)) {
		/*
		 * For DDR3, retry once with an alternative byte map in
		 * case the board swaps the byte lanes.
		 */
		if (sdram_params->base.dramtype == DDR3) {
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					(0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	/* Basic write/read sanity check before sizing */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* Re-init with the detected geometry applied to the PCTL regs */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* Record the refined CS1 row count in os_reg2/3 */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info);

out:
	return ret;
}
2641 
2642 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2643 {
2644 	u32 i;
2645 	u32 offset = 0;
2646 	struct ddr2_3_4_lp2_3_info *ddr_info;
2647 
2648 	if (!freq_mhz) {
2649 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2650 		if (ddr_info)
2651 			freq_mhz =
2652 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2653 				DDR_FREQ_MASK;
2654 		else
2655 			freq_mhz = 0;
2656 	}
2657 
2658 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2659 		if (sdram_configs[i].base.ddr_freq == 0 ||
2660 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2661 			break;
2662 	}
2663 	offset = i == 0 ? 0 : i - 1;
2664 
2665 	return &sdram_configs[offset];
2666 }
2667 
/*
 * PCTL timing registers that are frequency-dependent and must be
 * copied into the target FSP's shadow register set by pre_set_rate().
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2691 
/*
 * PHY registers (CL/CWL/AL group) that must be copied into the target
 * FSP's PHY shadow area by pre_set_rate().
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2697 
/*
 * Prepare the shadow register sets for a later frequency switch to
 * @dst_fsp without performing the switch itself: copy the FSP-dependent
 * PCTL timing registers into the target frequency register group, copy
 * the CL/CWL/AL PHY registers into the target PHY shadow area, refresh
 * drive/ODT settings, write the LPDDR4 mode registers for the target
 * set point, and update the NOC timings.
 *
 * @dst_fsp:     target frequency set point index (controller side)
 * @dst_fsp_lp4: target FSP encoding used for the LPDDR4 MR13 FSP bits
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	/* Quasi-dynamic registers need the software-request handshake */
	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		/* pctl_regs is sorted; resume scanning from last match */
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* Extended temperature range: halve the refresh interval */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/*
		 * Write the target-FSP mode registers to the DRAM and
		 * mirror each value into the PHY's MR shadow registers
		 * (0x17..0x1d) for the frequency-switch sequence.
		 */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2823 
/*
 * Capture everything needed to re-program frequency set point
 * @dst_fsp at runtime into the global fsp_param[] table: ODT enables
 * and values, drive strengths, Vref settings (decoded from the PHY
 * and from the FSP's INIT3/INIT4/INIT6 mode-register images), and the
 * NOC timings.  The entry is marked valid with FSP_FLAG.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	if (sdram_params->base.dramtype == LPDDR4) {
		/* LPDDR4 read ODT is always pull-down only */
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* PHY 0x110/0x111: pull-down/pull-up read ODT values */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* Decode drive strength / ODT from the FSP's MR images */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * CA Vref: midpoint of the trained windows from the two
		 * CA training result register pairs, per channel.
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		/* Preserve the range-select bit from PHY reg 0x1e */
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	p_fsp_param->flag = FSP_FLAG;
}
2937 
2938 #ifndef CONFIG_SPL_KERNEL_BOOT
2939 static void copy_fsp_param_to_ddr(void)
2940 {
2941 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
2942 	       sizeof(fsp_param));
2943 }
2944 #endif
2945 
2946 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
2947 			     struct sdram_cap_info *cap_info, u32 dram_type,
2948 			     u32 freq)
2949 {
2950 	u64 cs0_cap;
2951 	u32 die_cap;
2952 	u32 trfc_ns, trfc4_ns;
2953 	u32 trfc, txsnr;
2954 	u32 txs_abort_fast = 0;
2955 	u32 tmp;
2956 
2957 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
2958 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
2959 
2960 	switch (dram_type) {
2961 	case DDR3:
2962 		if (die_cap <= DIE_CAP_512MBIT)
2963 			trfc_ns = 90;
2964 		else if (die_cap <= DIE_CAP_1GBIT)
2965 			trfc_ns = 110;
2966 		else if (die_cap <= DIE_CAP_2GBIT)
2967 			trfc_ns = 160;
2968 		else if (die_cap <= DIE_CAP_4GBIT)
2969 			trfc_ns = 260;
2970 		else
2971 			trfc_ns = 350;
2972 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
2973 		break;
2974 
2975 	case DDR4:
2976 		if (die_cap <= DIE_CAP_2GBIT) {
2977 			trfc_ns = 160;
2978 			trfc4_ns = 90;
2979 		} else if (die_cap <= DIE_CAP_4GBIT) {
2980 			trfc_ns = 260;
2981 			trfc4_ns = 110;
2982 		} else if (die_cap <= DIE_CAP_8GBIT) {
2983 			trfc_ns = 350;
2984 			trfc4_ns = 160;
2985 		} else {
2986 			trfc_ns = 550;
2987 			trfc4_ns = 260;
2988 		}
2989 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
2990 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
2991 		break;
2992 
2993 	case LPDDR3:
2994 		if (die_cap <= DIE_CAP_4GBIT)
2995 			trfc_ns = 130;
2996 		else
2997 			trfc_ns = 210;
2998 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
2999 		break;
3000 
3001 	case LPDDR4:
3002 	case LPDDR4X:
3003 		if (die_cap <= DIE_CAP_4GBIT)
3004 			trfc_ns = 130;
3005 		else if (die_cap <= DIE_CAP_8GBIT)
3006 			trfc_ns = 180;
3007 		else if (die_cap <= DIE_CAP_16GBIT)
3008 			trfc_ns = 280;
3009 		else
3010 			trfc_ns = 380;
3011 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3012 		break;
3013 
3014 	default:
3015 		return;
3016 	}
3017 	trfc = (trfc_ns * freq + 999) / 1000;
3018 
3019 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3020 		switch (pctl_regs->pctl[i][0]) {
3021 		case DDR_PCTL2_RFSHTMG:
3022 			tmp = pctl_regs->pctl[i][1];
3023 			/* t_rfc_min */
3024 			tmp &= ~((u32)0x3ff);
3025 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3026 			pctl_regs->pctl[i][1] = tmp;
3027 			break;
3028 
3029 		case DDR_PCTL2_DRAMTMG8:
3030 			if (dram_type == DDR3 || dram_type == DDR4) {
3031 				tmp = pctl_regs->pctl[i][1];
3032 				/* t_xs_x32 */
3033 				tmp &= ~((u32)0x7f);
3034 				tmp |= ((txsnr + 63) / 64) & 0x7f;
3035 
3036 				if (dram_type == DDR4) {
3037 					/* t_xs_abort_x32 */
3038 					tmp &= ~((u32)(0x7f << 16));
3039 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3040 					/* t_xs_fast_x32 */
3041 					tmp &= ~((u32)(0x7f << 24));
3042 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3043 				}
3044 
3045 				pctl_regs->pctl[i][1] = tmp;
3046 			}
3047 			break;
3048 
3049 		case DDR_PCTL2_DRAMTMG14:
3050 			if (dram_type == LPDDR3 ||
3051 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3052 				tmp = pctl_regs->pctl[i][1];
3053 				/* t_xsr */
3054 				tmp &= ~((u32)0xfff);
3055 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3056 				pctl_regs->pctl[i][1] = tmp;
3057 			}
3058 			break;
3059 
3060 		default:
3061 			break;
3062 		}
3063 	}
3064 }
3065 
/*
 * Switch the DDR subsystem to a new operating frequency.
 *
 * dram:         driver state (controller/PHY/CRU/GRF register bases)
 * sdram_params: parameters of the currently-running configuration
 * freq:         target frequency in MHz
 * cur_freq:     previous frequency in MHz (not referenced in this body)
 * dst_fsp:      destination controller frequency-set-point register group
 * dst_fsp_lp4:  FSP value written into LPDDR4 MR13 OP[7]
 * training_en:  training request flag (not referenced in this body;
 *               high_freq_training() is always run)
 *
 * Sequence: load a default config for the target frequency, enter
 * self-refresh, relock the DPLL and PHY PLL with the PHY held in reset,
 * exit self-refresh, rewrite the DRAM mode registers from the destination
 * FSP's INIT3/4/6/7 images, then re-run training.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* Disable low-power entry during the switch; restored at the end. */
	lp_stat = low_power_update(dram, 0);
	/*
	 * Start from the canned config for the target frequency, keeping
	 * the detected rank and bus width of the fitted parts.
	 */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* Spin until the controller has left self-refresh. */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * Destination DLL-off mode: DDR3 with MR1 bit0 set, or DDR4 with
	 * MR1 bit0 clear (the bit has opposite polarity on the two types).
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	/* Same decode for the currently-selected FSP's MR1 image. */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* DLL currently on: write MR1 with the DLL-off setting first. */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	/* Block auto-refresh while the frequency changes. */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* Keep the PHY bufferen asserted while controller clocks change. */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* Skip ZQCL on self-refresh exit for both old and new FSP. */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* Gate scheduler and controller clocks around the PLL change. */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* Relock DPLL and PHY PLL with the PHY held in reset. */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* Release bufferen and ungate the clocks again. */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* Wait for the DFI handshake with the PHY to complete. */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* Select the destination FSP in both controller and PHY. */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/*
	 * Pulse PHY reg 0x71 bit5.  NOTE(review): looks like a PHY-internal
	 * reset/latch strobe - confirm against the PHY documentation.
	 */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/*
	 * Rewrite the DRAM mode registers from the destination FSP's
	 * INIT3/4 (and, for DDR4, INIT6/7) images.
	 */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			/* DLL stays on: issue MR0 with DLL reset first. */
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13 with OP[7] replaced by the requested LP4 FSP. */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	/* Re-enable auto-refresh now that the switch is done. */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* Re-train at the new frequency and restore low-power state. */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3248 
/*
 * Cycle the DDR through each controller frequency-set-point so that
 * training results are captured and saved for every one, ending on the
 * boot frequency f0.
 *
 * Frequencies f0..f3 come from the drive/ODT info block for the detected
 * DRAM type.  With CONFIG_SPL_KERNEL_BOOT only the final switch to f0 is
 * performed and the FSP parameter table is not cleared here.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Clear both the in-memory and the in-DDR copies of fsp_param. */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	/* Read back write-leveling values; warn (but continue) on failure. */
	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Visit FSP groups 1..3 at f1..f3, training at each step. */
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	/* Final switch to the boot frequency f0. */
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3305 
3306 int get_uart_config(void)
3307 {
3308 	struct sdram_head_info_index_v2 *index =
3309 		(struct sdram_head_info_index_v2 *)common_info;
3310 	struct global_info *gbl_info;
3311 
3312 	gbl_info = (struct global_info *)((void *)common_info +
3313 		index->global_index.offset * 4);
3314 
3315 	return gbl_info->uart_info;
3316 }
3317 
3318 /* return: 0 = success, other = fail */
3319 int sdram_init(void)
3320 {
3321 	struct rv1126_sdram_params *sdram_params;
3322 	int ret = 0;
3323 	struct sdram_head_info_index_v2 *index =
3324 		(struct sdram_head_info_index_v2 *)common_info;
3325 	struct global_info *gbl_info;
3326 
3327 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3328 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3329 	dram_info.grf = (void *)GRF_BASE_ADDR;
3330 	dram_info.cru = (void *)CRU_BASE_ADDR;
3331 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3332 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3333 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3334 
3335 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3336 	printascii("extended temp support\n");
3337 #endif
3338 	if (index->version_info != 2 ||
3339 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3340 	    (index->ddr3_index.size !=
3341 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3342 	    (index->ddr4_index.size !=
3343 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3344 	    (index->lp3_index.size !=
3345 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3346 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3347 	    index->global_index.offset == 0 ||
3348 	    index->ddr3_index.offset == 0 ||
3349 	    index->ddr4_index.offset == 0 ||
3350 	    index->lp3_index.offset == 0 ||
3351 	    index->lp4_index.offset == 0) {
3352 		printascii("common info error\n");
3353 		goto error;
3354 	}
3355 
3356 	gbl_info = (struct global_info *)((void *)common_info +
3357 		index->global_index.offset * 4);
3358 
3359 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3360 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3361 
3362 	sdram_params = &sdram_configs[0];
3363 
3364 	if (sdram_params->base.dramtype == DDR3 ||
3365 	    sdram_params->base.dramtype == DDR4) {
3366 		if (DDR_2T_INFO(gbl_info->info_2t))
3367 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3368 		else
3369 			sdram_params->pctl_regs.pctl[0][1] &=
3370 				~(0x1 << 10);
3371 	}
3372 	ret = sdram_init_detect(&dram_info, sdram_params);
3373 	if (ret) {
3374 		sdram_print_dram_type(sdram_params->base.dramtype);
3375 		printascii(", ");
3376 		printdec(sdram_params->base.ddr_freq);
3377 		printascii("MHz\n");
3378 		goto error;
3379 	}
3380 	print_ddr_info(sdram_params);
3381 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3382 	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3383 				  (u8)sdram_params->ch.cap_info.rank);
3384 #endif
3385 
3386 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3387 #ifndef CONFIG_SPL_KERNEL_BOOT
3388 	copy_fsp_param_to_ddr();
3389 #endif
3390 
3391 	ddr_set_atags(&dram_info, sdram_params);
3392 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3393 	save_rw_trn_result_to_ddr(&rw_trn_result);
3394 #endif
3395 
3396 	printascii("out\n");
3397 
3398 	return ret;
3399 error:
3400 	printascii("error\n");
3401 	return (-1);
3402 }
3403 #endif /* CONFIG_TPL_BUILD */
3404