xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision d49e7c34033a0a0b80d5d1c4c2f1a45fa58a285b)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/* Runtime handles and idle settings used throughout TPL DRAM init. */
struct dram_info {
	void __iomem *pctl;		/* DDR protocol controller (UPCTL2) base */
	void __iomem *phy;		/* DDR PHY register base */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler registers */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system GRF */
	struct ram_info info;		/* RAM base/size for the ram uclass */
	struct rv1126_pmugrf *pmugrf;	/* PMU GRF */
	u32 sr_idle;			/* self-refresh idle count (units: controller-defined) */
	u32 pd_idle;			/* power-down idle count (units: controller-defined) */
};
55 
/* Fixed physical base addresses used before any device model is up */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* Offsets into the bus secure GRF (BUS_SGRF) */
#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* Single global instance; TPL runs single-threaded so this is safe */
struct dram_info dram_info;
70 
/*
 * Candidate SDRAM parameter sets for the configured DRAM type, one entry
 * per supported frequency (328..1056 MHz); the generated .inc files
 * supply the initializer data.
 * CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE: 0 = DDR4, 3 = DDR3, 6 = LPDDR3,
 * 7 = LPDDR4.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
112 
/* Loader parameter blob (sdram_head_info_v2 layout), shared with later stages */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* Training results exported to the DDR test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* Per frequency-set-point parameters captured during init */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* LPDDR3 ODT value cached by the init code (set elsewhere in this driver) */
static u8 lp3_odt_value;

/* Write-leveling results, indexed [rank][byte lane] — TODO confirm axes */
static s8 wrlvl_result[2][4];
/* DDR configuration 0-9 */
/*
 * Geometry key per entry, matched in calculate_ddrconfig():
 *   bit [8]    rank - 1
 *   bits[7:5]  row - 13
 *   bit [4]    set only on the cs1-row-collapse variants (5-7); the
 *              computed key never sets it, so those entries are reached
 *              only through the dedicated cs == 2 search path
 *   bit [3]    1 = 8-bank device (bank == 3)
 *   bits[2:0]  col + bw - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
140 
/* DDR configuration 10-21 */
/*
 * DDR4 geometry key per entry, matched in calculate_ddrconfig():
 *   bit [7]    rank - 1
 *   bits[6:4]  row - 13
 *   bit [3]    set on cs1-row-collapse variants (matched only via the
 *              dedicated cs == 2 search path)
 *   bits[2:1]  bus-width code (cap_info->bw)
 *   bit [0]    die-width code (cap_info->dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
156 
/* DDR configuration 22-28 */
/*
 * Second bank of non-DDR4 geometry keys; same bit layout as
 * ddr_cfg_2_rbc above (matched indices are offset by 22).
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
167 
/*
 * {DDR4 config (10-21), equivalent DDR3-style config} pairs.
 * calculate_ddrconfig() translates a matched DDR4 config into its
 * DDR3-style equivalent; set_ctl_address_map() does the reverse lookup.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
182 
/*
 * Per-ddrconfig address-map programming: each row holds the nine values
 * written to DDR_PCTL2_ADDRMAP0..ADDRMAP8 (see set_ctl_address_map(),
 * which copies 9 * 4 bytes starting at ADDRMAP0).
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
233 
/*
 * DQ line selection table used by the deskew/training code.
 * Not referenced in this part of the file; the three columns look like
 * {dq id, rx select, tx select} — TODO confirm against the users below.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
258 
/* Skew register group base addresses: CS0-A, CS0-B, CS1-A, CS1-B */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
265 
/*
 * PHY register offsets where write-leveling results are read back;
 * presumably indexed [rank][byte lane] to mirror wrlvl_result —
 * TODO confirm against the training code.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
270 
/*
 * PHY skew-register offsets for each RX/TX DQS slot; the per-entry
 * comments name the SKEW_UPDATE_* slot each offset corresponds to.
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
289 
290 static void rkclk_ddr_reset(struct dram_info *dram,
291 			    u32 ctl_srstn, u32 ctl_psrstn,
292 			    u32 phy_srstn, u32 phy_psrstn)
293 {
294 	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
295 	       UPCTL2_ASRSTN_REQ(ctl_srstn),
296 	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
297 
298 	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
299 	       &dram->cru->softrst_con[12]);
300 }
301 
/*
 * Program the DPLL to output @hz, optionally enabling spread-spectrum
 * modulation (SSMOD) when the loader-parameter blob requests it.
 * The mode mux is parked on the 24 MHz crystal while the PLL is
 * reprogrammed and switched back once LOCK is seen (or after a ~1 ms
 * poll budget expires; a lock failure is not reported).
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock-wait budget, 1 us per iteration */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* cleared below when SSMOD is enabled */

	/* SSMOD settings live in the global-info section of common_info */
	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* choose post-dividers by target rate; fbdiv is derived afterwards */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz crystal reference: fout = 24 * fbdiv / (ref * pd1 * pd2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is being reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* SSMOD requires fractional (DSM) mode */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* poll for PLL lock, up to ~1 ms */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	/* switch the DPLL output back onto the PLL path */
	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
367 
368 static void rkclk_configure_ddr(struct dram_info *dram,
369 				struct rv1126_sdram_params *sdram_params)
370 {
371 	/* for inno ddr phy need freq / 2 */
372 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
373 }
374 
/*
 * Select the ddrconfig (row/bank/column address-mapping scheme) matching
 * the detected geometry in @sdram_params->ch.cap_info.
 *
 * A geometry key is built from rank/row/bank/col/bus-width and compared
 * against the encoding tables: ddr4_cfg_2_rbc (configs 10-21) for DDR4,
 * ddr_cfg_2_rbc (0-9) and ddr_cfg_2_rbc_p2 (22-28) otherwise.  Matched
 * DDR4 configs are translated to their DDR3-style equivalent through
 * d4_rbc_2_d3_rbc.  On no match, ddrconf stays (u32)-1, which trips the
 * "> 28" error print and is returned as-is.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/*
		 * Symmetric dual-rank first: try the dedicated cs1 variants
		 * (17-20); only their bw/die-bw and row bits are compared.
		 */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic DDR4 key: bit7 = rank-1, [6:4] = row-13, [2:0] = widths */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank 8-bank: try the cs1 variants (5-7) */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic key: bit8 = rank-1, [7:5] = row-13, [2:0] = col+bw-10 */
		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* second table covers configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort fallback for single-rank 8-bank parts */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* (u32)-1 from "no match" also lands here */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
479 
480 static void sw_set_req(struct dram_info *dram)
481 {
482 	void __iomem *pctl_base = dram->pctl;
483 
484 	/* clear sw_done=0 */
485 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
486 }
487 
488 static void sw_set_ack(struct dram_info *dram)
489 {
490 	void __iomem *pctl_base = dram->pctl;
491 
492 	/* set sw_done=1 */
493 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
494 	while (1) {
495 		/* wait programming done */
496 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
497 				PCTL2_SW_DONE_ACK)
498 			break;
499 	}
500 }
501 
/*
 * Program the controller's ADDRMAP0..ADDRMAP8 registers from the addrmap
 * table row selected by cap_info->ddrconfig.  DDR4 configs are first
 * mapped back to their original index via d4_rbc_2_d3_rbc (reverse of
 * the translation done in calculate_ddrconfig()).  Afterwards the entry
 * is patched for the actual geometry: unused row bits, LPDDR3 3/4-row,
 * DDR4 half bus width and single-rank CS disable.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		/* translate DDR3-style config back to its DDR4 index */
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	/* nine words: ADDRMAP0..ADDRMAP8 */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], 9 * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	/* ADDRMAP6 bit 31 = LPDDR3 6Gb/12Gb (3/4-row) device */
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	/* half bus width on DDR4: set PCCFG bit 8 */
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: park the CS bit selector (0x1f = unused) */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
542 
/*
 * Configure the PHY PLL for @freq (Hz).
 * With @wait non-zero: clear PHY_PD_DISB in reg 0x53 and spin until
 * reg 0x90 reports PHY_PLL_LOCK.  With @wait zero: program the
 * prediv/fbdiv/postdiv dividers chosen from @freq (in MHz) into
 * regs 0x50-0x53 without waiting.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* lower rates use a larger fbdiv plus post-division */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		/* fbdiv bit 9 lives in reg 0x51 */
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
581 
/*
 * DDR3 PHY drive-strength register code -> ohms, sorted from highest to
 * lowest resistance; lookup loops pick the first entry <= the target.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
607 
/*
 * DDR3 PHY ODT register code -> termination in ohms, descending;
 * first entry (0) means ODT disabled.
 */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};
634 
/*
 * DDR4/LPDDR3 PHY drive-strength register code -> ohms, descending.
 */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};
660 
661 static u16 d4lp3_phy_odt_2_ohm[][2] = {
662 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
663 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
664 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
665 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
666 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
667 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
668 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
669 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
670 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
671 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
672 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
673 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
674 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
675 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
676 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
677 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
678 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
679 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
680 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
681 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
682 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
683 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
684 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
685 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
686 };
687 
/*
 * LPDDR4/LPDDR4X PHY drive-strength register code -> ohms, descending.
 */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};
713 
/*
 * LPDDR4/LPDDR4X PHY ODT register code -> termination in ohms,
 * descending; first entry (0) means ODT disabled.
 */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm,	87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};
740 
741 static u32 lp4_odt_calc(u32 odt_ohm)
742 {
743 	u32 odt;
744 
745 	if (odt_ohm == 0)
746 		odt = LPDDR4_DQODT_DIS;
747 	else if (odt_ohm <= 40)
748 		odt = LPDDR4_DQODT_40;
749 	else if (odt_ohm <= 48)
750 		odt = LPDDR4_DQODT_48;
751 	else if (odt_ohm <= 60)
752 		odt = LPDDR4_DQODT_60;
753 	else if (odt_ohm <= 80)
754 		odt = LPDDR4_DQODT_80;
755 	else if (odt_ohm <= 120)
756 		odt = LPDDR4_DQODT_120;
757 	else
758 		odt = LPDDR4_DQODT_240;
759 
760 	return odt;
761 }
762 
763 static void *get_ddr_drv_odt_info(u32 dramtype)
764 {
765 	struct sdram_head_info_index_v2 *index =
766 		(struct sdram_head_info_index_v2 *)common_info;
767 	void *ddr_info = 0;
768 
769 	if (dramtype == DDR4)
770 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
771 	else if (dramtype == DDR3)
772 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
773 	else if (dramtype == LPDDR3)
774 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
775 	else if (dramtype == LPDDR4)
776 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
777 	else if (dramtype == LPDDR4X)
778 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
779 	else
780 		printascii("unsupported dram type\n");
781 	return ddr_info;
782 }
783 
/*
 * Program LPDDR4/LPDDR4X CA (MR12) and DQ (MR14) VREF values into the
 * controller's INIT6/INIT7 registers for frequency set-point @dst_fsp.
 * The raw vref is taken from the loader blob depending on whether ODT is
 * enabled at @freq_mhz, then clamped and converted to the mode-register
 * range/step encoding (bit 6 selects the upper range).
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	/* below the ODT-enable frequency use the odt-off vref values */
	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* clamp to 100..420, then encode: range 0 from 100, range 1 from 220 */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		/* LPDDR4X: rescale by 11/6 and clamp to the 150..629 window */
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		/*
		 * NOTE(review): dq uses step /6 here while ca above uses /4;
		 * confirm the asymmetry is intentional for LPDDR4X.
		 */
		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* MR12/MR14 fields sit in INIT6/INIT7 of the target set-point */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
854 
855 static void set_ds_odt(struct dram_info *dram,
856 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
857 {
858 	void __iomem *phy_base = dram->phy;
859 	void __iomem *pctl_base = dram->pctl;
860 	u32 dramtype = sdram_params->base.dramtype;
861 	struct ddr2_3_4_lp2_3_info *ddr_info;
862 	struct lp4_info *lp4_info;
863 	u32 i, j, tmp;
864 	const u16 (*p_drv)[2];
865 	const u16 (*p_odt)[2];
866 	u32 drv_info, sr_info;
867 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
868 	u32 phy_odt_ohm, dram_odt_ohm;
869 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
870 	u32 phy_odt_up_en, phy_odt_dn_en;
871 	u32 sr_dq, sr_clk;
872 	u32 freq = sdram_params->base.ddr_freq;
873 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
874 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
875 	u32 phy_dq_drv = 0;
876 	u32 phy_odt_up = 0, phy_odt_dn = 0;
877 
878 	ddr_info = get_ddr_drv_odt_info(dramtype);
879 	lp4_info = (void *)ddr_info;
880 
881 	if (!ddr_info)
882 		return;
883 
884 	/* dram odt en freq control phy drv, dram odt and phy sr */
885 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
886 		drv_info = ddr_info->drv_when_odtoff;
887 		dram_odt_ohm = 0;
888 		sr_info = ddr_info->sr_when_odtoff;
889 		phy_lp4_drv_pd_en =
890 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
891 	} else {
892 		drv_info = ddr_info->drv_when_odten;
893 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
894 		sr_info = ddr_info->sr_when_odten;
895 		phy_lp4_drv_pd_en =
896 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
897 	}
898 	phy_dq_drv_ohm =
899 		DRV_INFO_PHY_DQ_DRV(drv_info);
900 	phy_clk_drv_ohm =
901 		DRV_INFO_PHY_CLK_DRV(drv_info);
902 	phy_ca_drv_ohm =
903 		DRV_INFO_PHY_CA_DRV(drv_info);
904 
905 	sr_dq = DQ_SR_INFO(sr_info);
906 	sr_clk = CLK_SR_INFO(sr_info);
907 
908 	/* phy odt en freq control dram drv and phy odt */
909 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
910 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
911 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
912 		phy_odt_ohm = 0;
913 		phy_odt_up_en = 0;
914 		phy_odt_dn_en = 0;
915 	} else {
916 		dram_drv_ohm =
917 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
918 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
919 		phy_odt_up_en =
920 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
921 		phy_odt_dn_en =
922 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
923 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
924 	}
925 
926 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
927 		if (phy_odt_ohm) {
928 			phy_odt_up_en = 0;
929 			phy_odt_dn_en = 1;
930 		}
931 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
932 			dram_caodt_ohm = 0;
933 		else
934 			dram_caodt_ohm =
935 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
936 	}
937 
938 	if (dramtype == DDR3) {
939 		p_drv = d3_phy_drv_2_ohm;
940 		p_odt = d3_phy_odt_2_ohm;
941 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
942 		p_drv = lp4_phy_drv_2_ohm;
943 		p_odt = lp4_phy_odt_2_ohm;
944 	} else {
945 		p_drv = d4lp3_phy_drv_2_ohm;
946 		p_odt = d4lp3_phy_odt_2_ohm;
947 	}
948 
949 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
950 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
951 			phy_dq_drv = **(p_drv + i);
952 			break;
953 		}
954 		if (i == 0)
955 			break;
956 	}
957 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
958 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
959 			phy_clk_drv = **(p_drv + i);
960 			break;
961 		}
962 		if (i == 0)
963 			break;
964 	}
965 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
966 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
967 			phy_ca_drv = **(p_drv + i);
968 			break;
969 		}
970 		if (i == 0)
971 			break;
972 	}
973 	if (!phy_odt_ohm)
974 		phy_odt = 0;
975 	else
976 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
977 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
978 				phy_odt = **(p_odt + i);
979 				break;
980 			}
981 			if (i == 0)
982 				break;
983 		}
984 
985 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
986 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
987 			vref_inner = 0x80;
988 		else if (phy_odt_up_en)
989 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
990 				     (dram_drv_ohm + phy_odt_ohm);
991 		else
992 			vref_inner = phy_odt_ohm * 128 /
993 				(phy_odt_ohm + dram_drv_ohm);
994 
995 		if (dramtype != DDR3 && dram_odt_ohm)
996 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
997 				   (phy_dq_drv_ohm + dram_odt_ohm);
998 		else
999 			vref_out = 0x80;
1000 	} else {
1001 		/* for lp4 and lp4x*/
1002 		if (phy_odt_ohm)
1003 			vref_inner =
1004 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1005 				 256) / 1000;
1006 		else
1007 			vref_inner =
1008 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1009 				 256) / 1000;
1010 
1011 		vref_out = 0x80;
1012 	}
1013 
1014 	/* default ZQCALIB bypass mode */
1015 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1016 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1017 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1018 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1019 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1020 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1021 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1022 	} else {
1023 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1024 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1025 	}
1026 	/* clk / cmd slew rate */
1027 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1028 
1029 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1030 	if (phy_odt_up_en)
1031 		phy_odt_up = phy_odt;
1032 	if (phy_odt_dn_en)
1033 		phy_odt_dn = phy_odt;
1034 
1035 	for (i = 0; i < 4; i++) {
1036 		j = 0x110 + i * 0x10;
1037 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1038 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1039 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1040 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1041 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1042 
1043 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1044 				1 << 3, phy_lp4_drv_pd_en << 3);
1045 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1046 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1047 		/* dq slew rate */
1048 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1049 				0x1f, sr_dq);
1050 	}
1051 
1052 	/* reg_rx_vref_value_update */
1053 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1054 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1055 
1056 	/* RAM VREF */
1057 	writel(vref_out, PHY_REG(phy_base, 0x105));
1058 	if (dramtype == LPDDR3)
1059 		udelay(100);
1060 
1061 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1062 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1063 
1064 	if (dramtype == DDR3 || dramtype == DDR4) {
1065 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1066 				DDR_PCTL2_INIT3);
1067 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1068 	} else {
1069 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1070 				DDR_PCTL2_INIT4);
1071 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1072 	}
1073 
1074 	if (dramtype == DDR3) {
1075 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1076 		if (dram_drv_ohm == 34)
1077 			mr1_mr3 |= DDR3_DS_34;
1078 
1079 		if (dram_odt_ohm == 0)
1080 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1081 		else if (dram_odt_ohm <= 40)
1082 			mr1_mr3 |= DDR3_RTT_NOM_40;
1083 		else if (dram_odt_ohm <= 60)
1084 			mr1_mr3 |= DDR3_RTT_NOM_60;
1085 		else
1086 			mr1_mr3 |= DDR3_RTT_NOM_120;
1087 
1088 	} else if (dramtype == DDR4) {
1089 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1090 		if (dram_drv_ohm == 48)
1091 			mr1_mr3 |= DDR4_DS_48;
1092 
1093 		if (dram_odt_ohm == 0)
1094 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1095 		else if (dram_odt_ohm <= 34)
1096 			mr1_mr3 |= DDR4_RTT_NOM_34;
1097 		else if (dram_odt_ohm <= 40)
1098 			mr1_mr3 |= DDR4_RTT_NOM_40;
1099 		else if (dram_odt_ohm <= 48)
1100 			mr1_mr3 |= DDR4_RTT_NOM_48;
1101 		else if (dram_odt_ohm <= 60)
1102 			mr1_mr3 |= DDR4_RTT_NOM_60;
1103 		else
1104 			mr1_mr3 |= DDR4_RTT_NOM_120;
1105 
1106 	} else if (dramtype == LPDDR3) {
1107 		if (dram_drv_ohm <= 34)
1108 			mr1_mr3 |= LPDDR3_DS_34;
1109 		else if (dram_drv_ohm <= 40)
1110 			mr1_mr3 |= LPDDR3_DS_40;
1111 		else if (dram_drv_ohm <= 48)
1112 			mr1_mr3 |= LPDDR3_DS_48;
1113 		else if (dram_drv_ohm <= 60)
1114 			mr1_mr3 |= LPDDR3_DS_60;
1115 		else if (dram_drv_ohm <= 80)
1116 			mr1_mr3 |= LPDDR3_DS_80;
1117 
1118 		if (dram_odt_ohm == 0)
1119 			lp3_odt_value = LPDDR3_ODT_DIS;
1120 		else if (dram_odt_ohm <= 60)
1121 			lp3_odt_value = LPDDR3_ODT_60;
1122 		else if (dram_odt_ohm <= 120)
1123 			lp3_odt_value = LPDDR3_ODT_120;
1124 		else
1125 			lp3_odt_value = LPDDR3_ODT_240;
1126 	} else {/* for lpddr4 and lpddr4x */
1127 		/* MR3 for lp4 PU-CAL and PDDS */
1128 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1129 		mr1_mr3 |= lp4_pu_cal;
1130 
1131 		tmp = lp4_odt_calc(dram_drv_ohm);
1132 		if (!tmp)
1133 			tmp = LPDDR4_PDDS_240;
1134 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1135 
1136 		/* MR11 for lp4 ca odt, dq odt set */
1137 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1138 			     DDR_PCTL2_INIT6);
1139 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1140 
1141 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1142 
1143 		tmp = lp4_odt_calc(dram_odt_ohm);
1144 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1145 
1146 		tmp = lp4_odt_calc(dram_caodt_ohm);
1147 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1148 		sw_set_req(dram);
1149 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1150 				DDR_PCTL2_INIT6,
1151 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1152 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1153 		sw_set_ack(dram);
1154 
1155 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1156 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1157 			     DDR_PCTL2_INIT7);
1158 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1159 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1160 
1161 		tmp = lp4_odt_calc(phy_odt_ohm);
1162 		mr22 |= tmp;
1163 		mr22 = mr22 |
1164 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1165 			LPDDR4_ODTE_CK_SHIFT) |
1166 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1167 			LPDDR4_ODTE_CS_SHIFT) |
1168 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1169 			LPDDR4_ODTD_CA_SHIFT);
1170 
1171 		sw_set_req(dram);
1172 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1173 				DDR_PCTL2_INIT7,
1174 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1175 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1176 		sw_set_ack(dram);
1177 	}
1178 
1179 	if (dramtype == DDR4 || dramtype == DDR3) {
1180 		sw_set_req(dram);
1181 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1182 				DDR_PCTL2_INIT3,
1183 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1184 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1185 		sw_set_ack(dram);
1186 	} else {
1187 		sw_set_req(dram);
1188 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1189 				DDR_PCTL2_INIT4,
1190 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1191 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1192 		sw_set_ack(dram);
1193 	}
1194 }
1195 
1196 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1197 				   struct rv1126_sdram_params *sdram_params)
1198 {
1199 	void __iomem *phy_base = dram->phy;
1200 	u32 dramtype = sdram_params->base.dramtype;
1201 	struct sdram_head_info_index_v2 *index =
1202 		(struct sdram_head_info_index_v2 *)common_info;
1203 	struct dq_map_info *map_info;
1204 
1205 	map_info = (struct dq_map_info *)((void *)common_info +
1206 		index->dq_map_index.offset * 4);
1207 
1208 	if (dramtype == LPDDR4X)
1209 		dramtype = LPDDR4;
1210 
1211 	if (dramtype <= LPDDR4)
1212 		writel((map_info->byte_map[dramtype / 4] >>
1213 			((dramtype % 4) * 8)) & 0xff,
1214 		       PHY_REG(phy_base, 0x4f));
1215 
1216 	return 0;
1217 }
1218 
1219 static void phy_cfg(struct dram_info *dram,
1220 		    struct rv1126_sdram_params *sdram_params)
1221 {
1222 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1223 	void __iomem *phy_base = dram->phy;
1224 	u32 i, dq_map, tmp;
1225 	u32 byte1 = 0, byte0 = 0;
1226 
1227 	sdram_cmd_dq_path_remap(dram, sdram_params);
1228 
1229 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
1230 	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
1231 		writel(sdram_params->phy_regs.phy[i][1],
1232 		       phy_base + sdram_params->phy_regs.phy[i][0]);
1233 	}
1234 
1235 	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
1236 	dq_map = readl(PHY_REG(phy_base, 0x4f));
1237 	for (i = 0; i < 4; i++) {
1238 		if (((dq_map >> (i * 2)) & 0x3) == 0)
1239 			byte0 = i;
1240 		if (((dq_map >> (i * 2)) & 0x3) == 1)
1241 			byte1 = i;
1242 	}
1243 
1244 	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
1245 	if (cap_info->bw == 2)
1246 		tmp |= 0xf;
1247 	else if (cap_info->bw == 1)
1248 		tmp |= ((1 << byte0) | (1 << byte1));
1249 	else
1250 		tmp |= (1 << byte0);
1251 
1252 	writel(tmp, PHY_REG(phy_base, 0xf));
1253 
1254 	/* lpddr4 odt control by phy, enable cs0 odt */
1255 	if (sdram_params->base.dramtype == LPDDR4 ||
1256 	    sdram_params->base.dramtype == LPDDR4X)
1257 		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
1258 				(1 << 6) | (1 << 4));
1259 	/* for ca training ca vref choose range1 */
1260 	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
1261 	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
1262 	/* for wr training PHY_0x7c[5], choose range0 */
1263 	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
1264 }
1265 
1266 static int update_refresh_reg(struct dram_info *dram)
1267 {
1268 	void __iomem *pctl_base = dram->pctl;
1269 	u32 ret;
1270 
1271 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1272 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1273 
1274 	return 0;
1275 }
1276 
1277 /*
1278  * rank = 1: cs0
1279  * rank = 2: cs1
1280  */
1281 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1282 {
1283 	u32 ret;
1284 	u32 i, temp;
1285 	u32 dqmap;
1286 
1287 	void __iomem *pctl_base = dram->pctl;
1288 	struct sdram_head_info_index_v2 *index =
1289 		(struct sdram_head_info_index_v2 *)common_info;
1290 	struct dq_map_info *map_info;
1291 
1292 	map_info = (struct dq_map_info *)((void *)common_info +
1293 		index->dq_map_index.offset * 4);
1294 
1295 	if (dramtype == LPDDR2)
1296 		dqmap = map_info->lp2_dq0_7_map;
1297 	else
1298 		dqmap = map_info->lp3_dq0_7_map;
1299 
1300 	pctl_read_mr(pctl_base, rank, mr_num);
1301 
1302 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1303 
1304 	if (dramtype != LPDDR4) {
1305 		temp = 0;
1306 		for (i = 0; i < 8; i++) {
1307 			temp = temp | (((ret >> i) & 0x1) <<
1308 				       ((dqmap >> (i * 4)) & 0xf));
1309 		}
1310 	} else {
1311 		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1312 	}
1313 
1314 	return temp;
1315 }
1316 
1317 /* before call this function autorefresh should be disabled */
1318 void send_a_refresh(struct dram_info *dram)
1319 {
1320 	void __iomem *pctl_base = dram->pctl;
1321 
1322 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1323 		continue;
1324 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1325 }
1326 
1327 static void enter_sr(struct dram_info *dram, u32 en)
1328 {
1329 	void __iomem *pctl_base = dram->pctl;
1330 
1331 	if (en) {
1332 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1333 		while (1) {
1334 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1335 			      PCTL2_SELFREF_TYPE_MASK) ==
1336 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1337 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1338 			      PCTL2_OPERATING_MODE_MASK) ==
1339 			     PCTL2_OPERATING_MODE_SR))
1340 				break;
1341 		}
1342 	} else {
1343 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1344 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1345 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1346 			continue;
1347 	}
1348 }
1349 
1350 void record_dq_prebit(struct dram_info *dram)
1351 {
1352 	u32 group, i, tmp;
1353 	void __iomem *phy_base = dram->phy;
1354 
1355 	for (group = 0; group < 4; group++) {
1356 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1357 			/* l_loop_invdelaysel */
1358 			writel(dq_sel[i][0], PHY_REG(phy_base,
1359 						     grp_addr[group] + 0x2c));
1360 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1361 			writel(tmp, PHY_REG(phy_base,
1362 					    grp_addr[group] + dq_sel[i][1]));
1363 
1364 			/* r_loop_invdelaysel */
1365 			writel(dq_sel[i][0], PHY_REG(phy_base,
1366 						     grp_addr[group] + 0x2d));
1367 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1368 			writel(tmp, PHY_REG(phy_base,
1369 					    grp_addr[group] + dq_sel[i][2]));
1370 		}
1371 	}
1372 }
1373 
1374 static void update_dq_rx_prebit(struct dram_info *dram)
1375 {
1376 	void __iomem *phy_base = dram->phy;
1377 
1378 	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
1379 			BIT(4));
1380 	udelay(1);
1381 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
1382 }
1383 
1384 static void update_dq_tx_prebit(struct dram_info *dram)
1385 {
1386 	void __iomem *phy_base = dram->phy;
1387 
1388 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1389 	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
1390 	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1391 	udelay(1);
1392 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1393 }
1394 
1395 static void update_ca_prebit(struct dram_info *dram)
1396 {
1397 	void __iomem *phy_base = dram->phy;
1398 
1399 	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
1400 	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1401 	udelay(1);
1402 	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1403 }
1404 
1405 /*
1406  * dir: 0: de-skew = delta_*
1407  *	1: de-skew = reg val - delta_*
1408  * delta_dir: value for differential signal: clk/
1409  * delta_sig: value for single signal: ca/cmd
1410  */
1411 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1412 			     int delta_sig, u32 cs, u32 dramtype)
1413 {
1414 	void __iomem *phy_base = dram->phy;
1415 	u32 i, cs_en, tmp;
1416 	u32 dfi_lp_stat = 0;
1417 
1418 	if (cs == 0)
1419 		cs_en = 1;
1420 	else if (cs == 2)
1421 		cs_en = 2;
1422 	else
1423 		cs_en = 3;
1424 
1425 	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
1426 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1427 		dfi_lp_stat = 1;
1428 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1429 	}
1430 	enter_sr(dram, 1);
1431 
1432 	for (i = 0; i < 0x20; i++) {
1433 		if (dir == DESKEW_MDF_ABS_VAL)
1434 			tmp = delta_sig;
1435 		else
1436 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1437 			      delta_sig;
1438 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1439 	}
1440 
1441 	if (dir == DESKEW_MDF_ABS_VAL)
1442 		tmp = delta_dif;
1443 	else
1444 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1445 		       delta_sig + delta_dif;
1446 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1447 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1448 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1449 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1450 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1451 
1452 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1453 		update_ca_prebit(dram);
1454 	}
1455 	enter_sr(dram, 0);
1456 
1457 	if (dfi_lp_stat)
1458 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1459 
1460 }
1461 
1462 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1463 {
1464 	u32 i, j, offset = 0;
1465 	u32 min = 0x3f;
1466 	void __iomem *phy_base = dram->phy;
1467 	u32 byte_en;
1468 
1469 	if (signal == SKEW_TX_SIGNAL)
1470 		offset = 8;
1471 
1472 	if (signal == SKEW_CA_SIGNAL) {
1473 		for (i = 0; i < 0x20; i++)
1474 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1475 	} else {
1476 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1477 		for (j = offset; j < offset + rank * 4; j++) {
1478 			if (!((byte_en >> (j % 4)) & 1))
1479 				continue;
1480 			for (i = 0; i < 11; i++)
1481 				min = MIN(min,
1482 					  readl(PHY_REG(phy_base,
1483 							dqs_dq_skew_adr[j] +
1484 							i)));
1485 		}
1486 	}
1487 
1488 	return min;
1489 }
1490 
1491 static u32 low_power_update(struct dram_info *dram, u32 en)
1492 {
1493 	void __iomem *pctl_base = dram->pctl;
1494 	u32 lp_stat = 0;
1495 
1496 	if (en) {
1497 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1498 	} else {
1499 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1500 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1501 	}
1502 
1503 	return lp_stat;
1504 }
1505 
1506 /*
1507  * signal:
1508  * dir: 0: de-skew = delta_*
1509  *	1: de-skew = reg val - delta_*
1510  * delta_dir: value for differential signal: dqs
1511  * delta_sig: value for single signal: dq/dm
1512  */
1513 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1514 			     int delta_dif, int delta_sig, u32 rank)
1515 {
1516 	void __iomem *phy_base = dram->phy;
1517 	u32 i, j, tmp, offset;
1518 	u32 byte_en;
1519 
1520 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1521 
1522 	if (signal == SKEW_RX_SIGNAL)
1523 		offset = 0;
1524 	else
1525 		offset = 8;
1526 
1527 	for (j = offset; j < (offset + rank * 4); j++) {
1528 		if (!((byte_en >> (j % 4)) & 1))
1529 			continue;
1530 		for (i = 0; i < 0x9; i++) {
1531 			if (dir == DESKEW_MDF_ABS_VAL)
1532 				tmp = delta_sig;
1533 			else
1534 				tmp = delta_sig + readl(PHY_REG(phy_base,
1535 							dqs_dq_skew_adr[j] +
1536 							i));
1537 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1538 		}
1539 		if (dir == DESKEW_MDF_ABS_VAL)
1540 			tmp = delta_dif;
1541 		else
1542 			tmp = delta_dif + readl(PHY_REG(phy_base,
1543 						dqs_dq_skew_adr[j] + 9));
1544 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1545 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1546 	}
1547 	if (signal == SKEW_RX_SIGNAL)
1548 		update_dq_rx_prebit(dram);
1549 	else
1550 		update_dq_tx_prebit(dram);
1551 }
1552 
1553 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1554 {
1555 	void __iomem *phy_base = dram->phy;
1556 	u32 ret;
1557 	u32 dis_auto_zq = 0;
1558 	u32 odt_val_up, odt_val_dn;
1559 	u32 i, j;
1560 
1561 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1562 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1563 
1564 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1565 		for (i = 0; i < 4; i++) {
1566 			j = 0x110 + i * 0x10;
1567 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1568 			       PHY_REG(phy_base, j));
1569 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1570 			       PHY_REG(phy_base, j + 0x1));
1571 		}
1572 	}
1573 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1574 	/* use normal read mode for data training */
1575 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1576 
1577 	if (dramtype == DDR4)
1578 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1579 
1580 	/* choose training cs */
1581 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1582 	/* enable gate training */
1583 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1584 	udelay(50);
1585 	ret = readl(PHY_REG(phy_base, 0x91));
1586 	/* disable gate training */
1587 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1588 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1589 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1590 
1591 	if (ret & 0x20)
1592 		ret = -1;
1593 	else
1594 		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1595 
1596 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1597 		for (i = 0; i < 4; i++) {
1598 			j = 0x110 + i * 0x10;
1599 			writel(odt_val_dn, PHY_REG(phy_base, j));
1600 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1601 		}
1602 	}
1603 	return ret;
1604 }
1605 
/*
 * Write leveling training for one chip select.
 *
 * cs: chip select to train (0 or 1)
 * dramtype: for DDR3/DDR4 the other rank's output is disabled via MR1
 *	     bit 12 while training (when rank == 2)
 * rank: number of ranks populated
 *
 * On timeout (~1000us) this prints an error and hangs forever.
 * Returns 0 on success.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY 0x7a[0]: make sure dq write-train auto mode is off */
	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch the MR value from INIT3 for the current fsp */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until all enabled byte lanes (PHY 0xf) report done (0x92) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1668 
/*
 * 32-byte alternating 0xaa/0x55 test pattern; presumably consumed by
 * the training/test code elsewhere in this file — confirm at callers.
 * NOTE(review): storing 0xaa in plain 'char' relies on
 * implementation-defined conversion when char is signed.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1675 
/*
 * Automatic read training for one chip select.
 *
 * cs: chip select to train (only 0 or 1; returns -1 otherwise)
 * dramtype: DDR3 temporarily lowers rx vref during training; DDR4 also
 *	     programs the dq map registers
 * mhz: NOTE(review): currently unused in this function
 *
 * Returns 0 on success, -1 on timeout or a training error flag.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* DDR3 at the default vref (0x80): drop rx vref a bit for training */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the DDR3 rx vref that was lowered above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1792 
/*
 * Automatic write training for one chip select.
 *
 * cs: chip select to train
 * dramtype: LPDDR3 at <= 400MHz temporarily forces CL=8/CWL=4 and MR2,
 *	     restoring them afterwards; LPDDR4/LPDDR4X save the trained
 *	     write vref into fsp_param for later dfs use
 * mhz: current DRAM frequency (used only for the LPDDR3 low-speed path)
 * dst_fsp: frequency set point whose fsp_param entry receives the vref
 *
 * On timeout (~1000us) this prints an error and hangs forever.
 * Returns 0 on success, -1 on a training error flag.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* LPDDR3 low speed: save CL/CWL, force CL=8/CWL=4 for training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	/* training is kicked off by a manual refresh command */
	send_a_refresh(dram);

	/* wait for the training-done flag (PHY 0x92 bit 7) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* LPDDR3 low speed: restore the saved CL/CWL and MR2 value */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1905 
1906 static int data_training(struct dram_info *dram, u32 cs,
1907 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1908 			 u32 training_flag)
1909 {
1910 	u32 ret = 0;
1911 
1912 	if (training_flag == FULL_TRAINING)
1913 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1914 				WRITE_TRAINING | READ_TRAINING;
1915 
1916 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1917 		ret = data_training_wl(dram, cs,
1918 				       sdram_params->base.dramtype,
1919 				       sdram_params->ch.cap_info.rank);
1920 		if (ret != 0)
1921 			goto out;
1922 	}
1923 
1924 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1925 		ret = data_training_rg(dram, cs,
1926 				       sdram_params->base.dramtype);
1927 		if (ret != 0)
1928 			goto out;
1929 	}
1930 
1931 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1932 		ret = data_training_rd(dram, cs,
1933 				       sdram_params->base.dramtype,
1934 				       sdram_params->base.ddr_freq);
1935 		if (ret != 0)
1936 			goto out;
1937 	}
1938 
1939 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1940 		ret = data_training_wr(dram, cs,
1941 				       sdram_params->base.dramtype,
1942 				       sdram_params->base.ddr_freq, dst_fsp);
1943 		if (ret != 0)
1944 			goto out;
1945 	}
1946 
1947 out:
1948 	return ret;
1949 }
1950 
1951 static int get_wrlvl_val(struct dram_info *dram,
1952 			 struct rv1126_sdram_params *sdram_params)
1953 {
1954 	u32 i, j, clk_skew;
1955 	void __iomem *phy_base = dram->phy;
1956 	u32 lp_stat;
1957 	int ret;
1958 
1959 	lp_stat = low_power_update(dram, 0);
1960 
1961 	clk_skew = 0x1f;
1962 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1963 			 sdram_params->base.dramtype);
1964 
1965 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1966 	if (sdram_params->ch.cap_info.rank == 2)
1967 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1968 
1969 	for (j = 0; j < 2; j++)
1970 		for (i = 0; i < 4; i++)
1971 			wrlvl_result[j][i] =
1972 				readl(PHY_REG(phy_base,
1973 					      wrlvl_result_offset[j][i])) -
1974 				clk_skew;
1975 
1976 	low_power_update(dram, lp_stat);
1977 
1978 	return ret;
1979 }
1980 
1981 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1982 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1983 				      void __iomem *phy_base, u8 cs_num)
1984 {
1985 	int i;
1986 
1987 	result->cs_num = cs_num;
1988 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1989 			  PHY_DQ_WIDTH_MASK;
1990 	for (i = 0; i < FSP_NUM; i++)
1991 		result->fsp_mhz[i] = 0;
1992 }
1993 
/*
 * Copy the per-DQ read/write eye min/max values recorded by the PHY into
 * rd_result/wr_result, one byte lane at a time.  byte_en is a bitmask of
 * active byte lanes; lanes with their bit clear are skipped.
 *
 * Register layout (from the offsets below): each channel has a block at
 * 0x230 (low 16 bits) or 0x2b0 (high 16 bits), with the odd byte lane of
 * the pair offset by +0x9; within a block, +0x15 holds read minimums,
 * +0x27 read maximums, +0x3d write minimums, +0x4f write maximums.
 */
static void save_rw_trn_min_max(void __iomem *phy_base,
				struct cs_rw_trn_result *rd_result,
				struct cs_rw_trn_result *wr_result,
				u8 byte_en)
{
	u16 phy_ofs;
	u8 dqs;
	u8 dq;

	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
		if ((byte_en & BIT(dqs)) == 0)
			continue;

		/* Channel A or B (low or high 16 bit) */
		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
		/* low or high 8 bit */
		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
		for (dq = 0; dq < 8; dq++) {
			rd_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
			rd_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
			wr_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
			wr_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
		}
	}
}
2023 
/*
 * Snapshot the per-bit deskew settings for every chip select into
 * @result.  @rw selects the signal direction: SKEW_RX_SIGNAL reads the
 * RX deskew block (base + 0x1), otherwise the TX block (base + 0x17).
 * @min_val is stored alongside so tools can reconstruct absolute values.
 *
 * Per-CS register layout (from the offsets below): cs0 at 0x170, cs1 at
 * 0x1a0; DQS lanes 0/1 at +0x0/+0xb and lanes 2/3 at +0x60/+0x6b, with
 * eight DQ deskew bytes followed by the DQS deskew byte at +0x8.
 */
static void save_rw_trn_deskew(void __iomem *phy_base,
			       struct fsp_rw_trn_result *result, u8 cs_num,
			       int min_val, bool rw)
{
	u16 phy_ofs;
	u8 cs;
	u8 dq;

	result->min_val = min_val;

	for (cs = 0; cs < cs_num; cs++) {
		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
		for (dq = 0; dq < 8; dq++) {
			result->cs[cs].dqs[0].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + dq));
			result->cs[cs].dqs[1].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
			result->cs[cs].dqs[2].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
			result->cs[cs].dqs[3].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
		}

		result->cs[cs].dqs[0].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x8));
		result->cs[cs].dqs[1].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
		result->cs[cs].dqs[2].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
		result->cs[cs].dqs[3].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
	}
}
2058 
2059 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2060 {
2061 	result->flag = DDR_DQ_EYE_FLAG;
2062 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2063 }
2064 #endif
2065 
/*
 * Full high-frequency training sequence for one frequency set point (fsp):
 * derive CLK/CA bias from the earlier write-leveling results, run read
 * gate / read / write training per rank, then normalize the recorded
 * per-bit deskew values so the smallest becomes zero and re-run gate
 * training with the final skews.  Returns 0 on success, non-zero on any
 * training failure.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	/* Average the write-leveling results over active lanes and ranks */
	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       (1 << sdram_params->ch.cap_info.bw));

	/* Center CLK against the average DQS skew */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/*
		 * LPDDR4(X): only compensate when some leveling result went
		 * negative; shift CLK/CA up just enough to make it zero.
		 */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
				min_val = MIN(wrlvl_result[j][i], min_val);

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* Program cs0 per-byte DQS skews (0x233/0x237 ch A, 0x2b3/0x2b7 ch B) */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* Same skew registers are reused for cs1 training */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* Shift RX deskews down so the minimum becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* TX and CA share one shift: use the smaller of the two minima */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* Re-run gate training with the adjusted deskews */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2178 
2179 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2180 {
2181 	writel(ddrconfig, &dram->msch->deviceconf);
2182 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2183 }
2184 
/*
 * Recompute the NoC burst-related fields from the controller's actual
 * burst length and the detected bus width, then write the full set of
 * NoC timing registers into the memory scheduler (msch).
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* bw in bits (8/16/32); bl from MSTR[19:16] (burst_rdwr) * 2 */
	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	/* scale penalty up when one burst moves fewer than 16 bytes */
	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		/* masked-write size depends on bus width for LPDDR4(X) */
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
2227 
/*
 * Configure the DDR "split" feature for asymmetric configurations where
 * the high 16 bits of the bus back fewer rows than the low 16 bits.
 * Computes the capacity that is fully usable, programs the split size
 * (in 16MiB units) and mode into the DDR GRF, and disables the msch AXI
 * bypass.  Does nothing (and returns 0) for symmetric configurations.
 * Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* cs0 high half is smaller: scale cs0 cap down by the row delta */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
				cap_info->cs1_high16bit_row));
	} else {
		goto out;
	}
	/* split boundary expressed in 16MiB (1 << 24) granules */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route all msch AXI traffic through the split logic */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2273 
2274 static void split_bypass(struct dram_info *dram)
2275 {
2276 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2277 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2278 		return;
2279 
2280 	/* bypass split */
2281 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2282 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2283 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2284 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2285 		     (0x0 << SPLIT_SIZE_OFFSET));
2286 }
2287 
/*
 * Final system-level configuration after training: program ddrconfig,
 * encode the DRAM geometry into the pmugrf os_reg[2]/os_reg[3] scratch
 * registers (read later by kernel/loader), write the per-CS device size
 * into the msch (in 64MiB units), and refresh the NoC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/*
		 * cs bit position in the address map: ADDRMAP0[4:0] gives the
		 * internal base, +6 controller offset, +2 for bus width.
		 * Above bit 28 the cs0 window is padded to the cs boundary.
		 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize: per-CS capacity in 64MiB units, cs1 in bits [15:8] */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2320 
2321 static void enable_low_power(struct dram_info *dram,
2322 			     struct rv1126_sdram_params *sdram_params)
2323 {
2324 	void __iomem *pctl_base = dram->pctl;
2325 	u32 grf_lp_con;
2326 
2327 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2328 
2329 	if (sdram_params->base.dramtype == DDR4)
2330 		grf_lp_con = (0x7 << 16) | (1 << 1);
2331 	else if (sdram_params->base.dramtype == DDR3)
2332 		grf_lp_con = (0x7 << 16) | (1 << 0);
2333 	else
2334 		grf_lp_con = (0x7 << 16) | (1 << 2);
2335 
2336 	/* en lpckdis_en */
2337 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2338 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2339 
2340 	/* enable sr, pd */
2341 	if (dram->pd_idle == 0)
2342 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2343 	else
2344 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2345 	if (dram->sr_idle == 0)
2346 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2347 	else
2348 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2349 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2350 }
2351 
/*
 * Publish boot information via Rockchip ATAGs: the debug serial
 * configuration and the usable DDR memory banks (accounting for the
 * 3/4-row case and for capacity lost to the split feature).
 *
 * NOTE(review): t_socinfo is populated at the end but never passed to
 * atags_set_tag() here — either the SOC tag is intentionally unused or
 * a call is missing; confirm against the atags consumer.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;
	u32 split, split_size;
	u64 reduce_cap = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* start a fresh atags list with the serial tag first */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	if (cap_info->row_3_4) {
		/* 3/4-row parts expose only 3/4 of the nominal capacity */
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
		/* split active: half of everything above the boundary is lost */
		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* cs bit position in the address map (addrmap0 base 6 + 2) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	if (cs_cap[1] && cs_pst > 27) {
		/* cs1 starts at a power-of-two boundary: report two banks */
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2413 
2414 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2415 {
2416 	u32 split;
2417 
2418 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2419 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2420 		split = 0;
2421 	else
2422 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2423 			SPLIT_SIZE_MASK;
2424 
2425 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2426 			     &sdram_params->base, split);
2427 }
2428 
/*
 * Core bring-up sequence for one configuration attempt: clock/reset the
 * DDR subsystem, configure PHY and controller, release resets in stages,
 * write the LPDDR mode registers, and run read-gate training.  With
 * post_init set, cs1 is also trained and errors are reported verbosely.
 * Returns 0 on success, -1 on any training/verification failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* staged reset release: all held -> phy -> phy pll -> pctl */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	u32 tmp, trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait for the controller to leave the init state */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		/* replay the ODT/vref mode registers stored in INIT6/INIT7 */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4) {
		/*
		 * Read back MR14 and compare against 0x4d — presumably a
		 * sanity check that mode-register writes reach the device;
		 * confirm the expected value against the LPDDR4 part used.
		 */
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);

		if (mr_tmp != 0x4d)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* convert PHY vref step (reg 0x105) to controller units */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2547 
/*
 * Detect the DRAM geometry (column/bank/row counts, bus width, rank) by
 * probing: for non-LPDDR4 parts via the sdram_detect_* helpers, for
 * LPDDR4(X) by decoding density from MR8.  Rank and bus width are then
 * confirmed by re-running read-gate training with narrowed lane masks.
 *
 * Returns 0 on success.  Note the u64 return: the error path returns -1,
 * which callers must test with "!= 0" (as sdram_init_detect does), since
 * it is the all-ones u64 value, not a negative int.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;
	u32 i, dq_map;
	u32 byte1 = 0, byte0 = 0;

	/* start from the widest plausible bus for the type, narrow later */
	cap_info->bw = dram_type == DDR3 ? 0 : 1;
	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
		if (dram_type != DDR4) {
			/* upper probing bounds for col/bank/row detection */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			/* DDR4 fixes col=10, 4 banks in 2 bank groups */
			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4(X): density is encoded in MR8 bits [5:2] */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		/* odd encodings are the 3/4-row (x1.5) densities */
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* disable low-power entry while probing; restore afterwards */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank detection: cs1 exists iff gate training on it succeeds */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
		/* try full width first, then the mapped half-width pair */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0) {
			cap_info->bw = 2;
		} else {
			/* find which physical bytes map to logical 0 and 1 */
			dq_map = readl(PHY_REG(phy_base, 0x4f));
			for (i = 0; i < 4; i++) {
				if (((dq_map >> (i * 2)) & 0x3) == 0)
					byte0 = i;
				if (((dq_map >> (i * 2)) & 0x3) == 1)
					byte1 = i;
			}
			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
					BIT(byte0) | BIT(byte1));
			if (data_training(dram, 0, sdram_params, 0,
					  READ_GATE_TRAINING) == 0)
				cap_info->bw = 1;
			else
				cap_info->bw = 0;
		}
		if (cap_info->bw > 0)
			cap_info->dbw = 1;
	}

	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* assume symmetric rows until sdram_detect_high_row refines them */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2658 
2659 static int dram_detect_cs1_row(struct dram_info *dram,
2660 			       struct rv1126_sdram_params *sdram_params,
2661 			       unsigned char channel)
2662 {
2663 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2664 	void __iomem *pctl_base = dram->pctl;
2665 	u32 ret = 0;
2666 	void __iomem *test_addr;
2667 	u32 row, bktmp, coltmp, bw;
2668 	u64 cs0_cap;
2669 	u32 byte_mask;
2670 	u32 cs_pst;
2671 	u32 cs_add = 0;
2672 	u32 max_row;
2673 
2674 	if (cap_info->rank == 2) {
2675 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2676 			6 + 2;
2677 		if (cs_pst < 28)
2678 			cs_add = 1;
2679 
2680 		cs0_cap = 1 << cs_pst;
2681 
2682 		if (sdram_params->base.dramtype == DDR4) {
2683 			if (cap_info->dbw == 0)
2684 				bktmp = cap_info->bk + 2;
2685 			else
2686 				bktmp = cap_info->bk + 1;
2687 		} else {
2688 			bktmp = cap_info->bk;
2689 		}
2690 		bw = cap_info->bw;
2691 		coltmp = cap_info->col;
2692 
2693 		if (bw == 2)
2694 			byte_mask = 0xFFFF;
2695 		else
2696 			byte_mask = 0xFF;
2697 
2698 		max_row = (cs_pst == 31) ? 30 : 31;
2699 
2700 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2701 
2702 		row = (cap_info->cs0_row > max_row) ? max_row :
2703 			cap_info->cs0_row;
2704 
2705 		for (; row > 12; row--) {
2706 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2707 				    (u32)cs0_cap +
2708 				    (1ul << (row + bktmp + coltmp +
2709 					     cs_add + bw - 1ul)));
2710 
2711 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2712 			writel(PATTERN, test_addr);
2713 
2714 			if (((readl(test_addr) & byte_mask) ==
2715 			     (PATTERN & byte_mask)) &&
2716 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2717 			      byte_mask) == 0)) {
2718 				ret = row;
2719 				break;
2720 			}
2721 		}
2722 	}
2723 
2724 	return ret;
2725 }
2726 
2727 /* return: 0 = success, other = fail */
/* return: 0 = success, other = fail */
/*
 * Top-level detect-and-init: run a first sdram_init_ pass (retrying once
 * with a remapped DDR3 byte map on failure), verify DDR3 with a pattern
 * write, detect the real geometry, re-run init with corrected parameters,
 * then detect the cs1 row count and the split configuration.
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (sdram_init_(dram, sdram_params, 0)) {
		if (sdram_params->base.dramtype == DDR3) {
			/* retry DDR3 once with the alternate byte mapping */
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					(0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR3) {
		/* basic write/read sanity check at the DRAM base */
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	/* probe with split disabled so addresses map linearly */
	split_bypass(dram);
	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* encode the detected cs1 rows back into the os_regs */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
	split_setup(dram, sdram_params);
out:
	return ret;
}
2786 
2787 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2788 {
2789 	u32 i;
2790 	u32 offset = 0;
2791 	struct ddr2_3_4_lp2_3_info *ddr_info;
2792 
2793 	if (!freq_mhz) {
2794 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2795 		if (ddr_info)
2796 			freq_mhz =
2797 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2798 				DDR_FREQ_MASK;
2799 		else
2800 			freq_mhz = 0;
2801 	}
2802 
2803 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2804 		if (sdram_configs[i].base.ddr_freq == 0 ||
2805 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2806 			break;
2807 	}
2808 	offset = i == 0 ? 0 : i - 1;
2809 
2810 	return &sdram_configs[offset];
2811 }
2812 
/*
 * Controller registers whose values must be copied into the destination
 * frequency set point's register copy during pre_set_rate().
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2836 
/*
 * PHY timing registers (cl/cwl/al group) to copy into the destination
 * frequency set point's register area during pre_set_rate().
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2842 
2843 static void pre_set_rate(struct dram_info *dram,
2844 			 struct rv1126_sdram_params *sdram_params,
2845 			 u32 dst_fsp, u32 dst_fsp_lp4)
2846 {
2847 	u32 i, j, find;
2848 	void __iomem *pctl_base = dram->pctl;
2849 	void __iomem *phy_base = dram->phy;
2850 	u32 phy_offset;
2851 	u32 mr_tmp;
2852 	u32 dramtype = sdram_params->base.dramtype;
2853 
2854 	sw_set_req(dram);
2855 	/* pctl timing update */
2856 	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2857 		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2858 		     j++) {
2859 			if (sdram_params->pctl_regs.pctl[j][0] ==
2860 			    pctl_need_update_reg[i]) {
2861 				writel(sdram_params->pctl_regs.pctl[j][1],
2862 				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2863 				       pctl_need_update_reg[i]);
2864 				find = j;
2865 				break;
2866 			}
2867 		}
2868 	}
2869 
2870 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2871 	u32 tmp, trefi;
2872 
2873 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2874 	trefi = (tmp >> 16) & 0xfff;
2875 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2876 	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2877 #endif
2878 
2879 	sw_set_ack(dram);
2880 
2881 	/* phy timing update */
2882 	if (dst_fsp == 0)
2883 		phy_offset = 0;
2884 	else
2885 		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2886 	/* cl cwl al update */
2887 	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2888 		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2889 		     j++) {
2890 			if (sdram_params->phy_regs.phy[j][0] ==
2891 			    phy_need_update_reg[i]) {
2892 				writel(sdram_params->phy_regs.phy[j][1],
2893 				       phy_base + phy_offset +
2894 				       phy_need_update_reg[i]);
2895 				find = j;
2896 				break;
2897 			}
2898 		}
2899 	}
2900 
2901 	set_ds_odt(dram, sdram_params, dst_fsp);
2902 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2903 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2904 			       DDR_PCTL2_INIT4);
2905 		/* MR13 */
2906 		pctl_write_mr(dram->pctl, 3, 13,
2907 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2908 			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2909 			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2910 		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2911 				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2912 				      ((0x2 << 6) >> dst_fsp_lp4),
2913 				       PHY_REG(phy_base, 0x1b));
2914 		/* MR3 */
2915 		pctl_write_mr(dram->pctl, 3, 3,
2916 			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2917 			      PCTL2_MR_MASK,
2918 			      dramtype);
2919 		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2920 		       PHY_REG(phy_base, 0x19));
2921 
2922 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2923 			       DDR_PCTL2_INIT3);
2924 		/* MR1 */
2925 		pctl_write_mr(dram->pctl, 3, 1,
2926 			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
2927 			      PCTL2_MR_MASK,
2928 			      dramtype);
2929 		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
2930 		       PHY_REG(phy_base, 0x17));
2931 		/* MR2 */
2932 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
2933 			      dramtype);
2934 		writel(mr_tmp & PCTL2_MR_MASK,
2935 		       PHY_REG(phy_base, 0x18));
2936 
2937 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2938 			       DDR_PCTL2_INIT6);
2939 		/* MR11 */
2940 		pctl_write_mr(dram->pctl, 3, 11,
2941 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2942 			      dramtype);
2943 		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2944 		       PHY_REG(phy_base, 0x1a));
2945 		/* MR12 */
2946 		pctl_write_mr(dram->pctl, 3, 12,
2947 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2948 			      dramtype);
2949 
2950 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2951 			       DDR_PCTL2_INIT7);
2952 		/* MR22 */
2953 		pctl_write_mr(dram->pctl, 3, 22,
2954 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2955 			      dramtype);
2956 		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2957 		       PHY_REG(phy_base, 0x1d));
2958 		/* MR14 */
2959 		pctl_write_mr(dram->pctl, 3, 14,
2960 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2961 			      dramtype);
2962 		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2963 		       PHY_REG(phy_base, 0x1c));
2964 	}
2965 
2966 	update_noc_timing(dram, sdram_params);
2967 }
2968 
/*
 * save_fsp_param() - capture the working drive/ODT/vref settings of one
 * frequency set point (FSP) into the global fsp_param[] table.
 *
 * The snapshot is read back from the PHY drive/ODT/vref registers and
 * from the controller's per-frequency INIT3/INIT4/INIT6 shadow
 * registers; which mode-register fields are meaningful depends on the
 * DRAM type.  The table is later copied to DDR (copy_fsp_param_to_ddr)
 * so frequency-scaling code in later stages can reuse these settings.
 *
 * @dram:	driver state (pctl/phy base addresses)
 * @dst_fsp:	frequency set point index being saved; selects which
 *		UMCTL2 register copy is read back
 * @sdram_params: parameters this FSP was brought up with
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/*
	 * LPDDR4/4X read ODT uses the pull-down leg only; other types
	 * follow the board's odt_info configuration bits.
	 */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* read the ODT strength back from whichever leg is enabled */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	/* PHY drive-strength and vref registers as left by init/training */
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/*
	 * Decode driver-strength / ODT mode-register fields from the
	 * per-frequency INIT shadow registers; field layout is per type.
	 */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		/* LPDDR3 ODT comes from the module-level lp3_odt_value */
		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * CA vref per rank: midpoint of the max/min values read
		 * from two pairs of PHY result registers, with bit 6 of
		 * PHY reg 0x1e OR-ed in (presumably a vref range-select
		 * bit — TODO confirm against PHY documentation).
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* cache the NoC scheduler timings that belong to this FSP */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark the entry valid for consumers of the saved table */
	p_fsp_param->flag = FSP_FLAG;
}
3084 
3085 #ifndef CONFIG_SPL_KERNEL_BOOT
3086 static void copy_fsp_param_to_ddr(void)
3087 {
3088 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3089 	       sizeof(fsp_param));
3090 }
3091 #endif
3092 
/*
 * pctl_modify_trfc() - recompute refresh-related timings for the
 * detected die capacity and patch them into a pctl register table.
 *
 * The canned register tables are generated for one density, but tRFC
 * (and tRFC4 for DDR4) depend on the per-die capacity.  Look up the
 * value in ns for the detected density, convert to clocks at @freq,
 * and rewrite RFSHTMG (t_rfc_min), DRAMTMG8 (t_xs_* for DDR3/4) and
 * DRAMTMG14 (t_xsr for LPDDR) in place.
 *
 * @pctl_regs: address/value table terminated by an 0xffffffff address
 * @cap_info:  detected capacity layout, used to size a single die
 * @dram_type: DDR3/DDR4/LPDDR3/LPDDR4/LPDDR4X; any other type leaves
 *	       the table untouched
 * @freq:      target DRAM frequency in MHz
 */
static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
			     struct sdram_cap_info *cap_info, u32 dram_type,
			     u32 freq)
{
	u64 cs0_cap;
	u32 die_cap;
	u32 trfc_ns, trfc4_ns;
	u32 trfc, txsnr;
	u32 txs_abort_fast = 0;
	u32 tmp;

	/* per-die capacity in Mbit: cs0 bytes scaled by bus/die width */
	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));

	switch (dram_type) {
	case DDR3:
		if (die_cap <= DIE_CAP_512MBIT)
			trfc_ns = 90;
		else if (die_cap <= DIE_CAP_1GBIT)
			trfc_ns = 110;
		else if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 160;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 260;
		else
			trfc_ns = 350;
		/* exit self-refresh: tRFC + 10ns, at least 5 clocks */
		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	case DDR4:
		if (die_cap <= DIE_CAP_2GBIT) {
			trfc_ns = 160;
			trfc4_ns = 90;
		} else if (die_cap <= DIE_CAP_4GBIT) {
			trfc_ns = 260;
			trfc4_ns = 110;
		} else if (die_cap <= DIE_CAP_8GBIT) {
			trfc_ns = 350;
			trfc4_ns = 160;
		} else {
			trfc_ns = 550;
			trfc4_ns = 260;
		}
		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
		/* fast/abort exits use the shorter tRFC4-based bound */
		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
		break;

	case LPDDR3:
		if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 130;
		else
			trfc_ns = 210;
		/* tXSR: tRFCab + 10ns, at least 2 clocks */
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	case LPDDR4:
	case LPDDR4X:
		if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 130;
		else if (die_cap <= DIE_CAP_8GBIT)
			trfc_ns = 180;
		else if (die_cap <= DIE_CAP_16GBIT)
			trfc_ns = 280;
		else
			trfc_ns = 380;
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	default:
		/* unknown type: leave the canned table unchanged */
		return;
	}
	/* tRFC in clocks, rounded up */
	trfc = (trfc_ns * freq + 999) / 1000;

	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
		switch (pctl_regs->pctl[i][0]) {
		case DDR_PCTL2_RFSHTMG:
			tmp = pctl_regs->pctl[i][1];
			/* t_rfc_min; /2 matches the register's clock unit */
			tmp &= ~((u32)0x3ff);
			tmp |= ((trfc + 1) / 2) & 0x3ff;
			pctl_regs->pctl[i][1] = tmp;
			break;

		case DDR_PCTL2_DRAMTMG8:
			if (dram_type == DDR3 || dram_type == DDR4) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xs_x32 */
				tmp &= ~((u32)0x7f);
				tmp |= ((txsnr + 63) / 64) & 0x7f;

				if (dram_type == DDR4) {
					/* t_xs_abort_x32 */
					tmp &= ~((u32)(0x7f << 16));
					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
					/* t_xs_fast_x32 */
					tmp &= ~((u32)(0x7f << 24));
					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
				}

				pctl_regs->pctl[i][1] = tmp;
			}
			break;

		case DDR_PCTL2_DRAMTMG14:
			if (dram_type == LPDDR3 ||
			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xsr */
				tmp &= ~((u32)0xfff);
				tmp |= ((txsnr + 1) / 2) & 0xfff;
				pctl_regs->pctl[i][1] = tmp;
			}
			break;

		default:
			break;
		}
	}
}
3212 
/*
 * ddr_set_rate() - switch the DDR subsystem to a new frequency.
 *
 * Sequence: build and pre-program the timing set for @dst_fsp, enter
 * self-refresh, re-lock the DPLL and PHY PLL with the PHY held in
 * reset and controller clocks gated, switch the controller to the
 * destination frequency set point, exit self-refresh, rewrite the
 * mode registers for the new speed, then re-train and save the FSP
 * parameters.
 *
 * @dram:	  driver state
 * @sdram_params: current parameters (rank/width copied into new set)
 * @freq:	  target frequency in MHz
 * @cur_freq:	  current frequency in MHz — NOTE(review): not used in
 *		  this body
 * @dst_fsp:	  destination controller frequency set point (0..3)
 * @dst_fsp_lp4:  destination LPDDR4 FSP selector (shifts/sets MR13 bits)
 * @training_en:  NOTE(review): not checked here; high_freq_training()
 *		  is always invoked — confirm intent with callers
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* disable low-power modes for the duration; restored at the end */
	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	/* fix up capacity-dependent refresh timings in the new table */
	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * Decide DLL-off mode from MR1 bit 0 of the destination FSP:
	 * the bit's polarity differs between DDR3 (1 = DLL disabled)
	 * and DDR4 (0 = DLL disabled).
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* if the DLL is currently on, turn it off via MR1 before SR entry */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	/* hold off auto-refresh while switching */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* buffer DDR PHY I/O while the PLLs are being re-locked */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both current and target FSP */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate scheduler and controller clocks during the PLL switch */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* re-lock DPLL and PHY PLL with the PHY held in reset */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* un-buffer PHY I/O and ungate the clocks again */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* wait for the DFI handshake to complete at the new clock */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* select the destination FSP in controller and PHY */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit 5 (presumably a FIFO/latch reset —
	 * TODO confirm against PHY documentation) */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* reprogram mode registers from the destination FSP's INIT regs */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* with the DLL staying on, issue a DLL reset via MR0 */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* MR13: keep INIT4 value but overwrite the FSP-OP bit */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	/* re-enable auto-refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3395 
/*
 * ddr_set_rate_for_fsp() - walk the configured frequency set points.
 *
 * Reads f0..f3 (MHz) from the per-type drv/odt info blob, switches the
 * DRAM through f1, f2 and f3 in turn so save_fsp_param() captures
 * working settings for every FSP, then settles on f0 as the runtime
 * frequency.  In CONFIG_SPL_KERNEL_BOOT builds only f0 is programmed
 * and no FSP table is cleared.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* start from a clean FSP table, both in SRAM and at the DDR copy */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	/* fetch write-leveling values; failure is only logged */
	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3452 
3453 int get_uart_config(void)
3454 {
3455 	struct sdram_head_info_index_v2 *index =
3456 		(struct sdram_head_info_index_v2 *)common_info;
3457 	struct global_info *gbl_info;
3458 
3459 	gbl_info = (struct global_info *)((void *)common_info +
3460 		index->global_index.offset * 4);
3461 
3462 	return gbl_info->uart_info;
3463 }
3464 
3465 /* return: 0 = success, other = fail */
3466 int sdram_init(void)
3467 {
3468 	struct rv1126_sdram_params *sdram_params;
3469 	int ret = 0;
3470 	struct sdram_head_info_index_v2 *index =
3471 		(struct sdram_head_info_index_v2 *)common_info;
3472 	struct global_info *gbl_info;
3473 
3474 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3475 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3476 	dram_info.grf = (void *)GRF_BASE_ADDR;
3477 	dram_info.cru = (void *)CRU_BASE_ADDR;
3478 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3479 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3480 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3481 
3482 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3483 	printascii("extended temp support\n");
3484 #endif
3485 	if (index->version_info != 2 ||
3486 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3487 	    (index->ddr3_index.size !=
3488 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3489 	    (index->ddr4_index.size !=
3490 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3491 	    (index->lp3_index.size !=
3492 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3493 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3494 	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3495 	    index->global_index.offset == 0 ||
3496 	    index->ddr3_index.offset == 0 ||
3497 	    index->ddr4_index.offset == 0 ||
3498 	    index->lp3_index.offset == 0 ||
3499 	    index->lp4_index.offset == 0 ||
3500 	    index->lp4x_index.offset == 0) {
3501 		printascii("common info error\n");
3502 		goto error;
3503 	}
3504 
3505 	gbl_info = (struct global_info *)((void *)common_info +
3506 		index->global_index.offset * 4);
3507 
3508 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3509 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3510 
3511 	sdram_params = &sdram_configs[0];
3512 	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
3513 	for (j = 0; j < ARRAY_SIZE(sdram_configs); j++)
3514 		sdram_configs[j].base.dramtype = LPDDR4X;
3515 	#endif
3516 	if (sdram_params->base.dramtype == DDR3 ||
3517 	    sdram_params->base.dramtype == DDR4) {
3518 		if (DDR_2T_INFO(gbl_info->info_2t))
3519 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3520 		else
3521 			sdram_params->pctl_regs.pctl[0][1] &=
3522 				~(0x1 << 10);
3523 	}
3524 	ret = sdram_init_detect(&dram_info, sdram_params);
3525 	if (ret) {
3526 		sdram_print_dram_type(sdram_params->base.dramtype);
3527 		printascii(", ");
3528 		printdec(sdram_params->base.ddr_freq);
3529 		printascii("MHz\n");
3530 		goto error;
3531 	}
3532 	print_ddr_info(sdram_params);
3533 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3534 	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3535 				  (u8)sdram_params->ch.cap_info.rank);
3536 #endif
3537 
3538 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3539 #ifndef CONFIG_SPL_KERNEL_BOOT
3540 	copy_fsp_param_to_ddr();
3541 #endif
3542 
3543 	ddr_set_atags(&dram_info, sdram_params);
3544 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3545 	save_rw_trn_result_to_ddr(&rw_trn_result);
3546 #endif
3547 
3548 	printascii("out\n");
3549 
3550 	return ret;
3551 error:
3552 	printascii("error\n");
3553 	return (-1);
3554 }
3555 #endif /* CONFIG_TPL_BUILD */
3556