xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 74eb6027432600de60ed1c8bf892f1f8243c2c8a)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/*
 * Runtime context for the RV1126 DRAM init: register bases for the DDR
 * controller and PHY plus the CRU/GRF/scheduler blocks the code below
 * programs.
 */
struct dram_info {
	void __iomem *pctl;		/* DDR controller (UPCTL2) register base */
	void __iomem *phy;		/* DDR PHY register base */
	struct rv1126_cru *cru;		/* clock/reset unit (PLL, soft resets) */
	struct msch_regs *msch;		/* memory scheduler registers */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system general register file */
	struct ram_info info;		/* RAM base/size info */
	struct rv1126_pmugrf *pmugrf;	/* PMU general register file */
	u32 sr_idle;			/* self-refresh idle timeout (units per ctrl spec - confirm) */
	u32 pd_idle;			/* power-down idle timeout (units per ctrl spec - confirm) */
};
55 
56 #define GRF_BASE_ADDR			0xfe000000
57 #define PMU_GRF_BASE_ADDR		0xfe020000
58 #define DDR_GRF_BASE_ADDR		0xfe030000
59 #define BUS_SGRF_BASE_ADDR		0xfe0a0000
60 #define SERVER_MSCH_BASE_ADDR		0xfe800000
61 #define CRU_BASE_ADDR			0xff490000
62 #define DDR_PHY_BASE_ADDR		0xff4a0000
63 #define UPCTL2_BASE_ADDR		0xffa50000
64 
65 #define SGRF_SOC_CON12			0x30
66 #define SGRF_SOC_CON13			0x34
67 
68 struct dram_info dram_info;
69 
/*
 * Pre-generated parameter sets for the DRAM type selected at build time
 * (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE: 3 = DDR3, 0 = DDR4, 6 = LPDDR3,
 * 7 = LPDDR4), one entry per supported frequency in ascending order
 * (328..1056 MHz, per the .inc file names).
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
111 
/*
 * Loader parameter blob.  Interpreted as a struct sdram_head_info_index_v2
 * header followed by per-DRAM-type drive/ODT/Vref tables, located through
 * the index word-offsets (see get_ddr_drv_odt_info()).
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

/* Per frequency-set-point parameters recorded during init */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* Cached LPDDR3 ODT value; writer/reader not visible in this chunk */
static u8 lp3_odt_value;

/* Write-leveling results; presumably [rank][byte lane] - filled elsewhere */
static s8 wrlvl_result[2][4];
121 
/*
 * DDR configuration 0-9 (non-DDR4 types).
 * Field encoding, as matched in calculate_ddrconfig():
 *   bit 8    : rank - 1
 *   bits 7:5 : cs0_row - 13
 *   bit 4    : alternate mapping variant (only reached via the dual-rank
 *              special case; exact meaning not visible in this chunk)
 *   bit 3    : set when bank == 3 (i.e. 8 banks)
 *   bits 2:0 : bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
135 
/*
 * DDR configuration 10-21 (DDR4 only).
 * Field encoding, as matched in calculate_ddrconfig():
 *   bit 7    : rank - 1
 *   bits 6:4 : cs0_row - 13
 *   bit 3    : dual-rank with cs0_row == cs1_row (symmetric ranks)
 *   bits 2:1 : bus width (bw)
 *   bit 0    : die width (dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
151 
/*
 * DDR configuration 22-28; same bit encoding as ddr_cfg_2_rbc[] above.
 * In this chunk these indexes are only produced via the DDR3 fallback
 * (config 23) and the DDR4 -> generic remap table d4_rbc_2_d3_rbc[].
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
162 
/*
 * {DDR4 ddrconfig (10-21), equivalent generic ddrconfig} pairs.
 * calculate_ddrconfig() translates a DDR4 result into the generic
 * namespace; set_ctl_address_map() translates it back to index addrmap[].
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
177 
/*
 * Controller address-map register values, one row per ddrconfig (the
 * trailing comment is the config index).  Each row is copied verbatim
 * as 9 words starting at DDR_PCTL2_ADDRMAP0 by set_ctl_address_map(),
 * i.e. columns are ADDRMAP0..ADDRMAP8 values.  Note: only 23 rows
 * (indexes 0-22) - set_ctl_address_map() must bounds-check the index.
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
228 
/*
 * Per-DQ-line selection/routing table.  Its consumer is not visible in
 * this chunk (presumably the deskew code, given the SKEW_* definitions
 * above); column semantics unconfirmed - treat as opaque data.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
253 
/*
 * Base offsets of the four address-group skew register blocks
 * (CS0/CS1 x A/B); consumers are outside this chunk.
 */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
260 
/*
 * PHY register offsets holding write-leveling results, indexed
 * [rank][byte lane] (matches the shape of wrlvl_result[2][4] above);
 * the 0xa0/0xd0 bases presumably select the PHY byte-lane banks -
 * confirm against the PHY register map.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/*
 * Skew-update register offsets for each DQS, RX first then TX,
 * ordered CS0 DQS0..3 then CS1 DQS0..3 within each direction.
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
284 
/*
 * Drive the DDR controller and PHY reset lines.
 *
 * Controller core/APB resets go through the bus SGRF (SOC_CON13); the
 * controller AXI reset follows ctl_srstn.  PHY core/APB resets go
 * through CRU softrst_con[12].  Callers pass the raw request values for
 * each line; reset polarity is defined by the *_REQ macros.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
296 
/*
 * Program the DPLL to output @hz.
 *
 * Divider selection assumes a 24 MHz reference (the /24 in the fbdiv
 * calculation); postdiv1/2 are picked per frequency band so fbdiv stays
 * in range.  The PLL is switched to the crystal while being
 * reprogrammed and back to PLL output afterwards.  Lock is polled for
 * up to ~1000 us; on timeout the switch-back still happens and no error
 * is reported.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;

	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	/* 0x1f000000 looks like the upper-16-bit write-enable convention
	 * of Rockchip CRU registers - confirm against the TRM */
	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
341 
/*
 * Set the DDR clock for the requested data rate.
 * The inno DDR PHY is clocked at half the DRAM frequency, hence the /2.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
348 
/*
 * Select a ddrconfig index matching the detected DRAM geometry
 * (rank/row/col/bank/width from cap_info).
 *
 * A candidate config must match the low bits (bank/column/width key)
 * exactly and be able to hold at least as many row/rank bits as the
 * device provides (the "<=" comparisons on the masked high fields).
 * DDR4 results are translated into the generic config namespace via
 * d4_rbc_2_d3_rbc[] before returning.
 *
 * Returns the config index; if nothing matched, an error is printed and
 * the initial value (u32-wrapped -1) is returned - the caller must
 * treat out-of-range values as failure.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank parts: try the dedicated
		 * configs 17-20 first (bit 3 set in the key) */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic DDR4 scan over configs 10-20 */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank 8-bank parts: try configs 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic scan over configs 0-8 */
		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}
		/* single-rank 8-bank 12-bit-effective-column fallback */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* DDR4: report the generic equivalent of the DDR4 config */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
442 
/*
 * Open a quasi-dynamic register programming window on the controller by
 * clearing SWCTL.sw_done.  Must be paired with sw_set_ack().
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
450 
451 static void sw_set_ack(struct dram_info *dram)
452 {
453 	void __iomem *pctl_base = dram->pctl;
454 
455 	/* set sw_done=1 */
456 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
457 	while (1) {
458 		/* wait programming done */
459 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
460 				PCTL2_SW_DONE_ACK)
461 			break;
462 	}
463 }
464 
465 static void set_ctl_address_map(struct dram_info *dram,
466 				struct rv1126_sdram_params *sdram_params)
467 {
468 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
469 	void __iomem *pctl_base = dram->pctl;
470 	u32 ddrconf = cap_info->ddrconfig;
471 	u32 i, row;
472 
473 	row = cap_info->cs0_row;
474 	if (sdram_params->base.dramtype == DDR4) {
475 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
476 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
477 				ddrconf = d4_rbc_2_d3_rbc[i][0];
478 				break;
479 			}
480 		}
481 	}
482 
483 	if (ddrconf > ARRAY_SIZE(addrmap)) {
484 		printascii("set ctl address map fail\n");
485 		return;
486 	}
487 
488 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
489 			  &addrmap[ddrconf][0], 9 * 4);
490 
491 	/* unused row set to 0xf */
492 	for (i = 17; i >= row; i--)
493 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
494 			((i - 12) * 8 / 32) * 4,
495 			0xf << ((i - 12) * 8 % 32));
496 
497 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
498 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
499 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
500 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
501 
502 	if (cap_info->rank == 1)
503 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
504 }
505 
/*
 * Configure or lock-wait the PHY PLL.
 *
 * wait != 0: power the PLL up (clear PD) and spin until the lock bit in
 * PHY reg 0x90 is set (no timeout).
 * wait == 0: program the dividers for @freq (Hz) in three bands; the
 * PLL is not polled here - a later call with wait set does that.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is 9 bits: low 8 in reg 0x50, bit 8 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
544 
/*
 * DDR3 PHY drive strength: {register setting, strength in ohm}, sorted
 * by descending ohm.  set_ds_odt() scans from the last (smallest-ohm)
 * entry upwards and takes the first whose ohm value >= the request, so
 * the ordering is load-bearing.  Note set_ds_odt() uses
 * ARRAY_SIZE(d3_phy_drv_2_ohm) as the bound for every drv table, so all
 * *_phy_drv_2_ohm tables must keep the same entry count.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
570 
571 static u16 d3_phy_odt_2_ohm[][2] = {
572 	{PHY_DDR3_RTT_DISABLE, 0},
573 	{PHY_DDR3_RTT_561ohm, 561},
574 	{PHY_DDR3_RTT_282ohm, 282},
575 	{PHY_DDR3_RTT_188ohm, 188},
576 	{PHY_DDR3_RTT_141ohm, 141},
577 	{PHY_DDR3_RTT_113ohm, 113},
578 	{PHY_DDR3_RTT_94ohm, 94},
579 	{PHY_DDR3_RTT_81ohm, 81},
580 	{PHY_DDR3_RTT_72ohm, 72},
581 	{PHY_DDR3_RTT_64ohm, 64},
582 	{PHY_DDR3_RTT_58ohm, 58},
583 	{PHY_DDR3_RTT_52ohm, 52},
584 	{PHY_DDR3_RTT_48ohm, 48},
585 	{PHY_DDR3_RTT_44ohm, 44},
586 	{PHY_DDR3_RTT_41ohm, 41},
587 	{PHY_DDR3_RTT_38ohm, 38},
588 	{PHY_DDR3_RTT_37ohm, 37},
589 	{PHY_DDR3_RTT_34ohm, 34},
590 	{PHY_DDR3_RTT_32ohm, 32},
591 	{PHY_DDR3_RTT_31ohm, 31},
592 	{PHY_DDR3_RTT_29ohm, 29},
593 	{PHY_DDR3_RTT_28ohm, 28},
594 	{PHY_DDR3_RTT_27ohm, 27},
595 	{PHY_DDR3_RTT_25ohm, 25}
596 };
597 
598 static u16 d4lp3_phy_drv_2_ohm[][2] = {
599 	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
600 	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
601 	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
602 	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
603 	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
604 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
605 	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
606 	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
607 	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
608 	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
609 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
610 	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
611 	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
612 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
613 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
614 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
615 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
616 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
617 	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
618 	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
619 	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
620 	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
621 	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
622 };
623 
624 static u16 d4lp3_phy_odt_2_ohm[][2] = {
625 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
626 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
627 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
628 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
629 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
630 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
631 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
632 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
633 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
634 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
635 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
636 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
637 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
638 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
639 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
640 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
641 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
642 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
643 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
644 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
645 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
646 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
647 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
648 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
649 };
650 
651 static u16 lp4_phy_drv_2_ohm[][2] = {
652 	{PHY_LPDDR4_RON_501ohm, 501},
653 	{PHY_LPDDR4_RON_253ohm, 253},
654 	{PHY_LPDDR4_RON_168ohm, 168},
655 	{PHY_LPDDR4_RON_126ohm, 126},
656 	{PHY_LPDDR4_RON_101ohm, 101},
657 	{PHY_LPDDR4_RON_84ohm, 84},
658 	{PHY_LPDDR4_RON_72ohm, 72},
659 	{PHY_LPDDR4_RON_63ohm, 63},
660 	{PHY_LPDDR4_RON_56ohm, 56},
661 	{PHY_LPDDR4_RON_50ohm, 50},
662 	{PHY_LPDDR4_RON_46ohm, 46},
663 	{PHY_LPDDR4_RON_42ohm, 42},
664 	{PHY_LPDDR4_RON_38ohm, 38},
665 	{PHY_LPDDR4_RON_36ohm, 36},
666 	{PHY_LPDDR4_RON_33ohm, 33},
667 	{PHY_LPDDR4_RON_31ohm, 31},
668 	{PHY_LPDDR4_RON_29ohm, 29},
669 	{PHY_LPDDR4_RON_28ohm, 28},
670 	{PHY_LPDDR4_RON_26ohm, 26},
671 	{PHY_LPDDR4_RON_25ohm, 25},
672 	{PHY_LPDDR4_RON_24ohm, 24},
673 	{PHY_LPDDR4_RON_23ohm, 23},
674 	{PHY_LPDDR4_RON_22ohm, 22}
675 };
676 
677 static u16 lp4_phy_odt_2_ohm[][2] = {
678 	{PHY_LPDDR4_RTT_DISABLE, 0},
679 	{PHY_LPDDR4_RTT_604ohm, 604},
680 	{PHY_LPDDR4_RTT_303ohm, 303},
681 	{PHY_LPDDR4_RTT_202ohm, 202},
682 	{PHY_LPDDR4_RTT_152ohm, 152},
683 	{PHY_LPDDR4_RTT_122ohm, 122},
684 	{PHY_LPDDR4_RTT_101ohm, 101},
685 	{PHY_LPDDR4_RTT_87ohm,	87},
686 	{PHY_LPDDR4_RTT_78ohm, 78},
687 	{PHY_LPDDR4_RTT_69ohm, 69},
688 	{PHY_LPDDR4_RTT_62ohm, 62},
689 	{PHY_LPDDR4_RTT_56ohm, 56},
690 	{PHY_LPDDR4_RTT_52ohm, 52},
691 	{PHY_LPDDR4_RTT_48ohm, 48},
692 	{PHY_LPDDR4_RTT_44ohm, 44},
693 	{PHY_LPDDR4_RTT_41ohm, 41},
694 	{PHY_LPDDR4_RTT_39ohm, 39},
695 	{PHY_LPDDR4_RTT_37ohm, 37},
696 	{PHY_LPDDR4_RTT_35ohm, 35},
697 	{PHY_LPDDR4_RTT_33ohm, 33},
698 	{PHY_LPDDR4_RTT_32ohm, 32},
699 	{PHY_LPDDR4_RTT_30ohm, 30},
700 	{PHY_LPDDR4_RTT_29ohm, 29},
701 	{PHY_LPDDR4_RTT_27ohm, 27}
702 };
703 
704 static u32 lp4_odt_calc(u32 odt_ohm)
705 {
706 	u32 odt;
707 
708 	if (odt_ohm == 0)
709 		odt = LPDDR4_DQODT_DIS;
710 	else if (odt_ohm <= 40)
711 		odt = LPDDR4_DQODT_40;
712 	else if (odt_ohm <= 48)
713 		odt = LPDDR4_DQODT_48;
714 	else if (odt_ohm <= 60)
715 		odt = LPDDR4_DQODT_60;
716 	else if (odt_ohm <= 80)
717 		odt = LPDDR4_DQODT_80;
718 	else if (odt_ohm <= 120)
719 		odt = LPDDR4_DQODT_120;
720 	else
721 		odt = LPDDR4_DQODT_240;
722 
723 	return odt;
724 }
725 
726 static void *get_ddr_drv_odt_info(u32 dramtype)
727 {
728 	struct sdram_head_info_index_v2 *index =
729 		(struct sdram_head_info_index_v2 *)common_info;
730 	void *ddr_info = 0;
731 
732 	if (dramtype == DDR4)
733 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
734 	else if (dramtype == DDR3)
735 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
736 	else if (dramtype == LPDDR3)
737 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
738 	else if (dramtype == LPDDR4)
739 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
740 	else
741 		printascii("unsupported dram type\n");
742 	return ddr_info;
743 }
744 
745 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
746 			 u32 freq_mhz, u32 dst_fsp)
747 {
748 	void __iomem *pctl_base = dram->pctl;
749 	u32 ca_vref, dq_vref;
750 
751 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
752 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
753 	else
754 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
755 
756 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
757 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
758 	else
759 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
760 
761 	if (ca_vref < 100)
762 		ca_vref = 100;
763 	if (ca_vref > 420)
764 		ca_vref = 420;
765 
766 	if (ca_vref <= 300)
767 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
768 	else
769 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
770 
771 	if (dq_vref < 100)
772 		dq_vref = 100;
773 	if (dq_vref > 420)
774 		dq_vref = 420;
775 
776 	if (dq_vref <= 300)
777 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
778 	else
779 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
780 
781 	sw_set_req(dram);
782 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
783 			DDR_PCTL2_INIT6,
784 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
785 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
786 
787 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
788 			DDR_PCTL2_INIT7,
789 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
790 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
791 	sw_set_ack(dram);
792 }
793 
794 static void set_ds_odt(struct dram_info *dram,
795 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
796 {
797 	void __iomem *phy_base = dram->phy;
798 	void __iomem *pctl_base = dram->pctl;
799 	u32 dramtype = sdram_params->base.dramtype;
800 	struct ddr2_3_4_lp2_3_info *ddr_info;
801 	struct lp4_info *lp4_info;
802 	u32 i, j, tmp;
803 	const u16 (*p_drv)[2];
804 	const u16 (*p_odt)[2];
805 	u32 drv_info, sr_info;
806 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
807 	u32 phy_odt_ohm, dram_odt_ohm;
808 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
809 	u32 phy_odt_up_en, phy_odt_dn_en;
810 	u32 sr_dq, sr_clk;
811 	u32 freq = sdram_params->base.ddr_freq;
812 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
813 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
814 	u32 phy_dq_drv = 0;
815 	u32 phy_odt_up = 0, phy_odt_dn = 0;
816 
817 	ddr_info = get_ddr_drv_odt_info(dramtype);
818 	lp4_info = (void *)ddr_info;
819 
820 	if (!ddr_info)
821 		return;
822 
823 	/* dram odt en freq control phy drv, dram odt and phy sr */
824 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
825 		drv_info = ddr_info->drv_when_odtoff;
826 		dram_odt_ohm = 0;
827 		sr_info = ddr_info->sr_when_odtoff;
828 		phy_lp4_drv_pd_en =
829 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
830 	} else {
831 		drv_info = ddr_info->drv_when_odten;
832 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
833 		sr_info = ddr_info->sr_when_odten;
834 		phy_lp4_drv_pd_en =
835 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
836 	}
837 	phy_dq_drv_ohm =
838 		DRV_INFO_PHY_DQ_DRV(drv_info);
839 	phy_clk_drv_ohm =
840 		DRV_INFO_PHY_CLK_DRV(drv_info);
841 	phy_ca_drv_ohm =
842 		DRV_INFO_PHY_CA_DRV(drv_info);
843 
844 	sr_dq = DQ_SR_INFO(sr_info);
845 	sr_clk = CLK_SR_INFO(sr_info);
846 
847 	/* phy odt en freq control dram drv and phy odt */
848 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
849 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
850 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
851 		phy_odt_ohm = 0;
852 		phy_odt_up_en = 0;
853 		phy_odt_dn_en = 0;
854 	} else {
855 		dram_drv_ohm =
856 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
857 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
858 		phy_odt_up_en =
859 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
860 		phy_odt_dn_en =
861 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
862 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
863 	}
864 
865 	if (dramtype == LPDDR4) {
866 		if (phy_odt_ohm) {
867 			phy_odt_up_en = 0;
868 			phy_odt_dn_en = 1;
869 		}
870 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
871 			dram_caodt_ohm = 0;
872 		else
873 			dram_caodt_ohm =
874 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
875 	}
876 
877 	if (dramtype == DDR3) {
878 		p_drv = d3_phy_drv_2_ohm;
879 		p_odt = d3_phy_odt_2_ohm;
880 	} else if (dramtype == LPDDR4) {
881 		p_drv = lp4_phy_drv_2_ohm;
882 		p_odt = lp4_phy_odt_2_ohm;
883 	} else {
884 		p_drv = d4lp3_phy_drv_2_ohm;
885 		p_odt = d4lp3_phy_odt_2_ohm;
886 	}
887 
888 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
889 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
890 			phy_dq_drv = **(p_drv + i);
891 			break;
892 		}
893 		if (i == 0)
894 			break;
895 	}
896 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
897 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
898 			phy_clk_drv = **(p_drv + i);
899 			break;
900 		}
901 		if (i == 0)
902 			break;
903 	}
904 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
905 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
906 			phy_ca_drv = **(p_drv + i);
907 			break;
908 		}
909 		if (i == 0)
910 			break;
911 	}
912 	if (!phy_odt_ohm)
913 		phy_odt = 0;
914 	else
915 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
916 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
917 				phy_odt = **(p_odt + i);
918 				break;
919 			}
920 			if (i == 0)
921 				break;
922 		}
923 
924 	if (dramtype != LPDDR4) {
925 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
926 			vref_inner = 0x80;
927 		else if (phy_odt_up_en)
928 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
929 				     (dram_drv_ohm + phy_odt_ohm);
930 		else
931 			vref_inner = phy_odt_ohm * 128 /
932 				(phy_odt_ohm + dram_drv_ohm);
933 
934 		if (dramtype != DDR3 && dram_odt_ohm)
935 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
936 				   (phy_dq_drv_ohm + dram_odt_ohm);
937 		else
938 			vref_out = 0x80;
939 	} else {
940 		/* for lp4 */
941 		if (phy_odt_ohm)
942 			vref_inner =
943 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
944 				 256) / 1000;
945 		else
946 			vref_inner =
947 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
948 				 256) / 1000;
949 
950 		vref_out = 0x80;
951 	}
952 
953 	/* default ZQCALIB bypass mode */
954 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
955 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
956 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
957 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
958 	/* clk / cmd slew rate */
959 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
960 
961 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
962 	if (phy_odt_up_en)
963 		phy_odt_up = phy_odt;
964 	if (phy_odt_dn_en)
965 		phy_odt_dn = phy_odt;
966 
967 	for (i = 0; i < 4; i++) {
968 		j = 0x110 + i * 0x10;
969 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
970 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
971 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
972 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
973 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
974 
975 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
976 				1 << 3, phy_lp4_drv_pd_en << 3);
977 		/* dq slew rate */
978 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
979 				0x1f, sr_dq);
980 	}
981 
982 	/* reg_rx_vref_value_update */
983 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
984 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
985 
986 	/* RAM VREF */
987 	writel(vref_out, PHY_REG(phy_base, 0x105));
988 	if (dramtype == LPDDR3)
989 		udelay(100);
990 
991 	if (dramtype == LPDDR4)
992 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
993 
994 	if (dramtype == DDR3 || dramtype == DDR4) {
995 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
996 				DDR_PCTL2_INIT3);
997 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
998 	} else {
999 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1000 				DDR_PCTL2_INIT4);
1001 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1002 	}
1003 
1004 	if (dramtype == DDR3) {
1005 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1006 		if (dram_drv_ohm == 34)
1007 			mr1_mr3 |= DDR3_DS_34;
1008 
1009 		if (dram_odt_ohm == 0)
1010 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1011 		else if (dram_odt_ohm <= 40)
1012 			mr1_mr3 |= DDR3_RTT_NOM_40;
1013 		else if (dram_odt_ohm <= 60)
1014 			mr1_mr3 |= DDR3_RTT_NOM_60;
1015 		else
1016 			mr1_mr3 |= DDR3_RTT_NOM_120;
1017 
1018 	} else if (dramtype == DDR4) {
1019 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1020 		if (dram_drv_ohm == 48)
1021 			mr1_mr3 |= DDR4_DS_48;
1022 
1023 		if (dram_odt_ohm == 0)
1024 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1025 		else if (dram_odt_ohm <= 34)
1026 			mr1_mr3 |= DDR4_RTT_NOM_34;
1027 		else if (dram_odt_ohm <= 40)
1028 			mr1_mr3 |= DDR4_RTT_NOM_40;
1029 		else if (dram_odt_ohm <= 48)
1030 			mr1_mr3 |= DDR4_RTT_NOM_48;
1031 		else if (dram_odt_ohm <= 60)
1032 			mr1_mr3 |= DDR4_RTT_NOM_60;
1033 		else
1034 			mr1_mr3 |= DDR4_RTT_NOM_120;
1035 
1036 	} else if (dramtype == LPDDR3) {
1037 		if (dram_drv_ohm <= 34)
1038 			mr1_mr3 |= LPDDR3_DS_34;
1039 		else if (dram_drv_ohm <= 40)
1040 			mr1_mr3 |= LPDDR3_DS_40;
1041 		else if (dram_drv_ohm <= 48)
1042 			mr1_mr3 |= LPDDR3_DS_48;
1043 		else if (dram_drv_ohm <= 60)
1044 			mr1_mr3 |= LPDDR3_DS_60;
1045 		else if (dram_drv_ohm <= 80)
1046 			mr1_mr3 |= LPDDR3_DS_80;
1047 
1048 		if (dram_odt_ohm == 0)
1049 			lp3_odt_value = LPDDR3_ODT_DIS;
1050 		else if (dram_odt_ohm <= 60)
1051 			lp3_odt_value = LPDDR3_ODT_60;
1052 		else if (dram_odt_ohm <= 120)
1053 			lp3_odt_value = LPDDR3_ODT_120;
1054 		else
1055 			lp3_odt_value = LPDDR3_ODT_240;
1056 	} else {/* for lpddr4 */
1057 		/* MR3 for lp4 PU-CAL and PDDS */
1058 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1059 		mr1_mr3 |= lp4_pu_cal;
1060 
1061 		tmp = lp4_odt_calc(dram_drv_ohm);
1062 		if (!tmp)
1063 			tmp = LPDDR4_PDDS_240;
1064 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1065 
1066 		/* MR11 for lp4 ca odt, dq odt set */
1067 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1068 			     DDR_PCTL2_INIT6);
1069 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1070 
1071 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1072 
1073 		tmp = lp4_odt_calc(dram_odt_ohm);
1074 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1075 
1076 		tmp = lp4_odt_calc(dram_caodt_ohm);
1077 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1078 		sw_set_req(dram);
1079 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1080 				DDR_PCTL2_INIT6,
1081 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1082 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1083 		sw_set_ack(dram);
1084 
1085 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1086 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1087 			     DDR_PCTL2_INIT7);
1088 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1089 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1090 
1091 		tmp = lp4_odt_calc(phy_odt_ohm);
1092 		mr22 |= tmp;
1093 		mr22 = mr22 |
1094 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1095 			LPDDR4_ODTE_CK_SHIFT) |
1096 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1097 			LPDDR4_ODTE_CS_SHIFT) |
1098 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1099 			LPDDR4_ODTD_CA_SHIFT);
1100 
1101 		sw_set_req(dram);
1102 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1103 				DDR_PCTL2_INIT7,
1104 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1105 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1106 		sw_set_ack(dram);
1107 	}
1108 
1109 	if (dramtype == DDR4 || dramtype == DDR3) {
1110 		sw_set_req(dram);
1111 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1112 				DDR_PCTL2_INIT3,
1113 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1114 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1115 		sw_set_ack(dram);
1116 	} else {
1117 		sw_set_req(dram);
1118 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1119 				DDR_PCTL2_INIT4,
1120 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1121 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1122 		sw_set_ack(dram);
1123 	}
1124 }
1125 
1126 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1127 				   struct rv1126_sdram_params *sdram_params)
1128 {
1129 	void __iomem *phy_base = dram->phy;
1130 	u32 dramtype = sdram_params->base.dramtype;
1131 	struct sdram_head_info_index_v2 *index =
1132 		(struct sdram_head_info_index_v2 *)common_info;
1133 	struct dq_map_info *map_info;
1134 
1135 	map_info = (struct dq_map_info *)((void *)common_info +
1136 		index->dq_map_index.offset * 4);
1137 
1138 	if (dramtype <= LPDDR4)
1139 		writel((map_info->byte_map[dramtype / 4] >>
1140 			((dramtype % 4) * 8)) & 0xff,
1141 		       PHY_REG(phy_base, 0x4f));
1142 
1143 	return 0;
1144 }
1145 
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	/*
	 * Basic PHY bring-up: remap the DQ path, set the PHY PLL, apply
	 * the board PHY register table, enable byte lanes according to
	 * the detected bus width, and set training-related defaults.
	 */
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the board-specific PHY init table; 0xFFFFFFFF terminates it */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes carry logical bytes 0 and 1 (2 bits each) */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/*
	 * Byte-lane enables: bw == 2 -> all four lanes, bw == 1 -> the two
	 * lanes carrying bytes 0/1, otherwise only the byte-0 lane.
	 */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1191 
1192 static int update_refresh_reg(struct dram_info *dram)
1193 {
1194 	void __iomem *pctl_base = dram->pctl;
1195 	u32 ret;
1196 
1197 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1198 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1199 
1200 	return 0;
1201 }
1202 
1203 /*
1204  * rank = 1: cs0
1205  * rank = 2: cs1
1206  */
1207 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1208 {
1209 	u32 ret;
1210 	u32 i, temp;
1211 	u32 dqmap;
1212 
1213 	void __iomem *pctl_base = dram->pctl;
1214 	struct sdram_head_info_index_v2 *index =
1215 		(struct sdram_head_info_index_v2 *)common_info;
1216 	struct dq_map_info *map_info;
1217 
1218 	map_info = (struct dq_map_info *)((void *)common_info +
1219 		index->dq_map_index.offset * 4);
1220 
1221 	if (dramtype == LPDDR2)
1222 		dqmap = map_info->lp2_dq0_7_map;
1223 	else
1224 		dqmap = map_info->lp3_dq0_7_map;
1225 
1226 	pctl_read_mr(pctl_base, rank, mr_num);
1227 
1228 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1229 
1230 	if (dramtype != LPDDR4) {
1231 		temp = 0;
1232 		for (i = 0; i < 8; i++) {
1233 			temp = temp | (((ret >> i) & 0x1) <<
1234 				       ((dqmap >> (i * 4)) & 0xf));
1235 		}
1236 	} else {
1237 		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1238 	}
1239 
1240 	return ret;
1241 }
1242 
1243 /* before call this function autorefresh should be disabled */
1244 void send_a_refresh(struct dram_info *dram)
1245 {
1246 	void __iomem *pctl_base = dram->pctl;
1247 
1248 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1249 		continue;
1250 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1251 }
1252 
1253 static void enter_sr(struct dram_info *dram, u32 en)
1254 {
1255 	void __iomem *pctl_base = dram->pctl;
1256 
1257 	if (en) {
1258 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1259 		while (1) {
1260 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1261 			      PCTL2_SELFREF_TYPE_MASK) ==
1262 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1263 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1264 			      PCTL2_OPERATING_MODE_MASK) ==
1265 			     PCTL2_OPERATING_MODE_SR))
1266 				break;
1267 		}
1268 	} else {
1269 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1270 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1271 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1272 			continue;
1273 	}
1274 }
1275 
1276 void record_dq_prebit(struct dram_info *dram)
1277 {
1278 	u32 group, i, tmp;
1279 	void __iomem *phy_base = dram->phy;
1280 
1281 	for (group = 0; group < 4; group++) {
1282 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1283 			/* l_loop_invdelaysel */
1284 			writel(dq_sel[i][0], PHY_REG(phy_base,
1285 						     grp_addr[group] + 0x2c));
1286 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1287 			writel(tmp, PHY_REG(phy_base,
1288 					    grp_addr[group] + dq_sel[i][1]));
1289 
1290 			/* r_loop_invdelaysel */
1291 			writel(dq_sel[i][0], PHY_REG(phy_base,
1292 						     grp_addr[group] + 0x2d));
1293 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1294 			writel(tmp, PHY_REG(phy_base,
1295 					    grp_addr[group] + dq_sel[i][2]));
1296 		}
1297 	}
1298 }
1299 
static void update_dq_rx_prebit(struct dram_info *dram)
{
	/*
	 * Apply the updated RX DQ pre-bit de-skew settings: pulse
	 * PHY_0x70[4] for ~1us with bits 1 and 6 forced low.
	 */
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1309 
static void update_dq_tx_prebit(struct dram_info *dram)
{
	/*
	 * Apply the updated TX DQ pre-bit de-skew settings: clear
	 * PHY_0x7a[1] (reg_dq_wr_train_en, see data_training_wr), set
	 * PHY_0x2[3], then pulse PHY_0xc[6] for ~1us.
	 */
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1320 
static void update_ca_prebit(struct dram_info *dram)
{
	/*
	 * Apply the updated CA pre-bit de-skew settings: clear
	 * PHY_0x25[2], then pulse PHY_0x22[6] for ~1us.
	 */
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1330 
1331 /*
1332  * dir: 0: de-skew = delta_*
1333  *	1: de-skew = reg val - delta_*
1334  * delta_dir: value for differential signal: clk/
1335  * delta_sig: value for single signal: ca/cmd
1336  */
1337 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1338 			     int delta_sig, u32 cs, u32 dramtype)
1339 {
1340 	void __iomem *phy_base = dram->phy;
1341 	u32 i, cs_en, tmp;
1342 	u32 dfi_lp_stat = 0;
1343 
1344 	if (cs == 0)
1345 		cs_en = 1;
1346 	else if (cs == 2)
1347 		cs_en = 2;
1348 	else
1349 		cs_en = 3;
1350 
1351 	if (dramtype == LPDDR4 &&
1352 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1353 		dfi_lp_stat = 1;
1354 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1355 	}
1356 	enter_sr(dram, 1);
1357 
1358 	for (i = 0; i < 0x20; i++) {
1359 		if (dir == DESKEW_MDF_ABS_VAL)
1360 			tmp = delta_sig;
1361 		else
1362 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1363 			      delta_sig;
1364 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1365 	}
1366 
1367 	if (dir == DESKEW_MDF_ABS_VAL)
1368 		tmp = delta_dif;
1369 	else
1370 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1371 		       delta_sig + delta_dif;
1372 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1373 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1374 	if (dramtype == LPDDR4) {
1375 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1376 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1377 
1378 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1379 		update_ca_prebit(dram);
1380 	}
1381 	enter_sr(dram, 0);
1382 
1383 	if (dfi_lp_stat)
1384 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1385 
1386 }
1387 
1388 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1389 {
1390 	u32 i, j, offset = 0;
1391 	u32 min = 0x3f;
1392 	void __iomem *phy_base = dram->phy;
1393 	u32 byte_en;
1394 
1395 	if (signal == SKEW_TX_SIGNAL)
1396 		offset = 8;
1397 
1398 	if (signal == SKEW_CA_SIGNAL) {
1399 		for (i = 0; i < 0x20; i++)
1400 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1401 	} else {
1402 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1403 		for (j = offset; j < offset + rank * 4; j++) {
1404 			if (!((byte_en >> (j % 4)) & 1))
1405 				continue;
1406 			for (i = 0; i < 11; i++)
1407 				min = MIN(min,
1408 					  readl(PHY_REG(phy_base,
1409 							dqs_dq_skew_adr[j] +
1410 							i)));
1411 		}
1412 	}
1413 
1414 	return min;
1415 }
1416 
1417 static u32 low_power_update(struct dram_info *dram, u32 en)
1418 {
1419 	void __iomem *pctl_base = dram->pctl;
1420 	u32 lp_stat = 0;
1421 
1422 	if (en) {
1423 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1424 	} else {
1425 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1426 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1427 	}
1428 
1429 	return lp_stat;
1430 }
1431 
1432 /*
1433  * signal:
1434  * dir: 0: de-skew = delta_*
1435  *	1: de-skew = reg val - delta_*
1436  * delta_dir: value for differential signal: dqs
1437  * delta_sig: value for single signal: dq/dm
1438  */
1439 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1440 			     int delta_dif, int delta_sig, u32 rank)
1441 {
1442 	void __iomem *phy_base = dram->phy;
1443 	u32 i, j, tmp, offset;
1444 	u32 byte_en;
1445 
1446 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1447 
1448 	if (signal == SKEW_RX_SIGNAL)
1449 		offset = 0;
1450 	else
1451 		offset = 8;
1452 
1453 	for (j = offset; j < (offset + rank * 4); j++) {
1454 		if (!((byte_en >> (j % 4)) & 1))
1455 			continue;
1456 		for (i = 0; i < 0x9; i++) {
1457 			if (dir == DESKEW_MDF_ABS_VAL)
1458 				tmp = delta_sig;
1459 			else
1460 				tmp = delta_sig + readl(PHY_REG(phy_base,
1461 							dqs_dq_skew_adr[j] +
1462 							i));
1463 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1464 		}
1465 		if (dir == DESKEW_MDF_ABS_VAL)
1466 			tmp = delta_dif;
1467 		else
1468 			tmp = delta_dif + readl(PHY_REG(phy_base,
1469 						dqs_dq_skew_adr[j] + 9));
1470 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1471 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1472 	}
1473 	if (signal == SKEW_RX_SIGNAL)
1474 		update_dq_rx_prebit(dram);
1475 	else
1476 		update_dq_tx_prebit(dram);
1477 }
1478 
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	/*
	 * Run read-gate training on chip-select @cs.
	 * Returns -1 on a reported training fault (PHY_0x91 bit 5),
	 * otherwise a bitmask of enabled byte lanes that failed (0 means
	 * all enabled lanes trained successfully).
	 */
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save current per-lane ODT settings so they can be restored */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	/* non-LPDDR4: force 294ohm down / disabled up ODT during training */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* bit 5 flags a training fault; low nibble reports per-lane done */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	/* restore the saved ODT settings */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1531 
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	/*
	 * Run write leveling on chip-select @cs.  Always returns 0;
	 * hangs forever (after a console message) if leveling does not
	 * complete within ~1ms.
	 */
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch the low 16 bits of INIT3 (MR image) for the current fsp */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	/* NOTE(review): bit 12 of the MR is assumed to be the DDR3/4 MR1
	 * output-disable bit — confirm against the JEDEC spec. */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until every enabled byte lane reports leveling done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1594 
/*
 * 32-byte data pattern of alternating 0xaa/0x55 bytes.
 * NOTE(review): not referenced in this part of the file; presumably the
 * data pattern used by training elsewhere — confirm before changing.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1601 
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	/*
	 * Run the PHY's automatic read training on chip-select @cs.
	 * Returns 0 on success, -1 on timeout, reported training error,
	 * or an invalid @cs.
	 * NOTE(review): the @mhz parameter is currently unused here.
	 */
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/*
	 * DDR3 at the default rx vref (0x80): lower the vref by 0xa for
	 * the duration of the training; restored at the end.
	 */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original rx vref lowered above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1718 
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	/*
	 * Run the PHY's automatic write training on chip-select @cs.
	 * Returns 0 on success, -1 on a reported training error; hangs
	 * forever (after a console message) on timeout.
	 * For LPDDR4 the trained write vref is saved to
	 * fsp_param[dst_fsp] for later DFS use.
	 */
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/*
	 * Low-speed LPDDR3: temporarily force CL=8/CWL=4 in the PHY and
	 * MR2=0x6 in the DRAM during training; restored at the end.
	 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* train at bank 0, row 0, column 0 */
	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait until the PHY reports write training done (PHY_0x92[7]) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		/* average the two per-channel trained vref values */
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the CL/CWL/MR2 values changed for low-speed LPDDR3 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1831 
1832 static int data_training(struct dram_info *dram, u32 cs,
1833 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1834 			 u32 training_flag)
1835 {
1836 	u32 ret = 0;
1837 
1838 	if (training_flag == FULL_TRAINING)
1839 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1840 				WRITE_TRAINING | READ_TRAINING;
1841 
1842 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1843 		ret = data_training_wl(dram, cs,
1844 				       sdram_params->base.dramtype,
1845 				       sdram_params->ch.cap_info.rank);
1846 		if (ret != 0)
1847 			goto out;
1848 	}
1849 
1850 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1851 		ret = data_training_rg(dram, cs,
1852 				       sdram_params->base.dramtype);
1853 		if (ret != 0)
1854 			goto out;
1855 	}
1856 
1857 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1858 		ret = data_training_rd(dram, cs,
1859 				       sdram_params->base.dramtype,
1860 				       sdram_params->base.ddr_freq);
1861 		if (ret != 0)
1862 			goto out;
1863 	}
1864 
1865 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1866 		ret = data_training_wr(dram, cs,
1867 				       sdram_params->base.dramtype,
1868 				       sdram_params->base.ddr_freq, dst_fsp);
1869 		if (ret != 0)
1870 			goto out;
1871 	}
1872 
1873 out:
1874 	return ret;
1875 }
1876 
1877 static int get_wrlvl_val(struct dram_info *dram,
1878 			 struct rv1126_sdram_params *sdram_params)
1879 {
1880 	u32 i, j, clk_skew;
1881 	void __iomem *phy_base = dram->phy;
1882 	u32 lp_stat;
1883 	int ret;
1884 
1885 	lp_stat = low_power_update(dram, 0);
1886 
1887 	clk_skew = 0x1f;
1888 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1889 			 sdram_params->base.dramtype);
1890 
1891 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1892 	if (sdram_params->ch.cap_info.rank == 2)
1893 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1894 
1895 	for (j = 0; j < 2; j++)
1896 		for (i = 0; i < 4; i++)
1897 			wrlvl_result[j][i] =
1898 				readl(PHY_REG(phy_base,
1899 					      wrlvl_result_offset[j][i])) -
1900 				clk_skew;
1901 
1902 	low_power_update(dram, lp_stat);
1903 
1904 	return ret;
1905 }
1906 
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	/*
	 * Full training sequence for the high-frequency operating point:
	 * 1. recentre clk/ca de-skew around the average write-leveling
	 *    result recorded in wrlvl_result[][];
	 * 2. per rank, load the write-leveling results as DQS defaults
	 *    and run gate + read + write training;
	 * 3. normalize rx/tx/ca de-skew so the minimum value becomes 0
	 *    and re-run gate training.
	 * Returns 0 on success, non-zero on any training failure.
	 */
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	int ret;

	/* average write-leveling skew over all ranks and byte lanes */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
			dqs_skew += wrlvl_result[j][i];
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       ARRAY_SIZE(wrlvl_result[0]));

	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	/* per-type ca skew relative to clk */
	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* load rank-0 write-leveling results as the DQS training defaults */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same for rank 1 */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift rx de-skew down so the smallest value becomes 0 */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* shift tx and ca de-skew down by the common minimum */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* gate training must be redone after shifting the de-skew values */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
1983 
1984 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
1985 {
1986 	writel(ddrconfig, &dram->msch->deviceconf);
1987 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
1988 }
1989 
1990 static void update_noc_timing(struct dram_info *dram,
1991 			      struct rv1126_sdram_params *sdram_params)
1992 {
1993 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
1994 	       &dram->msch->ddrtiminga0);
1995 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
1996 	       &dram->msch->ddrtimingb0);
1997 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
1998 	       &dram->msch->ddrtimingc0);
1999 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2000 	       &dram->msch->devtodev0);
2001 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2002 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2003 	       &dram->msch->ddr4timing);
2004 }
2005 
/*
 * Final system-level DRAM configuration: program the scheduler ddrconfig,
 * encode the detected geometry into PMUGRF os_reg[2]/os_reg[3] (read back
 * later by other boot stages), set the per-CS device size and refresh the
 * NOC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/*
		 * CS address bit position from ADDRMAP0 (HIF offset + 6 + 2).
		 * NOTE(review): threshold here is > 28 while ddr_set_atags()
		 * uses > 27 and dram_detect_cs1_row() uses < 28 — confirm
		 * the intended boundary against the vendor reference.
		 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1 << cs_pst;
	}

	/*
	 * devicesize holds per-CS capacity; (bytes >> 20) / 64 is capacity
	 * in 64 MiB units, assuming sdram_get_cs_cap() returns bytes —
	 * TODO confirm.
	 */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2038 
/*
 * Enable DRAM low-power features: hardware low-power request routing in
 * the DDR GRF, plus controller self-refresh / power-down entry depending
 * on the configured idle counters.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	/* 0x1f1f0617: low-power control value — TODO confirm against TRM */
	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/*
	 * Upper half-word is the write-enable mask (Rockchip GRF
	 * convention); select the low-power request bit per DRAM type.
	 */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2069 
/*
 * Publish boot information as Rockchip atags for later boot stages:
 * debug-serial settings and the detected DDR memory layout.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* start from a clean atag list, then add the serial tag */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	/* 3/4-row parts expose only three quarters of the nominal size */
	if (cap_info->row_3_4) {
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* CS address bit position from ADDRMAP0 (HIF offset + 6 + 2) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	/*
	 * Two banks when CS1 exists and sits above the 128 MiB boundary;
	 * bank[] then appears to hold {addr0, addr1, size0, size1} —
	 * TODO confirm tag_ddr_mem layout.
	 */
	if (cs_cap[1] && cs_pst > 27) {
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1];
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is filled in but never passed to
	 * atags_set_tag() — confirm whether an ATAG_SOC_INFO call is
	 * missing or the tag is intentionally unused.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2125 
2126 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2127 {
2128 	u32 split;
2129 
2130 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2131 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2132 		split = 0;
2133 	else
2134 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2135 			SPLIT_SIZE_MASK;
2136 
2137 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2138 			     &sdram_params->base, split);
2139 }
2140 
/*
 * Core SDRAM bring-up: configure clocks, walk the reset sequence while
 * programming PHY and controller, issue the per-type mode registers and
 * run read-gate training.
 *
 * @post_init: 0 for the first (detection) pass — failures are silent and
 *             only CS0 is trained; non-zero for the final pass — failures
 *             are reported and CS1 is trained too.
 *
 * Return: 0 on success, -1 if data training failed.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* full reset, then release blocks one at a time as they are set up */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	u32 tmp, trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	/* release everything and wait for the controller to leave Init */
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* LPDDR3/4 need their ODT/vref mode registers written explicitly */
	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/*
		 * PHY reg 0x105 scaled by 39 — presumably converts to the
		 * units pctl_write_vrefdq() expects; TODO confirm.
		 */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2238 
/*
 * Probe the attached DRAM geometry (column/bank/row counts, rank, bus
 * width) and record it in sdram_params->ch.cap_info.
 *
 * Non-LPDDR4 types are probed by write/read-back tests via the sdram_*
 * helpers; LPDDR4 geometry is derived from the MR8 density field.
 * Rank is detected by attempting read-gate training on CS1, and (for
 * non-LPDDR4) bus width by retraining with the full byte-lane mask.
 *
 * Return: 0 on success; -1 (as u64, i.e. non-zero) on probe failure —
 * callers compare against 0.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe col, bank, dbw */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: geometry from MR8 density field (bits [5:2]) */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		/* odd density codes correspond to 3/4-row parts */
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* disable low-power entry while training probes run */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* CS1 trains successfully iff a second rank is present */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/* enable all byte lanes, then retrain to detect bus width */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0)
			cap_info->bw = 2;
		else
			cap_info->bw = 1;
	}

	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* assume CS1 mirrors CS0 for now; refined by dram_detect_cs1_row() */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2332 
/*
 * Determine the actual number of row bits on CS1 by writing a test
 * pattern at descending candidate row addresses above the CS0 region and
 * checking it reads back without aliasing onto the CS1 base.
 *
 * Return: detected CS1 row count, or 0 if rank != 2 or no row matched.
 */
static int dram_detect_cs1_row(struct dram_info *dram,
			       struct rv1126_sdram_params *sdram_params,
			       unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;
	u32 cs_pst;
	u32 cs_add = 0;
	u32 max_row;

	if (cap_info->rank == 2) {
		/* CS address bit position from ADDRMAP0 (HIF offset + 6 + 2) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst < 28)
			cs_add = 1;

		cs0_cap = 1 << cs_pst;

		/* DDR4 folds bank-group bits into the bank count */
		if (sdram_params->base.dramtype == DDR4) {
			if (cap_info->dbw == 0)
				bktmp = cap_info->bk + 2;
			else
				bktmp = cap_info->bk + 1;
		} else {
			bktmp = cap_info->bk;
		}
		bw = cap_info->bw;
		coltmp = cap_info->col;

		/* compare only the active data byte lanes */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* cap candidate rows so the test address stays in 32 bits */
		max_row = (cs_pst == 31) ? 30 : 31;

		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;

		row = (cap_info->cs0_row > max_row) ? max_row :
			cap_info->cs0_row;

		/*
		 * Walk candidate row counts downward; the first row for
		 * which the pattern survives and does not alias onto the
		 * CS1 base address is the real row count.
		 */
		for (; row > 12; row--) {
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    (u32)cs0_cap +
				    (1ul << (row + bktmp + coltmp +
					     cs_add + bw - 1ul)));

			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
			writel(PATTERN, test_addr);

			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
2400 
2401 /* return: 0 = success, other = fail */
2402 static int sdram_init_detect(struct dram_info *dram,
2403 			     struct rv1126_sdram_params *sdram_params)
2404 {
2405 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2406 	u32 ret;
2407 	u32 sys_reg = 0;
2408 	u32 sys_reg3 = 0;
2409 
2410 	if (sdram_init_(dram, sdram_params, 0) != 0)
2411 		return -1;
2412 
2413 	if (sdram_params->base.dramtype == DDR3) {
2414 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2415 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2416 			return -1;
2417 	}
2418 
2419 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2420 		return -1;
2421 
2422 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2423 				   sdram_params->base.dramtype);
2424 	ret = sdram_init_(dram, sdram_params, 1);
2425 	if (ret != 0)
2426 		goto out;
2427 
2428 	cap_info->cs1_row =
2429 		dram_detect_cs1_row(dram, sdram_params, 0);
2430 	if (cap_info->cs1_row) {
2431 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2432 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2433 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2434 				    sys_reg, sys_reg3, 0);
2435 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2436 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2437 	}
2438 
2439 	sdram_detect_high_row(cap_info);
2440 
2441 out:
2442 	return ret;
2443 }
2444 
2445 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2446 {
2447 	u32 i;
2448 	u32 offset = 0;
2449 	struct ddr2_3_4_lp2_3_info *ddr_info;
2450 
2451 	if (!freq_mhz) {
2452 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2453 		if (ddr_info)
2454 			freq_mhz =
2455 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2456 				DDR_FREQ_MASK;
2457 		else
2458 			freq_mhz = 0;
2459 	}
2460 
2461 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2462 		if (sdram_configs[i].base.ddr_freq == 0 ||
2463 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2464 			break;
2465 	}
2466 	offset = i == 0 ? 0 : i - 1;
2467 
2468 	return &sdram_configs[offset];
2469 }
2470 
/*
 * Controller (uMCTL2) registers whose per-frequency-set-point copies are
 * refreshed from the parameter table by pre_set_rate().
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2494 
/*
 * PHY register offsets (cl/cwl/al per the comment in pre_set_rate())
 * refreshed from the parameter table on a frequency change.
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2500 
/*
 * Prepare the controller, PHY and NOC for a frequency change before the
 * actual switch: copy the per-FSP timing registers from the parameter
 * table, update drive-strength/ODT, and (LPDDR4) pre-write the mode
 * registers plus their shadow copies in the PHY.
 *
 * @dst_fsp:     destination controller frequency set point
 * @dst_fsp_lp4: destination LPDDR4 FSP encoding used for MR13
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	/*
	 * The table is scanned from the last hit onward ("find"), so it is
	 * expected to be ordered like pctl_need_update_reg[].
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	/* FSP > 0 uses a banked register window inside the PHY */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	/*
	 * LPDDR4: write the mode registers now and mirror each value into
	 * the PHY shadow regs (0x17..0x1d) for the new set point.
	 */
	if (dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2626 
/*
 * Snapshot the drive-strength/ODT/vref settings and NOC timings of the
 * given frequency set point into fsp_param[dst_fsp], decoding the
 * per-type mode-register fields from the controller's INIT registers.
 * The table is later copied to DRAM (see copy_fsp_param_to_ddr()).
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4 always uses pull-down-only read ODT */
	if (sdram_params->base.dramtype == LPDDR4) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* read back the active ODT/drive values from the PHY */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* decode drive strength / ODT from the per-type MR bit layout */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = ((temp >> 1) & 0x1) |
				       (((temp >> 5) & 0x1) << 1);
		p_fsp_param->dq_odt = ((temp >> 2) & 0x1) |
				      (((temp >> 6) & 0x1) << 1) |
				      (((temp >> 9) & 0x1) << 2);
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 1) & 0x3;
		p_fsp_param->dq_odt = (temp >> 8) & 0x7;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 3) & 0x7;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & 0x7;
		p_fsp_param->ca_odt = (temp >> 4) & 0x7;

		/*
		 * Average the trained CA vref windows (max of lower edges,
		 * min of upper edges) per channel, then OR in the range
		 * bit from PHY reg 0x1e.
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* carry over the NOC timing set so it can be restored later */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark the entry valid */
	p_fsp_param->flag = FSP_FLAG;
}
2743 
#ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
/*
 * Copy the captured per-FSP parameter table to its fixed location in
 * DRAM (FSP_PARAM_STORE_ADDR) for consumption by later boot stages.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
#endif
2751 
/*
 * Switch the DRAM to a new frequency set point.
 *
 * Sequence: pre-load the destination FSP registers, enter self-refresh,
 * isolate the PHY, reprogram DPLL and PHY PLL, re-enable clocks, switch
 * the controller to the destination FSP, exit self-refresh, rewrite the
 * per-type mode registers, then retrain at the new frequency and save
 * the resulting parameters.
 *
 * @freq:        target frequency in MHz
 * @cur_freq:    current frequency in MHz
 * @dst_fsp:     destination controller frequency set point
 * @dst_fsp_lp4: destination LPDDR4 FSP encoding (MR13)
 * @training_en: NOTE(review): currently unused — training always runs;
 *               confirm against the vendor reference.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* suspend low-power features for the duration of the switch */
	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;

	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller has left self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/* DDR3/DDR4 encode DLL-off in MR1 bit 0 with opposite polarity */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* force DLL off via MR1 before entering self-refresh */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* buffer PHY I/O while the clocks are being reprogrammed */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both set points */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate scheduler and controller clocks during the PLL switch */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* hold the PHY in reset while retuning DPLL and PHY PLL */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* un-buffer the PHY and re-enable the gated clocks */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* switch the controller and PHY to the destination set point */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit 5 after leaving self-refresh */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* rewrite mode registers from the destination INIT values */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* when the DLL stays on, reset it via MR0 first */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: select the destination FSP operating point */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
2931 
2932 static void ddr_set_rate_for_fsp(struct dram_info *dram,
2933 				 struct rv1126_sdram_params *sdram_params)
2934 {
2935 	struct ddr2_3_4_lp2_3_info *ddr_info;
2936 	u32 f0;
2937 	u32 dramtype = sdram_params->base.dramtype;
2938 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2939 	u32 f1, f2, f3;
2940 #endif
2941 
2942 	ddr_info = get_ddr_drv_odt_info(dramtype);
2943 	if (!ddr_info)
2944 		return;
2945 
2946 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2947 	     DDR_FREQ_MASK;
2948 
2949 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2950 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
2951 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
2952 
2953 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
2954 	     DDR_FREQ_MASK;
2955 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
2956 	     DDR_FREQ_MASK;
2957 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
2958 	     DDR_FREQ_MASK;
2959 #endif
2960 
2961 	if (get_wrlvl_val(dram, sdram_params))
2962 		printascii("get wrlvl value fail\n");
2963 
2964 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2965 	printascii("change to: ");
2966 	printdec(f1);
2967 	printascii("MHz\n");
2968 	ddr_set_rate(&dram_info, sdram_params, f1,
2969 		     sdram_params->base.ddr_freq, 1, 1, 1);
2970 	printascii("change to: ");
2971 	printdec(f2);
2972 	printascii("MHz\n");
2973 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
2974 	printascii("change to: ");
2975 	printdec(f3);
2976 	printascii("MHz\n");
2977 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
2978 #endif
2979 	printascii("change to: ");
2980 	printdec(f0);
2981 	printascii("MHz(final freq)\n");
2982 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2983 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
2984 #else
2985 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
2986 #endif
2987 }
2988 
2989 int get_uart_config(void)
2990 {
2991 	struct sdram_head_info_index_v2 *index =
2992 		(struct sdram_head_info_index_v2 *)common_info;
2993 	struct global_info *gbl_info;
2994 
2995 	gbl_info = (struct global_info *)((void *)common_info +
2996 		index->global_index.offset * 4);
2997 
2998 	return gbl_info->uart_info;
2999 }
3000 
3001 /* return: 0 = success, other = fail */
3002 int sdram_init(void)
3003 {
3004 	struct rv1126_sdram_params *sdram_params;
3005 	int ret = 0;
3006 	struct sdram_head_info_index_v2 *index =
3007 		(struct sdram_head_info_index_v2 *)common_info;
3008 	struct global_info *gbl_info;
3009 
3010 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3011 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3012 	dram_info.grf = (void *)GRF_BASE_ADDR;
3013 	dram_info.cru = (void *)CRU_BASE_ADDR;
3014 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3015 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3016 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3017 
3018 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3019 	printascii("extended temp support\n");
3020 #endif
3021 	if (index->version_info != 2 ||
3022 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3023 	    (index->ddr3_index.size !=
3024 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3025 	    (index->ddr4_index.size !=
3026 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3027 	    (index->lp3_index.size !=
3028 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3029 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3030 	    index->global_index.offset == 0 ||
3031 	    index->ddr3_index.offset == 0 ||
3032 	    index->ddr4_index.offset == 0 ||
3033 	    index->lp3_index.offset == 0 ||
3034 	    index->lp4_index.offset == 0) {
3035 		printascii("common info error\n");
3036 		goto error;
3037 	}
3038 
3039 	gbl_info = (struct global_info *)((void *)common_info +
3040 		index->global_index.offset * 4);
3041 
3042 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3043 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3044 
3045 	sdram_params = &sdram_configs[0];
3046 
3047 	if (sdram_params->base.dramtype == DDR3 ||
3048 	    sdram_params->base.dramtype == DDR4) {
3049 		if (DDR_2T_INFO(gbl_info->info_2t))
3050 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3051 		else
3052 			sdram_params->pctl_regs.pctl[0][1] &=
3053 				~(0x1 << 10);
3054 	}
3055 	ret = sdram_init_detect(&dram_info, sdram_params);
3056 	if (ret) {
3057 		sdram_print_dram_type(sdram_params->base.dramtype);
3058 		printascii(", ");
3059 		printdec(sdram_params->base.ddr_freq);
3060 		printascii("MHz\n");
3061 		goto error;
3062 	}
3063 	print_ddr_info(sdram_params);
3064 
3065 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3066 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
3067 	copy_fsp_param_to_ddr();
3068 #endif
3069 
3070 	ddr_set_atags(&dram_info, sdram_params);
3071 
3072 	printascii("out\n");
3073 
3074 	return ret;
3075 error:
3076 	printascii("error\n");
3077 	return (-1);
3078 }
3079 #endif /* CONFIG_TPL_BUILD */
3080