xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 695a88c4e9a84d0965ef4dc8d43555d17d2096ba)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/*
 * Run-time state for the RV1126 TPL DRAM driver: register bases for the
 * controller/PHY plus pointers to the system blocks touched during init.
 */
struct dram_info {
	void __iomem *pctl;		/* DDR controller (UPCTL2) base */
	void __iomem *phy;		/* DDR PHY base */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler (server) */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system GRF */
	struct ram_info info;		/* base/size reported upward */
	struct rv1126_pmugrf *pmugrf;	/* PMU GRF */
	u32 sr_idle;	/* NOTE(review): presumably self-refresh idle count -- confirm */
	u32 pd_idle;	/* NOTE(review): presumably power-down idle count -- confirm */
};
55 
/*
 * Fixed physical base addresses of the blocks used during DRAM init.
 * TPL runs before any device model, so these are hard-coded.
 */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* secure GRF offsets carrying the controller reset requests */
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* single static-storage instance used throughout this TPL driver */
struct dram_info dram_info;
69 
/*
 * Compile-time selection of the DRAM-type parameter set built into TPL.
 * Exactly one TPL_INIT_DDR_TYPE_* is defined; each variant pulls in one
 * pre-generated rv1126_sdram_params entry per supported frequency
 * (330 .. 1056 MHz).
 */
#define TPL_INIT_DDR_TYPE_DDR3
#ifdef TPL_INIT_DDR_TYPE_DDR3
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-330.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif defined TPL_INIT_DDR_TYPE_DDR4
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-330.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif defined TPL_INIT_DDR_TYPE_LPDDR3
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-330.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif defined TPL_INIT_DDR_TYPE_LPDDR4
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-330.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
112 
/*
 * Shared loader parameter blob; its head is parsed as a
 * struct sdram_head_info_index_v2 by get_ddr_drv_odt_info().
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};
116 
/* per-index frequency set-point parameters (populated outside this chunk) */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* NOTE(review): cached LPDDR3 ODT value -- writer not visible in this chunk */
static u8 lp3_odt_value;

/* write-leveling results, apparently [rank][byte lane] -- filled elsewhere */
static u8 wrlvl_result[2][4];
122 
/*
 * DDR configuration 0-9 (DDR3/LPDDR3/LPDDR4).
 * Encoding, per calculate_ddrconfig():
 *   bit [8]   = rank - 1
 *   bits[7:5] = cs0_row - 13
 *   bit [4]   = NOTE(review): only set for configs 5-7 (equal-row
 *               dual-rank cases); meaning not visible here -- confirm
 *   bit [3]   = 1 when bank field == 3 (8 banks)
 *   bits[2:0] = bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
136 
/*
 * DDR4 configuration 10-21.
 * Encoding, per calculate_ddrconfig():
 *   bit [7]   = rank - 1
 *   bits[6:4] = cs0_row - 13
 *   bit [3]   = NOTE(review): matched only on the equal-row dual-rank
 *               path -- confirm exact meaning against the TRM
 *   bits[2:1] = bus width (bw)
 *   bit [0]   = die width (dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
152 
/*
 * DDR configuration 22-28: second part of the ddr_cfg_2_rbc space,
 * same bit encoding as ddr_cfg_2_rbc above.
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
163 
/*
 * Map between a DDR4 ddrconfig id (column 0) and the equivalent
 * DDR3-style id (column 1): calculate_ddrconfig() converts col0 -> col1
 * when returning, and set_ctl_address_map() converts col1 -> col0 to
 * index the addrmap table.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
178 
/*
 * Per-ddrconfig controller address-map tables.  Each row holds the nine
 * words copied verbatim into DDR_PCTL2_ADDRMAP0.. by
 * set_ctl_address_map() (9 * 4 bytes via sdram_copy_to_reg()).
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
229 
/*
 * NOTE(review): per-DQ signal select table (three alternative codes per
 * entry), apparently consumed by the deskew code outside this chunk --
 * confirm the column semantics against the PHY documentation.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
254 
/*
 * Address-group bases per {CS0,CS1} x {A,B} half (constants from
 * sdram_rv1126.h); consumers are outside this chunk.
 */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
261 
/*
 * PHY register offsets holding the write-leveling results, indexed
 * [rank][byte lane] (bases 0xa0/0xd0 per lane pair); readers are
 * outside this chunk.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
266 
/*
 * Skew register base offsets, one per SKEW_UPDATE_{RX,TX}_CSx_DQSy
 * group as tagged inline (RX first, then TX).
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
285 
/*
 * Drive the DDR controller and PHY reset request lines.
 *
 * The controller core/AXI and APB reset requests go through the secure
 * GRF (SGRF_SOC_CON13); the PHY core and APB requests live in CRU
 * softrst_con[12].  The request values are passed straight into the
 * *_REQ() field macros.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	/* ctl_srstn also drives the controller AXI reset request */
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
297 
/*
 * Reprogram the DPLL (pll[1]) to @hz and switch its mode mux onto it.
 *
 * The mux is parked on the 24 MHz crystal while the dividers are being
 * written, and lock is polled for at most 1000 us before switching back.
 * fbdiv is derived from the 24 MHz reference (hence the "/ 24").
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;

	/* select a post-divider pair by target frequency band */
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	/*
	 * NOTE(review): 0x1f000000 sets only write-enable bits of
	 * clksel_con[64] (value bits zero) -- presumably clears the DDR
	 * clock divider field; confirm against the CRU register map.
	 */
	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* poll the lock flag for up to 1000 us */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
342 
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/*
	 * The inno DDR PHY doubles the input clock internally, so the
	 * DPLL is programmed at half the target DRAM frequency.
	 * NOTE(review): this uses MHZ while rkclk_set_dpll() divides by
	 * MHz -- confirm both macros equal 1000000.
	 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
349 
350 static void phy_soft_reset(struct dram_info *dram)
351 {
352 	void __iomem *phy_base = dram->phy;
353 
354 	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
355 	udelay(1);
356 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
357 	udelay(1);
358 }
359 
/*
 * Select the ddrconfig index whose rank/row/bank/column/width encoding
 * matches the detected DRAM geometry (see ddr_cfg_2_rbc[] and
 * ddr4_cfg_2_rbc[] for the bit layouts).
 *
 * Returns an index in the 0..28 config space; DDR4 matches (10..21)
 * are translated to their DDR3-style equivalents via d4_rbc_2_d3_rbc[]
 * before returning.  On no match, ddrconf keeps its initial value:
 * since it is u32, the "-1" wraps to 0xffffffff and trips the "> 28"
 * error print.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* dual rank with equal row counts: prefer configs 17-20 */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* general DDR4 match over configs 10-20 */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* dual rank, equal rows, 8 banks: prefer configs 5-7 */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/*
		 * NOTE(review): loop bound 9 never tests table entry 9
		 * (ddr_cfg_2_rbc has 10 entries, comment says "0-9") --
		 * confirm whether entry 9 is intentionally excluded here.
		 */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}
		/* single-rank 8-bank fallback handled by config 23 */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* -1 init wraps to 0xffffffff, so "no match" lands here too */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* translate a DDR4 config id to its DDR3-style equivalent */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
453 
/*
 * Open a software programming window in the controller by clearing
 * sw_done; pair with sw_set_ack() after updating quasi-dynamic
 * registers.
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
461 
462 static void sw_set_ack(struct dram_info *dram)
463 {
464 	void __iomem *pctl_base = dram->pctl;
465 
466 	/* set sw_done=1 */
467 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
468 	while (1) {
469 		/* wait programming done */
470 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
471 				PCTL2_SW_DONE_ACK)
472 			break;
473 	}
474 }
475 
476 static void set_ctl_address_map(struct dram_info *dram,
477 				struct rv1126_sdram_params *sdram_params)
478 {
479 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
480 	void __iomem *pctl_base = dram->pctl;
481 	u32 ddrconf = cap_info->ddrconfig;
482 	u32 i, row;
483 
484 	row = cap_info->cs0_row;
485 	if (sdram_params->base.dramtype == DDR4) {
486 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
487 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
488 				ddrconf = d4_rbc_2_d3_rbc[i][0];
489 				break;
490 			}
491 		}
492 	}
493 
494 	if (ddrconf > ARRAY_SIZE(addrmap)) {
495 		printascii("set ctl address map fail\n");
496 		return;
497 	}
498 
499 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
500 			  &addrmap[ddrconf][0], 9 * 4);
501 
502 	/* unused row set to 0xf */
503 	for (i = 17; i >= row; i--)
504 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
505 			((i - 12) * 8 / 32) * 4,
506 			0xf << ((i - 12) * 8 % 32));
507 
508 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
509 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
510 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
511 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
512 
513 	if (cap_info->rank == 1)
514 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
515 }
516 
/*
 * Program or enable the PHY PLL.
 *
 * @freq: target frequency in Hz (used only when @wait == 0)
 * @wait: 0 = write the divider configuration for @freq;
 *        non-zero = power the PLL up (clear PD) and spin until the
 *        lock bit in reg 0x90 sets.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		/* choose dividers by frequency band */
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv low byte in 0x50, bit 8 in 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		/*
		 * NOTE(review): prediv is written unshifted although the
		 * mask is PHY_PREDIV_MASK << PHY_PREDIV_SHIFT -- fine only
		 * if PHY_PREDIV_SHIFT is 0; confirm in sdram_rv1126.h.
		 */
		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
555 
/*
 * DDR3 PHY drive-strength lookup: {register code, ohm}, sorted by
 * descending resistance.  set_ds_odt() scans from the low-ohm end and
 * picks the first table value >= the requested resistance.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_506ohm, 506},
	{PHY_DDR3_RON_253ohm, 253},
	{PHY_DDR3_RON_169hm, 169},
	{PHY_DDR3_RON_127ohm, 127},
	{PHY_DDR3_RON_101ohm, 101},
	{PHY_DDR3_RON_84ohm, 84},
	{PHY_DDR3_RON_72ohm, 72},
	{PHY_DDR3_RON_63ohm, 63},
	{PHY_DDR3_RON_56ohm, 56},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_42ohm, 42},
	{PHY_DDR3_RON_39ohm, 39},
	{PHY_DDR3_RON_36ohm, 36},
	{PHY_DDR3_RON_34ohm, 34},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22}
};
581 
/*
 * DDR3 PHY ODT lookup: {register code, ohm}, descending; entry 0
 * disables ODT.  NOTE(review): only read in this chunk -- could be
 * "static const" like d3_phy_drv_2_ohm if no writer exists elsewhere.
 */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_953ohm, 953},
	{PHY_DDR3_RTT_483ohm, 483},
	{PHY_DDR3_RTT_320ohm, 320},
	{PHY_DDR3_RTT_241ohm, 241},
	{PHY_DDR3_RTT_193ohm, 193},
	{PHY_DDR3_RTT_161ohm, 161},
	{PHY_DDR3_RTT_138ohm, 138},
	{PHY_DDR3_RTT_121ohm, 121},
	{PHY_DDR3_RTT_107ohm, 107},
	{PHY_DDR3_RTT_97ohm, 97},
	{PHY_DDR3_RTT_88ohm, 88},
	{PHY_DDR3_RTT_80ohm, 80},
	{PHY_DDR3_RTT_74ohm, 74},
	{PHY_DDR3_RTT_69ohm, 69},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_60ohm, 60},
	{PHY_DDR3_RTT_57ohm, 57},
	{PHY_DDR3_RTT_54ohm, 54},
	{PHY_DDR3_RTT_51ohm, 51},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_46ohm, 46},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_42ohm, 42}
};
608 
/*
 * DDR4/LPDDR3 PHY drive-strength lookup: {register code, ohm},
 * descending; consumed like d3_phy_drv_2_ohm by set_ds_odt().
 */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_570ohm, 570},
	{PHY_DDR4_LPDDR3_RON_285ohm, 285},
	{PHY_DDR4_LPDDR3_RON_190ohm, 190},
	{PHY_DDR4_LPDDR3_RON_142ohm, 142},
	{PHY_DDR4_LPDDR3_RON_114ohm, 114},
	{PHY_DDR4_LPDDR3_RON_95ohm, 95},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_71ohm, 71},
	{PHY_DDR4_LPDDR3_RON_63ohm, 63},
	{PHY_DDR4_LPDDR3_RON_57ohm, 57},
	{PHY_DDR4_LPDDR3_RON_52ohm, 52},
	{PHY_DDR4_LPDDR3_RON_47ohm, 47},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_41ohm, 41},
	{PHY_DDR4_LPDDR3_RON_38ohm, 38},
	{PHY_DDR4_LPDDR3_RON_36ohm, 36},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_26ohm, 26},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25}
};
634 
/*
 * DDR4/LPDDR3 PHY ODT lookup: {register code, ohm}, descending;
 * entry 0 disables ODT.
 */
static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_973ohm, 973},
	{PHY_DDR4_LPDDR3_RTT_493ohm, 493},
	{PHY_DDR4_LPDDR3_RTT_327ohm, 327},
	{PHY_DDR4_LPDDR3_RTT_247ohm, 247},
	{PHY_DDR4_LPDDR3_RTT_197ohm, 197},
	{PHY_DDR4_LPDDR3_RTT_164ohm, 164},
	{PHY_DDR4_LPDDR3_RTT_141ohm, 141},
	{PHY_DDR4_LPDDR3_RTT_123ohm, 123},
	{PHY_DDR4_LPDDR3_RTT_109ohm, 109},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_90ohm, 90},
	{PHY_DDR4_LPDDR3_RTT_82ohm, 82},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_70ohm, 70},
	{PHY_DDR4_LPDDR3_RTT_66ohm, 66},
	{PHY_DDR4_LPDDR3_RTT_62ohm, 62},
	{PHY_DDR4_LPDDR3_RTT_58ohm, 58},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_52ohm, 52},
	{PHY_DDR4_LPDDR3_RTT_49ohm, 49},
	{PHY_DDR4_LPDDR3_RTT_47ohm, 47},
	{PHY_DDR4_LPDDR3_RTT_45ohm, 45},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43}
};
661 
/*
 * LPDDR4 PHY drive-strength lookup: {register code, ohm}, descending.
 * NOTE(review): the PHY_LPDDR4_RON_152ohm entry carries the value 153
 * -- confirm whether 152 vs 153 is intentional.
 */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_606ohm, 606},
	{PHY_LPDDR4_RON_303ohm, 303},
	{PHY_LPDDR4_RON_202ohm, 202},
	{PHY_LPDDR4_RON_152ohm, 153},
	{PHY_LPDDR4_RON_121ohm, 121},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_87ohm, 87},
	{PHY_LPDDR4_RON_76ohm, 76},
	{PHY_LPDDR4_RON_67ohm, 67},
	{PHY_LPDDR4_RON_61ohm, 61},
	{PHY_LPDDR4_RON_55ohm, 55},
	{PHY_LPDDR4_RON_51ohm, 51},
	{PHY_LPDDR4_RON_47ohm, 47},
	{PHY_LPDDR4_RON_43ohm, 43},
	{PHY_LPDDR4_RON_40ohm, 40},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_34ohm, 34},
	{PHY_LPDDR4_RON_32ohm, 32},
	{PHY_LPDDR4_RON_30ohm, 30},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26}
};
687 
/*
 * LPDDR4 PHY ODT lookup: {register code, ohm}, descending; entry 0
 * disables ODT.
 */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_998ohm, 998},
	{PHY_LPDDR4_RTT_506ohm, 506},
	{PHY_LPDDR4_RTT_336ohm, 336},
	{PHY_LPDDR4_RTT_253ohm, 253},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_169ohm, 169},
	{PHY_LPDDR4_RTT_144ohm, 144},
	{PHY_LPDDR4_RTT_127ohm, 127},
	{PHY_LPDDR4_RTT_112ohm, 112},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_92ohm, 92},
	{PHY_LPDDR4_RTT_84ohm, 84},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_72ohm, 72},
	{PHY_LPDDR4_RTT_67ohm, 67},
	{PHY_LPDDR4_RTT_63ohm, 63},
	{PHY_LPDDR4_RTT_60ohm, 60},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_53ohm, 53},
	{PHY_LPDDR4_RTT_51ohm, 51},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_46ohm, 46},
	{PHY_LPDDR4_RTT_44ohm, 44}
};
714 
715 static u32 lp4_odt_calc(u32 odt_ohm)
716 {
717 	u32 odt;
718 
719 	if (odt_ohm == 0)
720 		odt = LPDDR4_DQODT_DIS;
721 	else if (odt_ohm <= 40)
722 		odt = LPDDR4_DQODT_40;
723 	else if (odt_ohm <= 48)
724 		odt = LPDDR4_DQODT_48;
725 	else if (odt_ohm <= 60)
726 		odt = LPDDR4_DQODT_60;
727 	else if (odt_ohm <= 80)
728 		odt = LPDDR4_DQODT_80;
729 	else if (odt_ohm <= 120)
730 		odt = LPDDR4_DQODT_120;
731 	else
732 		odt = LPDDR4_DQODT_240;
733 
734 	return odt;
735 }
736 
737 static void *get_ddr_drv_odt_info(u32 dramtype)
738 {
739 	struct sdram_head_info_index_v2 *index =
740 		(struct sdram_head_info_index_v2 *)common_info;
741 	void *ddr_info = 0;
742 
743 	if (dramtype == DDR4)
744 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
745 	else if (dramtype == DDR3)
746 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
747 	else if (dramtype == LPDDR3)
748 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
749 	else if (dramtype == LPDDR4)
750 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
751 	else
752 		printascii("unsupported dram type\n");
753 	return ddr_info;
754 }
755 
756 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
757 			 u32 freq_mhz, u32 dst_fsp)
758 {
759 	void __iomem *pctl_base = dram->pctl;
760 	u32 ca_vref, dq_vref;
761 
762 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
763 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
764 	else
765 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
766 
767 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
768 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
769 	else
770 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
771 
772 	if (ca_vref < 100)
773 		ca_vref = 100;
774 	if (ca_vref > 420)
775 		ca_vref = 420;
776 
777 	if (ca_vref <= 300)
778 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
779 	else
780 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
781 
782 	if (dq_vref < 100)
783 		dq_vref = 100;
784 	if (dq_vref > 420)
785 		dq_vref = 420;
786 
787 	if (dq_vref <= 300)
788 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
789 	else
790 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
791 
792 	sw_set_req(dram);
793 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
794 			DDR_PCTL2_INIT6,
795 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
796 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
797 
798 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
799 			DDR_PCTL2_INIT7,
800 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
801 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
802 	sw_set_ack(dram);
803 }
804 
805 static void set_ds_odt(struct dram_info *dram,
806 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
807 {
808 	void __iomem *phy_base = dram->phy;
809 	void __iomem *pctl_base = dram->pctl;
810 	u32 dramtype = sdram_params->base.dramtype;
811 	struct ddr2_3_4_lp2_3_info *ddr_info;
812 	struct lp4_info *lp4_info;
813 	u32 i, j, tmp;
814 	const u16 (*p_drv)[2];
815 	const u16 (*p_odt)[2];
816 	u32 drv_info, sr_info;
817 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
818 	u32 phy_odt_ohm, dram_odt_ohm;
819 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
820 	u32 phy_odt_up_en, phy_odt_dn_en;
821 	u32 sr_dq, sr_clk;
822 	u32 freq = sdram_params->base.ddr_freq;
823 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
824 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
825 	u32 phy_dq_drv = 0;
826 	u32 phy_odt_up = 0, phy_odt_dn = 0;
827 
828 	ddr_info = get_ddr_drv_odt_info(dramtype);
829 	lp4_info = (void *)ddr_info;
830 
831 	if (!ddr_info)
832 		return;
833 
834 	/* dram odt en freq control phy drv, dram odt and phy sr */
835 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
836 		drv_info = ddr_info->drv_when_odtoff;
837 		dram_odt_ohm = 0;
838 		sr_info = ddr_info->sr_when_odtoff;
839 		phy_lp4_drv_pd_en =
840 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
841 	} else {
842 		drv_info = ddr_info->drv_when_odten;
843 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
844 		sr_info = ddr_info->sr_when_odten;
845 		phy_lp4_drv_pd_en =
846 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
847 	}
848 	phy_dq_drv_ohm =
849 		DRV_INFO_PHY_DQ_DRV(drv_info);
850 	phy_clk_drv_ohm =
851 		DRV_INFO_PHY_CLK_DRV(drv_info);
852 	phy_ca_drv_ohm =
853 		DRV_INFO_PHY_CA_DRV(drv_info);
854 
855 	sr_dq = DQ_SR_INFO(sr_info);
856 	sr_clk = CLK_SR_INFO(sr_info);
857 
858 	/* phy odt en freq control dram drv and phy odt */
859 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
860 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
861 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
862 		phy_odt_ohm = 0;
863 		phy_odt_up_en = 0;
864 		phy_odt_dn_en = 0;
865 	} else {
866 		dram_drv_ohm =
867 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
868 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
869 		phy_odt_up_en =
870 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
871 		phy_odt_dn_en =
872 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
873 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
874 	}
875 
876 	if (dramtype == LPDDR4) {
877 		if (phy_odt_ohm) {
878 			phy_odt_up_en = 0;
879 			phy_odt_dn_en = 1;
880 		}
881 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
882 			dram_caodt_ohm = 0;
883 		else
884 			dram_caodt_ohm =
885 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
886 	}
887 
888 	if (dramtype == DDR3) {
889 		p_drv = d3_phy_drv_2_ohm;
890 		p_odt = d3_phy_odt_2_ohm;
891 	} else if (dramtype == LPDDR4) {
892 		p_drv = lp4_phy_drv_2_ohm;
893 		p_odt = lp4_phy_odt_2_ohm;
894 	} else {
895 		p_drv = d4lp3_phy_drv_2_ohm;
896 		p_odt = d4lp3_phy_odt_2_ohm;
897 	}
898 
899 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
900 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
901 			phy_dq_drv = **(p_drv + i);
902 			break;
903 		}
904 		if (i == 0)
905 			break;
906 	}
907 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
908 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
909 			phy_clk_drv = **(p_drv + i);
910 			break;
911 		}
912 		if (i == 0)
913 			break;
914 	}
915 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
916 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
917 			phy_ca_drv = **(p_drv + i);
918 			break;
919 		}
920 		if (i == 0)
921 			break;
922 	}
923 	if (!phy_odt_ohm)
924 		phy_odt = 0;
925 	else
926 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
927 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
928 				phy_odt = **(p_odt + i);
929 				break;
930 			}
931 			if (i == 0)
932 				break;
933 		}
934 
935 	if (dramtype != LPDDR4) {
936 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
937 			vref_inner = 0x80;
938 		else if (phy_odt_up_en)
939 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
940 				     (dram_drv_ohm + phy_odt_ohm);
941 		else
942 			vref_inner = phy_odt_ohm * 128 /
943 				(phy_odt_ohm + dram_drv_ohm);
944 
945 		if (dramtype != DDR3 && dram_odt_ohm)
946 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
947 				   (phy_dq_drv_ohm + dram_odt_ohm);
948 		else
949 			vref_out = 0x80;
950 	} else {
951 		/* for lp4 */
952 		if (phy_odt_ohm)
953 			vref_inner =
954 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
955 				 256) / 1000;
956 		else
957 			vref_inner =
958 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
959 				 256) / 1000;
960 
961 		vref_out = 0x80;
962 	}
963 
964 	/* default ZQCALIB bypass mode */
965 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
966 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
967 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
968 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
969 	/* clk / cmd slew rate */
970 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
971 
972 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
973 	if (phy_odt_up_en)
974 		phy_odt_up = phy_odt;
975 	if (phy_odt_dn_en)
976 		phy_odt_dn = phy_odt;
977 
978 	for (i = 0; i < 4; i++) {
979 		j = 0x110 + i * 0x10;
980 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
981 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
982 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
983 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
984 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
985 
986 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
987 				1 << 3, phy_lp4_drv_pd_en << 3);
988 		/* dq slew rate */
989 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
990 				0x1f, sr_dq);
991 	}
992 
993 	/* reg_rx_vref_value_update */
994 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
995 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
996 
997 	/* RAM VREF */
998 	writel(vref_out, PHY_REG(phy_base, 0x105));
999 	udelay(8000);
1000 
1001 	if (dramtype == LPDDR4)
1002 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1003 
1004 	if (dramtype == DDR3 || dramtype == DDR4) {
1005 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1006 				DDR_PCTL2_INIT3);
1007 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1008 	} else {
1009 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1010 				DDR_PCTL2_INIT4);
1011 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1012 	}
1013 
1014 	if (dramtype == DDR3) {
1015 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1016 		if (dram_drv_ohm == 34)
1017 			mr1_mr3 |= DDR3_DS_34;
1018 
1019 		if (dram_odt_ohm == 0)
1020 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1021 		else if (dram_odt_ohm <= 40)
1022 			mr1_mr3 |= DDR3_RTT_NOM_40;
1023 		else if (dram_odt_ohm <= 60)
1024 			mr1_mr3 |= DDR3_RTT_NOM_60;
1025 		else
1026 			mr1_mr3 |= DDR3_RTT_NOM_120;
1027 
1028 	} else if (dramtype == DDR4) {
1029 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1030 		if (dram_drv_ohm == 48)
1031 			mr1_mr3 |= DDR4_DS_48;
1032 
1033 		if (dram_odt_ohm == 0)
1034 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1035 		else if (dram_odt_ohm <= 34)
1036 			mr1_mr3 |= DDR4_RTT_NOM_34;
1037 		else if (dram_odt_ohm <= 40)
1038 			mr1_mr3 |= DDR4_RTT_NOM_40;
1039 		else if (dram_odt_ohm <= 48)
1040 			mr1_mr3 |= DDR4_RTT_NOM_48;
1041 		else if (dram_odt_ohm <= 60)
1042 			mr1_mr3 |= DDR4_RTT_NOM_60;
1043 		else
1044 			mr1_mr3 |= DDR4_RTT_NOM_120;
1045 
1046 	} else if (dramtype == LPDDR3) {
1047 		if (dram_drv_ohm <= 34)
1048 			mr1_mr3 |= LPDDR3_DS_34;
1049 		else if (dram_drv_ohm <= 40)
1050 			mr1_mr3 |= LPDDR3_DS_40;
1051 		else if (dram_drv_ohm <= 48)
1052 			mr1_mr3 |= LPDDR3_DS_48;
1053 		else if (dram_drv_ohm <= 60)
1054 			mr1_mr3 |= LPDDR3_DS_60;
1055 		else if (dram_drv_ohm <= 80)
1056 			mr1_mr3 |= LPDDR3_DS_80;
1057 
1058 		if (dram_odt_ohm == 0)
1059 			lp3_odt_value = LPDDR3_ODT_DIS;
1060 		else if (dram_odt_ohm <= 60)
1061 			lp3_odt_value = LPDDR3_ODT_60;
1062 		else if (dram_odt_ohm <= 120)
1063 			lp3_odt_value = LPDDR3_ODT_120;
1064 		else
1065 			lp3_odt_value = LPDDR3_ODT_240;
1066 	} else {/* for lpddr4 */
1067 		/* MR3 for lp4 PU-CAL and PDDS */
1068 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1069 		mr1_mr3 |= lp4_pu_cal;
1070 
1071 		tmp = lp4_odt_calc(dram_drv_ohm);
1072 		if (!tmp)
1073 			tmp = LPDDR4_PDDS_240;
1074 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1075 
1076 		/* MR11 for lp4 ca odt, dq odt set */
1077 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1078 			     DDR_PCTL2_INIT6);
1079 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1080 
1081 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1082 
1083 		tmp = lp4_odt_calc(dram_odt_ohm);
1084 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1085 
1086 		tmp = lp4_odt_calc(dram_caodt_ohm);
1087 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1088 		sw_set_req(dram);
1089 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1090 				DDR_PCTL2_INIT6,
1091 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1092 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1093 		sw_set_ack(dram);
1094 
1095 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1096 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1097 			     DDR_PCTL2_INIT7);
1098 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1099 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1100 
1101 		tmp = lp4_odt_calc(phy_odt_ohm);
1102 		mr22 |= tmp;
1103 		mr22 = mr22 |
1104 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1105 			LPDDR4_ODTE_CK_SHIFT) |
1106 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1107 			LPDDR4_ODTE_CS_SHIFT) |
1108 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1109 			LPDDR4_ODTD_CA_SHIFT);
1110 
1111 		sw_set_req(dram);
1112 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1113 				DDR_PCTL2_INIT7,
1114 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1115 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1116 		sw_set_ack(dram);
1117 	}
1118 
1119 	if (dramtype == DDR4 || dramtype == DDR3) {
1120 		sw_set_req(dram);
1121 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1122 				DDR_PCTL2_INIT3,
1123 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1124 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1125 		sw_set_ack(dram);
1126 	} else {
1127 		sw_set_req(dram);
1128 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1129 				DDR_PCTL2_INIT4,
1130 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1131 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1132 		sw_set_ack(dram);
1133 	}
1134 }
1135 
1136 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1137 				   struct rv1126_sdram_params *sdram_params)
1138 {
1139 	void __iomem *phy_base = dram->phy;
1140 	u32 dramtype = sdram_params->base.dramtype;
1141 	struct sdram_head_info_index_v2 *index =
1142 		(struct sdram_head_info_index_v2 *)common_info;
1143 	struct dq_map_info *map_info;
1144 
1145 	map_info = (struct dq_map_info *)((void *)common_info +
1146 		index->dq_map_index.offset * 4);
1147 
1148 	if (dramtype <= LPDDR4)
1149 		writel((map_info->byte_map[dramtype / 4] >>
1150 			((dramtype % 4) * 8)) & 0xff,
1151 		       PHY_REG(phy_base, 0x4f));
1152 
1153 	return 0;
1154 }
1155 
/*
 * Base PHY configuration: PLL setup, register init table, DQ byte-lane
 * width selection and training-related vref range settings.
 * Must run before any data training.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the 0xFFFFFFFF-terminated {offset, value} PHY init table */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/*
	 * PHY_0x4f holds the byte remap written by
	 * sdram_cmd_dq_path_remap(): 2 bits per physical byte lane.
	 * Find which physical lanes carry logical bytes 0 and 1.
	 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/*
	 * Enable only the byte lanes that match the detected bus width:
	 * bw == 2 -> 32 bit (all four lanes), bw == 1 -> 16 bit,
	 * otherwise 8 bit (logical byte 0 only).
	 */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1201 
1202 static int update_refresh_reg(struct dram_info *dram)
1203 {
1204 	void __iomem *pctl_base = dram->pctl;
1205 	u32 ret;
1206 
1207 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1208 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1209 
1210 	return 0;
1211 }
1212 
1213 /*
1214  * rank = 1: cs0
1215  * rank = 2: cs1
1216  */
1217 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1218 {
1219 	u32 ret;
1220 	u32 i, temp;
1221 	u32 dqmap;
1222 
1223 	void __iomem *pctl_base = dram->pctl;
1224 	struct sdram_head_info_index_v2 *index =
1225 		(struct sdram_head_info_index_v2 *)common_info;
1226 	struct dq_map_info *map_info;
1227 
1228 	map_info = (struct dq_map_info *)((void *)common_info +
1229 		index->dq_map_index.offset * 4);
1230 
1231 	if (dramtype == LPDDR2)
1232 		dqmap = map_info->lp2_dq0_7_map;
1233 	else
1234 		dqmap = map_info->lp3_dq0_7_map;
1235 
1236 	pctl_read_mr(pctl_base, rank, mr_num);
1237 
1238 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1239 
1240 	if (dramtype != LPDDR4) {
1241 		temp = 0;
1242 		for (i = 0; i < 8; i++) {
1243 			temp = temp | (((ret >> i) & 0x1) <<
1244 				       ((dqmap >> (i * 4)) & 0xf));
1245 		}
1246 	} else {
1247 		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1248 	}
1249 
1250 	return ret;
1251 }
1252 
1253 /* before call this function autorefresh should be disabled */
1254 void send_a_refresh(struct dram_info *dram)
1255 {
1256 	void __iomem *pctl_base = dram->pctl;
1257 
1258 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1259 		continue;
1260 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1261 }
1262 
/*
 * Snapshot per-group DQ pre-bit delay values after training.
 * For each of the 4 byte groups, select an entry via the group's
 * 0x2c/0x2d select registers, read the resulting delay from
 * 0x2e/0x2f, and store it back at the offset given by the dq_sel
 * table (left and right loop delays respectively).
 * The write-select -> read -> write-back order is required by the
 * hardware; do not reorder.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
1286 
1287 static void update_dq_rx_prebit(struct dram_info *dram)
1288 {
1289 	void __iomem *phy_base = dram->phy;
1290 
1291 	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
1292 			BIT(4));
1293 	udelay(1);
1294 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
1295 }
1296 
1297 static void update_dq_tx_prebit(struct dram_info *dram)
1298 {
1299 	void __iomem *phy_base = dram->phy;
1300 
1301 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1302 	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
1303 	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1304 	udelay(1);
1305 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1306 }
1307 
1308 static void update_ca_prebit(struct dram_info *dram)
1309 {
1310 	void __iomem *phy_base = dram->phy;
1311 
1312 	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
1313 	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1314 	udelay(1);
1315 	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1316 }
1317 
1318 /*
1319  * dir: 0: de-skew = delta_*
1320  *	1: de-skew = reg val - delta_*
1321  * delta_dir: value for differential signal: clk/
1322  * delta_sig: value for single signal: ca/cmd
1323  */
1324 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1325 			     int delta_sig, u32 cs, u32 dramtype)
1326 {
1327 	void __iomem *phy_base = dram->phy;
1328 	u32 i, cs_en, tmp;
1329 
1330 	if (cs == 0)
1331 		cs_en = 1;
1332 	else if (cs == 2)
1333 		cs_en = 2;
1334 	else
1335 		cs_en = 3;
1336 
1337 	for (i = 0; i < 0x20; i++) {
1338 		if (dir == DESKEW_MDF_ABS_VAL)
1339 			tmp = delta_sig;
1340 		else
1341 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1342 			      delta_sig;
1343 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1344 	}
1345 
1346 	if (dir == DESKEW_MDF_ABS_VAL)
1347 		tmp = delta_dif;
1348 	else
1349 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1350 		       delta_sig + delta_dif;
1351 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1352 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1353 	if (dramtype == LPDDR4) {
1354 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1355 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1356 
1357 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1358 		update_ca_prebit(dram);
1359 	}
1360 }
1361 
1362 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1363 {
1364 	u32 i, j, offset = 0;
1365 	u32 min = 0x3f;
1366 	void __iomem *phy_base = dram->phy;
1367 	u32 byte_en;
1368 
1369 	if (signal == SKEW_TX_SIGNAL)
1370 		offset = 8;
1371 
1372 	if (signal == SKEW_CA_SIGNAL) {
1373 		for (i = 0; i < 0x20; i++)
1374 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1375 	} else {
1376 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1377 		for (j = offset; j < offset + rank * 4; j++) {
1378 			if (!((byte_en >> (j % 4)) & 1))
1379 				continue;
1380 			for (i = 0; i < 11; i++)
1381 				min = MIN(min,
1382 					  readl(PHY_REG(phy_base,
1383 							dqs_dq_skew_adr[j] +
1384 							i)));
1385 		}
1386 	}
1387 
1388 	return min;
1389 }
1390 
1391 static u32 low_power_update(struct dram_info *dram, u32 en)
1392 {
1393 	void __iomem *pctl_base = dram->pctl;
1394 	u32 lp_stat = 0;
1395 
1396 	if (en) {
1397 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1398 	} else {
1399 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1400 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1401 	}
1402 
1403 	return lp_stat;
1404 }
1405 
1406 /*
1407  * signal:
1408  * dir: 0: de-skew = delta_*
1409  *	1: de-skew = reg val - delta_*
1410  * delta_dir: value for differential signal: dqs
1411  * delta_sig: value for single signal: dq/dm
1412  */
1413 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1414 			     int delta_dif, int delta_sig, u32 rank)
1415 {
1416 	void __iomem *phy_base = dram->phy;
1417 	u32 i, j, tmp, offset;
1418 	u32 byte_en;
1419 
1420 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1421 
1422 	if (signal == SKEW_RX_SIGNAL)
1423 		offset = 0;
1424 	else
1425 		offset = 8;
1426 
1427 	for (j = offset; j < (offset + rank * 4); j++) {
1428 		if (!((byte_en >> (j % 4)) & 1))
1429 			continue;
1430 		for (i = 0; i < 0x9; i++) {
1431 			if (dir == DESKEW_MDF_ABS_VAL)
1432 				tmp = delta_sig;
1433 			else
1434 				tmp = delta_sig + readl(PHY_REG(phy_base,
1435 							dqs_dq_skew_adr[j] +
1436 							i));
1437 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1438 		}
1439 		if (dir == DESKEW_MDF_ABS_VAL)
1440 			tmp = delta_dif;
1441 		else
1442 			tmp = delta_dif + readl(PHY_REG(phy_base,
1443 						dqs_dq_skew_adr[j] + 9));
1444 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1445 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1446 	}
1447 	if (signal == SKEW_RX_SIGNAL)
1448 		update_dq_rx_prebit(dram);
1449 	else
1450 		update_dq_tx_prebit(dram);
1451 }
1452 
/*
 * Read-gate training for one chip select.
 * Temporarily forces strong PHY ODT (and, for DDR4, disables the weak
 * pull) so the gate can be found reliably, runs the hardware gate
 * training via PHY_0x2, then restores the saved settings.
 * Returns 0 on success, non-zero (per-lane error bits) or -1 on failure.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;
	u32 weak_pull;

	/* save current per-lane ODT so it can be restored afterwards */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_247ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4) {
		/* drop the weak pull (bits [1:0]) on all four lanes */
		weak_pull = readl(PHY_REG(phy_base, 0x114));
		writel(weak_pull & ~(0x3), PHY_REG(phy_base, 0x114));
		writel(weak_pull & ~(0x3), PHY_REG(phy_base, 0x124));
		writel(weak_pull & ~(0x3), PHY_REG(phy_base, 0x134));
		writel(weak_pull & ~(0x3), PHY_REG(phy_base, 0x144));
	}

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	/* PHY_0x91: training status/result (bit 5 = error) */
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	if (dramtype == DDR4) {
		/* restore the saved weak-pull settings */
		writel(weak_pull, PHY_REG(phy_base, 0x114));
		writel(weak_pull, PHY_REG(phy_base, 0x124));
		writel(weak_pull, PHY_REG(phy_base, 0x134));
		writel(weak_pull, PHY_REG(phy_base, 0x144));
	}

	/*
	 * Error bit set -> hard failure; otherwise compare the per-lane
	 * done bits against the enabled-lane mask: 0 means every active
	 * lane trained.
	 */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1518 
/*
 * Write-leveling training for one chip select.
 * Passes the current MR1 value (from INIT3 of the active fsp) to the
 * PHY, silences the other rank's outputs on DDR3/DDR4, and runs the
 * hardware write-leveling state machine via PHY_0x2.
 * Hangs forever on timeout (fatal in TPL); otherwise returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch MR1 for the current frequency set point */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	/* upper MR1 bits plus a dram-type select flag for the PHY */
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until every enabled lane reports leveling done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			/* unrecoverable during TPL init: halt here */
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1581 
/*
 * 32-byte alternating 0xaa/0x55 test pattern.
 * NOTE(review): not referenced in this part of the file — presumably
 * consumed by training/test code elsewhere; confirm before changing
 * its linkage or type.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1588 
/*
 * Read (eye) training for one chip select using the PHY's automatic
 * read-train engine. Programs refresh timing into the PHY so it can
 * keep the DRAM refreshed during training, sets the DDR4 DQ map when
 * needed, then polls PHY_0x93[7] for completion.
 * Returns 0 on success, -1 on timeout/error or when cs > 1.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/*
	 * For DDR3 at the default inner vref (0x80), nudge vref down by
	 * 0xa during training; restored at the end of this function.
	 */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			/*
			 * NOTE(review): this error path returns without
			 * pctl_rest_zqcs_aref() — auto zq/refresh stays
			 * disabled; callers treat this as fatal.
			 */
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the DDR3 training vref adjusted above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1705 
/*
 * Write (eye) training for one chip select using the PHY's automatic
 * write-train engine. For low-speed LPDDR3 it temporarily reprograms
 * CL/CWL (restored at the end); programs the training address and
 * refresh timing, then polls PHY_0x92[7] for completion.
 * On LPDDR4 the trained write vref is saved into fsp_param for DFS.
 * Returns 0 on success, -1 on training error; hangs on timeout.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: force CL=8/CWL=4 while training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* train at bank 0, row 0, column 0 */
	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	/* kick the engine with a manual refresh */
	send_a_refresh(dram);

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			/* unrecoverable during TPL init: halt here */
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		/*
		 * NOTE(review): this error path returns without
		 * pctl_rest_zqcs_aref() or restoring LPDDR3 CL/CWL;
		 * callers treat training failure as fatal.
		 */
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore LPDDR3 CL/CWL and MR2 changed above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1818 
1819 static int data_training(struct dram_info *dram, u32 cs,
1820 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1821 			 u32 training_flag)
1822 {
1823 	u32 ret = 0;
1824 
1825 	if (training_flag == FULL_TRAINING)
1826 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1827 				WRITE_TRAINING | READ_TRAINING;
1828 
1829 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1830 		ret = data_training_wl(dram, cs,
1831 				       sdram_params->base.dramtype,
1832 				       sdram_params->ch.cap_info.rank);
1833 		if (ret != 0)
1834 			goto out;
1835 	}
1836 
1837 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1838 		ret = data_training_rg(dram, cs,
1839 				       sdram_params->base.dramtype);
1840 		if (ret != 0)
1841 			goto out;
1842 	}
1843 
1844 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1845 		ret = data_training_rd(dram, cs,
1846 				       sdram_params->base.dramtype,
1847 				       sdram_params->base.ddr_freq);
1848 		if (ret != 0)
1849 			goto out;
1850 	}
1851 
1852 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1853 		ret = data_training_wr(dram, cs,
1854 				       sdram_params->base.dramtype,
1855 				       sdram_params->base.ddr_freq, dst_fsp);
1856 		if (ret != 0)
1857 			goto out;
1858 	}
1859 
1860 out:
1861 	return ret;
1862 }
1863 
1864 static int get_wrlvl_val(struct dram_info *dram,
1865 			 struct rv1126_sdram_params *sdram_params)
1866 {
1867 	u32 i, j, clk_skew;
1868 	void __iomem *phy_base = dram->phy;
1869 	u32 lp_stat;
1870 	int ret;
1871 
1872 	lp_stat = low_power_update(dram, 0);
1873 
1874 	clk_skew = readl(PHY_REG(phy_base, 0x150 + 0x17));
1875 
1876 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1877 	if (sdram_params->ch.cap_info.rank == 2)
1878 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1879 
1880 	for (j = 0; j < 2; j++)
1881 		for (i = 0; i < 4; i++)
1882 			wrlvl_result[j][i] =
1883 				readl(PHY_REG(phy_base,
1884 					      wrlvl_result_offset[j][i])) -
1885 				clk_skew;
1886 
1887 	low_power_update(dram, lp_stat);
1888 
1889 	return ret;
1890 }
1891 
/*
 * High-frequency training pipeline:
 * 1. Center the clk/ca de-skew around the mean write-leveling result.
 * 2. Per rank: load that rank's leveling values as the write-train
 *    DQS defaults, then run gate + read + write training.
 * 3. Normalize all skews so the smallest value becomes 0 (maximizes
 *    remaining adjustment headroom), then re-run gate training.
 * Returns 0 on success, non-zero on any training failure.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	u32 dqs_skew, clk_skew, ca_skew;
	int ret;

	/* average write-leveling result over all ranks and bytes */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
			dqs_skew += wrlvl_result[j][i];
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       ARRAY_SIZE(wrlvl_result[0]));

	/* re-center: move the average dqs skew to mid-scale (0x20) */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* rank 0 leveling results as write-train dqs defaults */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same for rank 1 before training it */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift RX skews down so the minimum becomes 0 */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* shift TX and CA skews together by the joint minimum */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* re-run gate training with the shifted skews */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
1968 
/*
 * Select the MSCH (memory scheduler) address-mapping configuration
 * and clear bits [1:0] of GRF noc_con0.
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->deviceconf);
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}
1974 
/*
 * Copy the pre-computed NoC timing words from sdram_params into the
 * server MSCH timing registers.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
1990 
1991 static void dram_all_config(struct dram_info *dram,
1992 			    struct rv1126_sdram_params *sdram_params)
1993 {
1994 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1995 	u32 dram_type = sdram_params->base.dramtype;
1996 	void __iomem *pctl_base = dram->pctl;
1997 	u32 sys_reg2 = 0;
1998 	u32 sys_reg3 = 0;
1999 	u64 cs_cap[2];
2000 	u32 cs_pst;
2001 
2002 	set_ddrconfig(dram, cap_info->ddrconfig);
2003 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2004 			 &sys_reg3, 0);
2005 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2006 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2007 
2008 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2009 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2010 
2011 	if (cap_info->rank == 2) {
2012 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2013 			6 + 2;
2014 		if (cs_pst > 28)
2015 			cs_cap[0] = 1 << cs_pst;
2016 	}
2017 
2018 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2019 			(((cs_cap[0] >> 20) / 64) & 0xff),
2020 			&dram->msch->devicesize);
2021 	update_noc_timing(dram, sdram_params);
2022 }
2023 
2024 static void enable_low_power(struct dram_info *dram,
2025 			     struct rv1126_sdram_params *sdram_params)
2026 {
2027 	void __iomem *pctl_base = dram->pctl;
2028 	u32 grf_lp_con;
2029 
2030 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2031 
2032 	if (sdram_params->base.dramtype == DDR4)
2033 		grf_lp_con = (0x7 << 16) | (1 << 1);
2034 	else if (sdram_params->base.dramtype == DDR3)
2035 		grf_lp_con = (0x7 << 16) | (1 << 0);
2036 	else
2037 		grf_lp_con = (0x7 << 16) | (1 << 2);
2038 
2039 	/* en lpckdis_en */
2040 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2041 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2042 
2043 	/* enable sr, pd */
2044 	if (dram->pd_idle == 0)
2045 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2046 	else
2047 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2048 	if (dram->sr_idle == 0)
2049 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2050 	else
2051 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2052 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2053 }
2054 
2055 static void ddr_set_atags(struct dram_info *dram,
2056 			  struct rv1126_sdram_params *sdram_params)
2057 {
2058 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2059 	u32 dram_type = sdram_params->base.dramtype;
2060 	void __iomem *pctl_base = dram->pctl;
2061 	struct tag_serial t_serial;
2062 	struct tag_ddr_mem t_ddrmem;
2063 	struct tag_soc_info t_socinfo;
2064 	u64 cs_cap[2];
2065 	u32 cs_pst = 0;
2066 
2067 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2068 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2069 
2070 	memset(&t_serial, 0, sizeof(struct tag_serial));
2071 
2072 	t_serial.version = 0;
2073 	t_serial.enable = 1;
2074 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2075 	t_serial.baudrate = CONFIG_BAUDRATE;
2076 	t_serial.m_mode = SERIAL_M_MODE_M0;
2077 	t_serial.id = 2;
2078 
2079 	atags_destroy();
2080 	atags_set_tag(ATAG_SERIAL,  &t_serial);
2081 
2082 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2083 	if (cap_info->row_3_4) {
2084 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2085 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2086 	}
2087 	t_ddrmem.version = 0;
2088 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2089 	if (cs_cap[1]) {
2090 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2091 			6 + 2;
2092 	}
2093 
2094 	if (cs_cap[1] && cs_pst > 27) {
2095 		t_ddrmem.count = 2;
2096 		t_ddrmem.bank[1] = 1 << cs_pst;
2097 		t_ddrmem.bank[2] = cs_cap[0];
2098 		t_ddrmem.bank[3] = cs_cap[1];
2099 	} else {
2100 		t_ddrmem.count = 1;
2101 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
2102 	}
2103 
2104 	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);
2105 
2106 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2107 	t_socinfo.version = 0;
2108 	t_socinfo.name = 0x1126;
2109 }
2110 
2111 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2112 {
2113 	u32 split;
2114 
2115 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2116 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2117 		split = 0;
2118 	else
2119 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2120 			SPLIT_SIZE_MASK;
2121 
2122 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2123 			     &sdram_params->base, split);
2124 }
2125 
/*
 * Bring the DRAM up once at the initial frequency: configure clocks,
 * release the DDR resets stage by stage while programming PHY and
 * PCTL, run read-gate training, and apply the final system
 * configuration.
 *
 * @post_init: 0 for the first probe pass (training errors silent),
 *             non-zero for the final pass (errors printed, rank 1
 *             also trained).
 * Returns 0 on success, -1 on training failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* assert all DDR resets, then release them one stage at a time */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait for the controller to leave its init state */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* LPDDR3/LPDDR4: (re)write the ODT/vref mode registers */
	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/* rank 1 is only verified on the final pass */
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/*
		 * * 39: presumably converts the PHY vref code into the
		 * controller's VrefDQ units -- TODO confirm the scale.
		 */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2214 
/*
 * Probe the DRAM geometry (column/bank/row bits, rank count, bus
 * width) and record it in sdram_params->ch.cap_info.
 *
 * Non-LPDDR4 types are probed with test patterns; LPDDR4 geometry is
 * derived from the MR8 readout instead.
 *
 * Returns 0 on success, -1 if column/row detection failed.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe col, bank, dbw */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: col/bk fixed, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: derive row count / 3-4-row flag from MR8 */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* temporarily disable low-power entry while training */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank-1 training only succeeds when a second CS is present */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		phy_soft_reset(dram);

		/*
		 * Full-width training passing selects bw = 2, otherwise
		 * bw = 1 (presumably 32-bit vs 16-bit -- TODO confirm
		 * the encoding).
		 */
		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0)
			cap_info->bw = 2;
		else
			cap_info->bw = 1;
	}

	/* restore the saved low-power settings */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2310 
2311 static int dram_detect_cs1_row(struct dram_info *dram,
2312 			       struct rv1126_sdram_params *sdram_params,
2313 			       unsigned char channel)
2314 {
2315 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2316 	void __iomem *pctl_base = dram->pctl;
2317 	u32 ret = 0;
2318 	void __iomem *test_addr;
2319 	u32 row, bktmp, coltmp, bw;
2320 	u64 cs0_cap;
2321 	u32 byte_mask;
2322 	u32 cs_pst;
2323 	u32 cs_add = 0;
2324 	u32 max_row;
2325 
2326 	if (cap_info->rank == 2) {
2327 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2328 			6 + 2;
2329 		if (cs_pst < 28)
2330 			cs_add = 1;
2331 
2332 		cs0_cap = 1 << cs_pst;
2333 
2334 		if (sdram_params->base.dramtype == DDR4) {
2335 			if (cap_info->dbw == 0)
2336 				bktmp = cap_info->bk + 2;
2337 			else
2338 				bktmp = cap_info->bk + 1;
2339 		} else {
2340 			bktmp = cap_info->bk;
2341 		}
2342 		bw = cap_info->bw;
2343 		coltmp = cap_info->col;
2344 
2345 		if (bw == 2)
2346 			byte_mask = 0xFFFF;
2347 		else
2348 			byte_mask = 0xFF;
2349 
2350 		max_row = (cs_pst == 31) ? 30 : 31;
2351 
2352 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2353 
2354 		row = (cap_info->cs0_row > max_row) ? max_row :
2355 			cap_info->cs0_row;
2356 
2357 		for (; row > 12; row--) {
2358 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2359 				    (u32)cs0_cap +
2360 				    (1ul << (row + bktmp + coltmp +
2361 					     cs_add + bw - 1ul)));
2362 
2363 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2364 			writel(PATTERN, test_addr);
2365 
2366 			if (((readl(test_addr) & byte_mask) ==
2367 			     (PATTERN & byte_mask)) &&
2368 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2369 			      byte_mask) == 0)) {
2370 				ret = row;
2371 				break;
2372 			}
2373 		}
2374 	}
2375 
2376 	return ret;
2377 }
2378 
2379 /* return: 0 = success, other = fail */
2380 static int sdram_init_detect(struct dram_info *dram,
2381 			     struct rv1126_sdram_params *sdram_params)
2382 {
2383 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2384 	u32 ret;
2385 	u32 sys_reg = 0;
2386 	u32 sys_reg3 = 0;
2387 
2388 	if (sdram_init_(dram, sdram_params, 0) != 0)
2389 		return -1;
2390 
2391 	if (sdram_params->base.dramtype == DDR3) {
2392 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2393 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2394 			return -1;
2395 	}
2396 
2397 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2398 		return -1;
2399 
2400 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2401 				   sdram_params->base.dramtype);
2402 	ret = sdram_init_(dram, sdram_params, 1);
2403 	if (ret != 0)
2404 		goto out;
2405 
2406 	cap_info->cs1_row =
2407 		dram_detect_cs1_row(dram, sdram_params, 0);
2408 	if (cap_info->cs1_row) {
2409 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2410 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2411 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2412 				    sys_reg, sys_reg3, 0);
2413 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2414 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2415 	}
2416 
2417 	sdram_detect_high_row(cap_info);
2418 
2419 out:
2420 	return ret;
2421 }
2422 
2423 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2424 {
2425 	u32 i;
2426 	u32 offset = 0;
2427 	struct ddr2_3_4_lp2_3_info *ddr_info;
2428 
2429 	if (!freq_mhz) {
2430 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2431 		if (ddr_info)
2432 			freq_mhz =
2433 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2434 				DDR_FREQ_MASK;
2435 		else
2436 			freq_mhz = 0;
2437 	}
2438 
2439 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2440 		if (sdram_configs[i].base.ddr_freq == 0 ||
2441 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2442 			break;
2443 	}
2444 	offset = i == 0 ? 0 : i - 1;
2445 
2446 	return &sdram_configs[offset];
2447 }
2448 
/*
 * PCTL (uMCTL2) timing registers that pre_set_rate() re-programs in
 * the target frequency set point's register copy before switching.
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2472 
/*
 * PHY registers re-programmed per frequency set point by
 * pre_set_rate() in its "cl cwl al update" step.
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2478 
/*
 * Prepare the controller/PHY register copies for frequency set point
 * @dst_fsp before the actual switch in ddr_set_rate(): copy the
 * relevant timing registers into the per-FSP register bank, update
 * drive strength/ODT, and (LPDDR4 only) write the mode registers and
 * mirror them into the PHY's per-FSP MR shadow registers.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/*
	 * pctl timing update.  'find' resumes the inner scan at the last
	 * match, which assumes pctl_regs entries appear in the same
	 * order as pctl_need_update_reg[]; 0xFFFFFFFF terminates the
	 * table.
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}
	sw_set_ack(dram);

	/* phy timing update: FSP > 0 uses a shifted register window */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	/*
	 * LPDDR4: write each MR to the DRAM and mirror its value into
	 * the PHY shadow registers (0x17..0x1d).
	 */
	if (dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2594 
/*
 * Snapshot the parameters of frequency set point @dst_fsp into the
 * fsp_param[] table: ODT/drive settings decoded from the per-FSP
 * INIT3/INIT4/INIT6 mode register images, trained vref readbacks and
 * the NoC timings, for hand-off via copy_fsp_param_to_ddr().
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	/*
	 * NOTE(review): ddr_info is dereferenced below without a NULL
	 * check, while other callers in this file do check
	 * get_ddr_drv_odt_info() for NULL -- confirm it cannot fail here.
	 */
	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	if (sdram_params->base.dramtype == LPDDR4) {
		/* LPDDR4 read ODT: pull-down only */
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* pick the ODT value from whichever pull is enabled */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	if (sdram_params->base.dramtype == DDR3) {
		/* decode drive strength / ODT bits from the DDR3 MR1 image */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = ((temp >> 1) & 0x1) |
				       (((temp >> 5) & 0x1) << 1);
		p_fsp_param->dq_odt = ((temp >> 2) & 0x1) |
				      (((temp >> 6) & 0x1) << 1) |
				      (((temp >> 9) & 0x1) << 2);
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		/* decode from the DDR4 MR1 image */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 1) & 0x3;
		p_fsp_param->dq_odt = (temp >> 8) & 0x7;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		/* decode from the LPDDR3 MR3 image */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* decode from the LPDDR4 MR3 (drive) and MR11 (ODT) images */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 3) & 0x7;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & 0x7;
		p_fsp_param->ca_odt = (temp >> 4) & 0x7;

		/*
		 * CA vref: midpoint of the max/min trained readbacks,
		 * OR'ed with bit 6 of PHY reg 0x1e (presumably the vref
		 * range select -- TODO confirm).
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* carry over the NoC timings that match this set point */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark this entry as valid */
	p_fsp_param->flag = FSP_FLAG;
}
2711 
2712 static void copy_fsp_param_to_ddr(void)
2713 {
2714 	u32 i;
2715 
2716 	printascii("fsp freq: ");
2717 	for (i = 0; i < MAX_IDX; i++) {
2718 		printascii("[");
2719 		printdec(i);
2720 		printascii("]");
2721 		printdec(fsp_param[i].freq_mhz);
2722 		printascii("Mhz ");
2723 	}
2724 	printascii("\n");
2725 
2726 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
2727 	       sizeof(fsp_param));
2728 }
2729 
2730 void ddr_set_rate(struct dram_info *dram,
2731 		  struct rv1126_sdram_params *sdram_params,
2732 		  u32 freq, u32 cur_freq, u32 dst_fsp,
2733 		  u32 dst_fsp_lp4, u32 training_en)
2734 {
2735 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
2736 	u32 mr_tmp;
2737 	u32 lp_stat;
2738 	u32 dramtype = sdram_params->base.dramtype;
2739 	struct rv1126_sdram_params *sdram_params_new;
2740 	void __iomem *pctl_base = dram->pctl;
2741 	void __iomem *phy_base = dram->phy;
2742 
2743 	lp_stat = low_power_update(dram, 0);
2744 	sdram_params_new = get_default_sdram_config(freq);
2745 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
2746 
2747 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
2748 
2749 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
2750 			 PCTL2_OPERATING_MODE_MASK) ==
2751 			 PCTL2_OPERATING_MODE_SR)
2752 		continue;
2753 
2754 	dest_dll_off = 0;
2755 	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2756 			  DDR_PCTL2_INIT3);
2757 	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
2758 	    (dramtype == DDR4 && !(dst_init3 & 1)))
2759 		dest_dll_off = 1;
2760 
2761 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
2762 	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
2763 			  DDR_PCTL2_INIT3);
2764 	cur_init3 &= PCTL2_MR_MASK;
2765 	cur_dll_off = 1;
2766 	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
2767 	    (dramtype == DDR4 && (cur_init3 & 1)))
2768 		cur_dll_off = 0;
2769 
2770 	if (!cur_dll_off) {
2771 		if (dramtype == DDR3)
2772 			cur_init3 |= 1;
2773 		else
2774 			cur_init3 &= ~1;
2775 		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
2776 	}
2777 
2778 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
2779 		     PCTL2_DIS_AUTO_REFRESH);
2780 	update_refresh_reg(dram);
2781 
2782 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
2783 	while (1) {
2784 		if (((readl(pctl_base + DDR_PCTL2_STAT) &
2785 		      PCTL2_SELFREF_TYPE_MASK) ==
2786 		     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
2787 		    ((readl(pctl_base + DDR_PCTL2_STAT) &
2788 		      PCTL2_OPERATING_MODE_MASK) ==
2789 		     PCTL2_OPERATING_MODE_SR)) {
2790 			break;
2791 		}
2792 	}
2793 
2794 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
2795 	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
2796 	       dram->pmugrf->soc_con[0]);
2797 	sw_set_req(dram);
2798 	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
2799 		     PCTL2_DFI_INIT_COMPLETE_EN);
2800 	sw_set_ack(dram);
2801 
2802 	sw_set_req(dram);
2803 	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
2804 		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
2805 	else
2806 		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
2807 
2808 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
2809 		     PCTL2_DIS_SRX_ZQCL);
2810 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
2811 		     PCTL2_DIS_SRX_ZQCL);
2812 	sw_set_ack(dram);
2813 
2814 	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
2815 	       dram->cru->clkgate_con[2]);
2816 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
2817 					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
2818 					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
2819 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
2820 
2821 	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
2822 	rkclk_set_dpll(dram, freq * MHz / 2);
2823 	phy_pll_set(dram, freq * MHz, 0);
2824 	phy_pll_set(dram, freq * MHz, 1);
2825 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
2826 
2827 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
2828 			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
2829 			dram->pmugrf->soc_con[0]);
2830 	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
2831 	       dram->cru->clkgate_con[2]);
2832 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
2833 					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
2834 					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
2835 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
2836 	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
2837 	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
2838 		continue;
2839 
2840 	sw_set_req(dram);
2841 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2842 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
2843 	sw_set_ack(dram);
2844 	update_refresh_reg(dram);
2845 	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
2846 
2847 	clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
2848 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
2849 	       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
2850 		continue;
2851 
2852 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
2853 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
2854 
2855 	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
2856 	if (dramtype == LPDDR3) {
2857 		pctl_write_mr(dram->pctl, 3, 1,
2858 			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
2859 			      PCTL2_MR_MASK,
2860 			      dramtype);
2861 		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
2862 			      dramtype);
2863 		pctl_write_mr(dram->pctl, 3, 3,
2864 			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
2865 			      PCTL2_MR_MASK,
2866 			      dramtype);
2867 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
2868 	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
2869 		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
2870 			      dramtype);
2871 		if (!dest_dll_off) {
2872 			pctl_write_mr(dram->pctl, 3, 0,
2873 				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
2874 				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
2875 				      dramtype);
2876 			udelay(2);
2877 		}
2878 		pctl_write_mr(dram->pctl, 3, 0,
2879 			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
2880 			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
2881 			      dramtype);
2882 		pctl_write_mr(dram->pctl, 3, 2,
2883 			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
2884 			       PCTL2_MR_MASK), dramtype);
2885 		if (dramtype == DDR4) {
2886 			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
2887 				      dramtype);
2888 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2889 				       DDR_PCTL2_INIT6);
2890 			pctl_write_mr(dram->pctl, 3, 4,
2891 				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
2892 				       PCTL2_MR_MASK,
2893 				      dramtype);
2894 			pctl_write_mr(dram->pctl, 3, 5,
2895 				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
2896 				      PCTL2_MR_MASK,
2897 				      dramtype);
2898 
2899 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2900 				       DDR_PCTL2_INIT7);
2901 			pctl_write_mr(dram->pctl, 3, 6,
2902 				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
2903 				      PCTL2_MR_MASK,
2904 				      dramtype);
2905 		}
2906 	} else if (dramtype == LPDDR4) {
2907 		pctl_write_mr(dram->pctl, 3, 13,
2908 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2909 			       PCTL2_MR_MASK) & (~(BIT(7)))) |
2910 			      dst_fsp_lp4 << 7, dramtype);
2911 	}
2912 
2913 	/* training */
2914 	high_freq_training(dram, sdram_params_new, dst_fsp);
2915 
2916 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
2917 		     PCTL2_DIS_AUTO_REFRESH);
2918 	low_power_update(dram, lp_stat);
2919 
2920 	save_fsp_param(dram, dst_fsp, sdram_params_new);
2921 }
2922 
2923 static void ddr_set_rate_for_fsp(struct dram_info *dram,
2924 				 struct rv1126_sdram_params *sdram_params)
2925 {
2926 	struct ddr2_3_4_lp2_3_info *ddr_info;
2927 	u32 f0, f1, f2, f3;
2928 	u32 dramtype = sdram_params->base.dramtype;
2929 
2930 	ddr_info = get_ddr_drv_odt_info(dramtype);
2931 	if (!ddr_info)
2932 		return;
2933 
2934 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
2935 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
2936 
2937 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2938 	     DDR_FREQ_MASK;
2939 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
2940 	     DDR_FREQ_MASK;
2941 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
2942 	     DDR_FREQ_MASK;
2943 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
2944 	     DDR_FREQ_MASK;
2945 
2946 	if (get_wrlvl_val(dram, sdram_params))
2947 		printascii("get wrlvl value fail\n");
2948 	printascii("change to f1: ");
2949 	printdec(f1);
2950 	printascii("MHz\n");
2951 	ddr_set_rate(&dram_info, sdram_params, f1,
2952 		     sdram_params->base.ddr_freq, 1, 1, 1);
2953 	printascii("change to f2: ");
2954 	printdec(f2);
2955 	printascii("MHz\n");
2956 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
2957 	printascii("change to f3: ");
2958 	printdec(f3);
2959 	printascii("MHz\n");
2960 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
2961 	printascii("change to f0: ");
2962 	printdec(f0);
2963 	printascii("MHz\n");
2964 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
2965 }
2966 
2967 int get_uart_config(void)
2968 {
2969 	struct sdram_head_info_index_v2 *index =
2970 		(struct sdram_head_info_index_v2 *)common_info;
2971 	struct global_info *gbl_info;
2972 
2973 	gbl_info = (struct global_info *)((void *)common_info +
2974 		index->global_index.offset * 4);
2975 
2976 	return gbl_info->uart_info;
2977 }
2978 
/*
 * Top-level TPL DRAM bring-up entry point.
 *
 * Wires up the fixed peripheral base addresses, validates the packed
 * common-info blob, detects the DRAM, trains every frequency set point
 * and publishes the results (FSP params + atags) for later boot stages.
 *
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* Fixed MMIO base addresses for this SoC (see #defines above) */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

	/*
	 * Sanity-check the common-info blob: the header must be version 2,
	 * every per-type section size (in u32 words) must match the struct
	 * this build was compiled against, and no section offset may be 0.
	 * A mismatch means the blob and this binary are out of sync.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	/* Section offsets are counted in 32-bit words from the blob start */
	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	/* Self-refresh / power-down idle thresholds from the blob */
	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];

	/*
	 * For DDR3/DDR4, patch the controller's 2T-timing bit (bit 10 of
	 * the second pctl register word) according to the blob's 2T flag.
	 */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	/* Probe/initialize the DRAM; non-zero means detection failed */
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);

	/* Train all frequency set points, then stash results for later */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
	copy_fsp_param_to_ddr();

	/* Publish DDR layout/params to next boot stages via atags */
	ddr_set_atags(&dram_info, sdram_params);

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3052 #endif /* CONFIG_TPL_BUILD */
3053