xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 58cb0c2b99bbcb16ea3e203afb8b2b76e11f4d78)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
/* Bit-mask flags selecting which PHY training steps to run. */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
/* Signal-group selectors (presumably consumed by de-skew code later in the file). */
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
/* De-skew modification modes: program an absolute value vs. apply a delta. */
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
/* The TPL build of this driver requires the tiny framework. */
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/*
 * Driver state: MMIO bases of the DDR controller/PHY plus the SoC
 * register files the init sequence touches, and idle timeouts.
 */
43 struct dram_info {
44 	void __iomem *pctl;	/* DDR controller (uPCTL2) base */
45 	void __iomem *phy;	/* DDR PHY base */
46 	struct rv1126_cru *cru;	/* clock & reset unit */
47 	struct msch_regs *msch;	/* memory scheduler */
48 	struct rv1126_ddrgrf *ddrgrf;
49 	struct rv1126_grf *grf;
50 	struct ram_info info;	/* detected size/base reported to the ram uclass */
51 	struct rv1126_pmugrf *pmugrf;
52 	u32 sr_idle;	/* self-refresh idle timeout */
53 	u32 pd_idle;	/* power-down idle timeout */
54 };
55 
/* Fixed RV1126 physical base addresses used before any device model is up. */
56 #define GRF_BASE_ADDR			0xfe000000
57 #define PMU_GRF_BASE_ADDR		0xfe020000
58 #define DDR_GRF_BASE_ADDR		0xfe030000
59 #define BUS_SGRF_BASE_ADDR		0xfe0a0000
60 #define SERVER_MSCH_BASE_ADDR		0xfe800000
61 #define CRU_BASE_ADDR			0xff490000
62 #define DDR_PHY_BASE_ADDR		0xff4a0000
63 #define UPCTL2_BASE_ADDR		0xffa50000
64 
/* Secure GRF offsets holding the DDR controller reset requests. */
65 #define SGRF_SOC_CON12			0x30
66 #define SGRF_SOC_CON13			0x34
67 
/* Single driver instance; TPL has no malloc'd device state. */
68 struct dram_info dram_info;
69 
/*
 * Compile-time DRAM type selection: exactly one TPL_INIT_DDR_TYPE_* is
 * defined and picks which set of generated .inc tables populates
 * sdram_configs[] (one entry per supported frequency, 330..1056 MHz).
 */
70 #define TPL_INIT_DDR_TYPE_DDR3
71 #ifdef TPL_INIT_DDR_TYPE_DDR3
72 struct rv1126_sdram_params sdram_configs[] = {
73 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-330.inc"
74 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
75 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
76 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
77 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
78 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
79 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
80 };
81 #elif defined TPL_INIT_DDR_TYPE_DDR4
82 struct rv1126_sdram_params sdram_configs[] = {
83 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-330.inc"
84 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
85 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
86 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
87 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
88 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
89 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
90 };
91 #elif defined TPL_INIT_DDR_TYPE_LPDDR3
92 struct rv1126_sdram_params sdram_configs[] = {
93 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-330.inc"
94 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
95 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
96 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
97 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
98 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
99 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
100 };
101 #elif defined TPL_INIT_DDR_TYPE_LPDDR4
102 struct rv1126_sdram_params sdram_configs[] = {
103 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-330.inc"
104 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
105 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
106 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
107 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
108 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
109 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
110 };
111 #endif
112 
/*
 * Loader parameter blob; get_ddr_drv_odt_info() reinterprets the front of
 * this array as a struct sdram_head_info_index_v2 to locate the
 * per-DRAM-type drive-strength/ODT records.
 */
113 u32 common_info[] = {
114 	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
115 };
116 
/* Per-frequency-set-point parameters (presumably filled during init — not visible here). */
117 static struct rv1126_fsp_param fsp_param[MAX_IDX];
118 
/* Cached LPDDR3 ODT mode-register value (written elsewhere in the file). */
119 static u8 lp3_odt_value;
120 
/* Write-leveling results: [rank][byte lane] — see wrlvl_result_offset. */
121 static u8 wrlvl_result[2][4];
122 
123 /* DDR configuration 0-9 */
/*
 * DDR3/LPDDR3/LPDDR4 ddrconfig encodings, matched in calculate_ddrconfig():
 *   bit [8]   rank - 1
 *   bits[7:5] cs0_row - 13
 *   bit [4]   (meaning not established by this file — TODO confirm)
 *   bit [3]   set when the bank field equals 3
 *   bits[2:0] bw + col - 10
 */
124 u16 ddr_cfg_2_rbc[] = {
125 	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
126 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
127 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
128 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
129 	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
130 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
131 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
132 	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
133 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
134 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
135 };
136 
137 /* DDR configuration 10-21 */
/*
 * DDR4 ddrconfig encodings, matched in calculate_ddrconfig():
 *   bit [7]   rank - 1
 *   bits[6:4] cs0_row - 13
 *   bit [3]   set only for the dual-rank equal-row special case
 *   bits[2:1] bw
 *   bit [0]   die data width (dbw)
 */
138 u8 ddr4_cfg_2_rbc[] = {
139 	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
140 	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
141 	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
142 	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
143 	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
144 	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
145 	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
146 	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
147 	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
148 	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
149 	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
150 	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
151 };
152 
153 /* DDR configuration 22-28 */
/*
 * Extra configs 22-28; presumably the same bit layout as ddr_cfg_2_rbc
 * (this table is not referenced in the visible part of the file — confirm
 * against the consumer further down).
 */
154 u16 ddr_cfg_2_rbc_p2[] = {
155 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
156 	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
157 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
158 	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
159 	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
160 	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
161 	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
162 };
163 
/*
 * {DDR4 ddrconfig id, DDR3-style ddrconfig id} pairs.
 * calculate_ddrconfig() translates a matched DDR4 id into the DDR3-style
 * id; set_ctl_address_map() performs the reverse lookup to pick the
 * addrmap[] row.
 */
164 u8 d4_rbc_2_d3_rbc[][2] = {
165 	{10, 0},
166 	{11, 2},
167 	{12, 23},
168 	{13, 1},
169 	{14, 28},
170 	{15, 24},
171 	{16, 27},
172 	{17, 7},
173 	{18, 6},
174 	{19, 25},
175 	{20, 26},
176 	{21, 3}
177 };
178 
/*
 * Per-ddrconfig (rows 0..22) register values copied verbatim into the
 * uPCTL2 ADDRMAP0..ADDRMAP8 registers by set_ctl_address_map().
 */
179 u32 addrmap[23][9] = {
180 	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
181 		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
182 	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
183 		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
184 	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
185 		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
186 	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
187 		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
188 	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
189 		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
190 	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
191 		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
192 	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
193 		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
194 	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
195 		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
196 	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
197 		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
198 	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
199 		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */
200 
201 	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
202 		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
203 	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
204 		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
205 	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
206 		0x07070707, 0x00000f07, 0x0700}, /* 12 */
207 	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
208 		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
209 	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
210 		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
211 	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
212 		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
213 	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
214 		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
215 	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
216 		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
217 	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
218 		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
219 	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
220 		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */
221 
222 	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
223 		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
224 	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
225 		0x06060606, 0x00000f06, 0x0600}, /* 21 */
226 	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
227 		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
228 };
229 
/*
 * Per-DQ-bit selector triples. Not referenced in the visible part of the
 * file; presumably {dq id, rx selector, tx selector} used by the bit
 * de-skew code further down — verify against the consumer.
 */
230 static u8 dq_sel[22][3] = {
231 	{0x0, 0x17, 0x22},
232 	{0x1, 0x18, 0x23},
233 	{0x2, 0x19, 0x24},
234 	{0x3, 0x1a, 0x25},
235 	{0x4, 0x1b, 0x26},
236 	{0x5, 0x1c, 0x27},
237 	{0x6, 0x1d, 0x28},
238 	{0x7, 0x1e, 0x29},
239 	{0x8, 0x16, 0x21},
240 	{0x9, 0x1f, 0x2a},
241 	{0xa, 0x20, 0x2b},
242 	{0x10, 0x1, 0xc},
243 	{0x11, 0x2, 0xd},
244 	{0x12, 0x3, 0xe},
245 	{0x13, 0x4, 0xf},
246 	{0x14, 0x5, 0x10},
247 	{0x15, 0x6, 0x11},
248 	{0x16, 0x7, 0x12},
249 	{0x17, 0x8, 0x13},
250 	{0x18, 0x0, 0xb},
251 	{0x19, 0x9, 0x14},
252 	{0x1a, 0xa, 0x15}
253 };
254 
/* Address-group bases per {rank, half}; values come from the ADD_GROUP_* macros. */
255 static u16 grp_addr[4] = {
256 	ADD_GROUP_CS0_A,
257 	ADD_GROUP_CS0_B,
258 	ADD_GROUP_CS1_A,
259 	ADD_GROUP_CS1_B
260 };
261 
/*
 * PHY register offsets where write-leveling results are read back, indexed
 * [rank][byte lane] to match wrlvl_result[][] (0xa0/0xd0 look like per-
 * half register banks — confirm against the PHY map).
 */
262 static u8 wrlvl_result_offset[2][4] = {
263 	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
264 	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
265 };
266 
/*
 * Base PHY register offsets of the per-DQS skew blocks, indexed by the
 * SKEW_UPDATE_* ids named in the per-entry comments (RX then TX, CS0 then
 * CS1, four DQS groups each).
 */
267 static u16 dqs_dq_skew_adr[16] = {
268 	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
269 	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
270 	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
271 	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
272 	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
273 	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
274 	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
275 	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
276 	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
277 	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
278 	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
279 	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
280 	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
281 	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
282 	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
283 	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
284 };
285 
286 static void rkclk_ddr_reset(struct dram_info *dram,
287 			    u32 ctl_srstn, u32 ctl_psrstn,
288 			    u32 phy_srstn, u32 phy_psrstn)
289 {
290 	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
291 	       UPCTL2_ASRSTN_REQ(ctl_srstn),
292 	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
293 
294 	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
295 	       &dram->cru->softrst_con[12]);
296 }
297 
298 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
299 {
300 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
301 	int delay = 1000;
302 	u32 mhz = hz / MHz;
303 
304 	refdiv = 1;
305 	if (mhz <= 100) {
306 		postdiv1 = 6;
307 		postdiv2 = 4;
308 	} else if (mhz <= 150) {
309 		postdiv1 = 4;
310 		postdiv2 = 4;
311 	} else if (mhz <= 200) {
312 		postdiv1 = 6;
313 		postdiv2 = 2;
314 	} else if (mhz <= 300) {
315 		postdiv1 = 4;
316 		postdiv2 = 2;
317 	} else if (mhz <= 400) {
318 		postdiv1 = 6;
319 		postdiv2 = 1;
320 	} else {
321 		postdiv1 = 4;
322 		postdiv2 = 1;
323 	}
324 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
325 
326 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
327 
328 	writel(0x1f000000, &dram->cru->clksel_con[64]);
329 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
330 	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
331 	       &dram->cru->pll[1].con1);
332 
333 	while (delay > 0) {
334 		udelay(1);
335 		if (LOCK(readl(&dram->cru->pll[1].con1)))
336 			break;
337 		delay--;
338 	}
339 
340 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
341 }
342 
343 static void rkclk_configure_ddr(struct dram_info *dram,
344 				struct rv1126_sdram_params *sdram_params)
345 {
346 	/* for inno ddr phy need freq / 2 */
347 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
348 }
349 
350 static void phy_soft_reset(struct dram_info *dram)
351 {
352 	void __iomem *phy_base = dram->phy;
353 
354 	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
355 	udelay(1);
356 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
357 	udelay(1);
358 }
359 
/*
 * Pick the ddrconfig id (index into the config tables / addrmap[]) that
 * matches the detected geometry in cap_info.
 *
 * A candidate encoding 'tmp' is built from rank/row/col/bw/bank (see the
 * table headers above) and compared against ddr4_cfg_2_rbc[] (DDR4) or
 * ddr_cfg_2_rbc[] (everything else). The row field is matched with "<="
 * so a config with more row bits than needed is acceptable; unused row
 * bits are masked off later by set_ctl_address_map(). For DDR4 the
 * matched id is finally translated to its DDR3-style id through
 * d4_rbc_2_d3_rbc[].
 *
 * Returns the ddrconfig id; on no match returns (u32)-1, which the
 * "> 28" check reports (ddrconf is unsigned, so -1 compares huge).
 */
360 static unsigned int
361 	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
362 {
363 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
364 	u32 cs, bw, die_bw, col, row, bank;
365 	u32 cs1_row;
366 	u32 i, tmp;
367 	u32 ddrconf = -1;
368 	u32 row_3_4;
369 
370 	cs = cap_info->rank;
371 	bw = cap_info->bw;
372 	die_bw = cap_info->dbw;
373 	col = cap_info->col;
374 	row = cap_info->cs0_row;
375 	cs1_row = cap_info->cs1_row;
376 	bank = cap_info->bk;
377 	row_3_4 = cap_info->row_3_4;
378 
379 	if (sdram_params->base.dramtype == DDR4) {
		/* Symmetric dual-rank first: configs 17-20 carry bit 3. */
380 		if (cs == 2 && row == cs1_row && !row_3_4) {
381 			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
382 			      die_bw;
383 			for (i = 17; i < 21; i++) {
384 				if (((tmp & 0xf) ==
385 				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
386 				    ((tmp & 0x70) <=
387 				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
388 					ddrconf = i;
389 					goto out;
390 				}
391 			}
392 		}
393 
		/* General DDR4 match over configs 10-20. */
394 		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
395 		for (i = 10; i < 21; i++) {
396 			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
397 			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
398 			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
399 				ddrconf = i;
400 				goto out;
401 			}
402 		}
403 	} else {
		/* Symmetric dual-rank, 8-bank special cases: configs 5-7. */
404 		if (cs == 2 && row == cs1_row && bank == 3) {
405 			for (i = 5; i < 8; i++) {
406 				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
407 							 0x7)) &&
408 				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
409 							  (0x7 << 5))) {
410 					ddrconf = i;
411 					goto out;
412 				}
413 			}
414 		}
415 
416 		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
417 		      ((bw + col - 10) << 0);
418 		if (bank == 3)
419 			tmp |= (1 << 3);
420 
421 		for (i = 0; i < 9; i++)
422 			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
423 			    ((tmp & (7 << 5)) <=
424 			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
425 			    ((tmp & (1 << 8)) <=
426 			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
427 				ddrconf = i;
428 				goto out;
429 			}
		/* Fallback: single-rank, 8-bank, 12 column+bw bits -> config 23. */
430 		if (cs == 1 && bank == 3 && row <= 17 &&
431 		    (col + bw) == 12)
432 			ddrconf = 23;
433 	}
434 
435 out:
436 	if (ddrconf > 28)
437 		printascii("calculate ddrconfig error\n");
438 
	/* Translate a DDR4 id to its DDR3-style equivalent. */
439 	if (sdram_params->base.dramtype == DDR4) {
440 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
441 			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
442 				if (ddrconf == 21 && row > 16)
443 					printascii("warn:ddrconf21 row > 16\n");
444 				else
445 					ddrconf = d4_rbc_2_d3_rbc[i][1];
446 				break;
447 			}
448 		}
449 	}
450 
451 	return ddrconf;
452 }
453 
454 static void sw_set_req(struct dram_info *dram)
455 {
456 	void __iomem *pctl_base = dram->pctl;
457 
458 	/* clear sw_done=0 */
459 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
460 }
461 
462 static void sw_set_ack(struct dram_info *dram)
463 {
464 	void __iomem *pctl_base = dram->pctl;
465 
466 	/* set sw_done=1 */
467 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
468 	while (1) {
469 		/* wait programming done */
470 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
471 				PCTL2_SW_DONE_ACK)
472 			break;
473 	}
474 }
475 
476 static void set_ctl_address_map(struct dram_info *dram,
477 				struct rv1126_sdram_params *sdram_params)
478 {
479 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
480 	void __iomem *pctl_base = dram->pctl;
481 	u32 ddrconf = cap_info->ddrconfig;
482 	u32 i, row;
483 
484 	row = cap_info->cs0_row;
485 	if (sdram_params->base.dramtype == DDR4) {
486 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
487 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
488 				ddrconf = d4_rbc_2_d3_rbc[i][0];
489 				break;
490 			}
491 		}
492 	}
493 
494 	if (ddrconf > ARRAY_SIZE(addrmap)) {
495 		printascii("set ctl address map fail\n");
496 		return;
497 	}
498 
499 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
500 			  &addrmap[ddrconf][0], 9 * 4);
501 
502 	/* unused row set to 0xf */
503 	for (i = 17; i >= row; i--)
504 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
505 			((i - 12) * 8 / 32) * 4,
506 			0xf << ((i - 12) * 8 % 32));
507 
508 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
509 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
510 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
511 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
512 
513 	if (cap_info->rank == 1)
514 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
515 }
516 
/*
 * Two-phase PHY PLL helper: call once with @wait == 0 to program the
 * divider registers for @freq (Hz), then again with @wait == 1 to power
 * the PLL up and spin until it reports lock in PHY reg 0x90.
 */
517 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
518 {
519 	void __iomem *phy_base = dram->phy;
520 	u32 fbdiv, prediv, postdiv, postdiv_en;
521 
522 	if (wait) {
		/* Release PLL power-down, then poll the lock bit (no timeout). */
523 		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
524 		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
525 			continue;
526 	} else {
527 		freq /= MHz;
528 		prediv = 1;
		/* Divider set keyed by target MHz band. */
529 		if (freq <= 200) {
530 			fbdiv = 16;
531 			postdiv = 2;
532 			postdiv_en = 1;
533 		} else if (freq <= 456) {
534 			fbdiv = 8;
535 			postdiv = 1;
536 			postdiv_en = 1;
537 		} else {
538 			fbdiv = 4;
539 			postdiv = 0;
540 			postdiv_en = 0;
541 		}
542 		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		/* fbdiv bit 9 lives in reg 0x51 alongside the postdiv enable. */
543 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
544 				(fbdiv >> 8) & 1);
545 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
546 				postdiv_en << PHY_POSTDIV_EN_SHIFT);
547 
		/*
		 * NOTE(review): prediv is written without PHY_PREDIV_SHIFT while
		 * the mask is shifted — correct only if the shift is 0; confirm
		 * against the PHY register header.
		 */
548 		clrsetbits_le32(PHY_REG(phy_base, 0x52),
549 				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
550 		clrsetbits_le32(PHY_REG(phy_base, 0x53),
551 				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
552 				postdiv << PHY_POSTDIV_SHIFT);
553 	}
554 }
555 
/*
 * DDR3 PHY output drive (RON) register codes vs. drive strength in ohm,
 * ordered weakest (506 ohm) to strongest (22 ohm); set_ds_odt() scans
 * from the strong end for the first entry >= the requested value.
 */
556 static const u16 d3_phy_drv_2_ohm[][2] = {
557 	{PHY_DDR3_RON_506ohm, 506},
558 	{PHY_DDR3_RON_253ohm, 253},
559 	{PHY_DDR3_RON_169hm, 169},
560 	{PHY_DDR3_RON_127ohm, 127},
561 	{PHY_DDR3_RON_101ohm, 101},
562 	{PHY_DDR3_RON_84ohm, 84},
563 	{PHY_DDR3_RON_72ohm, 72},
564 	{PHY_DDR3_RON_63ohm, 63},
565 	{PHY_DDR3_RON_56ohm, 56},
566 	{PHY_DDR3_RON_51ohm, 51},
567 	{PHY_DDR3_RON_46ohm, 46},
568 	{PHY_DDR3_RON_42ohm, 42},
569 	{PHY_DDR3_RON_39ohm, 39},
570 	{PHY_DDR3_RON_36ohm, 36},
571 	{PHY_DDR3_RON_34ohm, 34},
572 	{PHY_DDR3_RON_32ohm, 32},
573 	{PHY_DDR3_RON_30ohm, 30},
574 	{PHY_DDR3_RON_28ohm, 28},
575 	{PHY_DDR3_RON_27ohm, 27},
576 	{PHY_DDR3_RON_25ohm, 25},
577 	{PHY_DDR3_RON_24ohm, 24},
578 	{PHY_DDR3_RON_23ohm, 23},
579 	{PHY_DDR3_RON_22ohm, 22}
580 };
581 
582 static u16 d3_phy_odt_2_ohm[][2] = {
583 	{PHY_DDR3_RTT_DISABLE, 0},
584 	{PHY_DDR3_RTT_953ohm, 953},
585 	{PHY_DDR3_RTT_483ohm, 483},
586 	{PHY_DDR3_RTT_320ohm, 320},
587 	{PHY_DDR3_RTT_241ohm, 241},
588 	{PHY_DDR3_RTT_193ohm, 193},
589 	{PHY_DDR3_RTT_161ohm, 161},
590 	{PHY_DDR3_RTT_138ohm, 138},
591 	{PHY_DDR3_RTT_121ohm, 121},
592 	{PHY_DDR3_RTT_107ohm, 107},
593 	{PHY_DDR3_RTT_97ohm, 97},
594 	{PHY_DDR3_RTT_88ohm, 88},
595 	{PHY_DDR3_RTT_80ohm, 80},
596 	{PHY_DDR3_RTT_74ohm, 74},
597 	{PHY_DDR3_RTT_69ohm, 69},
598 	{PHY_DDR3_RTT_64ohm, 64},
599 	{PHY_DDR3_RTT_60ohm, 60},
600 	{PHY_DDR3_RTT_57ohm, 57},
601 	{PHY_DDR3_RTT_54ohm, 54},
602 	{PHY_DDR3_RTT_51ohm, 51},
603 	{PHY_DDR3_RTT_48ohm, 48},
604 	{PHY_DDR3_RTT_46ohm, 46},
605 	{PHY_DDR3_RTT_44ohm, 44},
606 	{PHY_DDR3_RTT_42ohm, 42}
607 };
608 
609 static u16 d4lp3_phy_drv_2_ohm[][2] = {
610 	{PHY_DDR4_LPDDR3_RON_570ohm, 570},
611 	{PHY_DDR4_LPDDR3_RON_285ohm, 285},
612 	{PHY_DDR4_LPDDR3_RON_190ohm, 190},
613 	{PHY_DDR4_LPDDR3_RON_142ohm, 142},
614 	{PHY_DDR4_LPDDR3_RON_114ohm, 114},
615 	{PHY_DDR4_LPDDR3_RON_95ohm, 95},
616 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
617 	{PHY_DDR4_LPDDR3_RON_71ohm, 71},
618 	{PHY_DDR4_LPDDR3_RON_63ohm, 63},
619 	{PHY_DDR4_LPDDR3_RON_57ohm, 57},
620 	{PHY_DDR4_LPDDR3_RON_52ohm, 52},
621 	{PHY_DDR4_LPDDR3_RON_47ohm, 47},
622 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
623 	{PHY_DDR4_LPDDR3_RON_41ohm, 41},
624 	{PHY_DDR4_LPDDR3_RON_38ohm, 38},
625 	{PHY_DDR4_LPDDR3_RON_36ohm, 36},
626 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
627 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
628 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
629 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
630 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
631 	{PHY_DDR4_LPDDR3_RON_26ohm, 26},
632 	{PHY_DDR4_LPDDR3_RON_25ohm, 25}
633 };
634 
635 static u16 d4lp3_phy_odt_2_ohm[][2] = {
636 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
637 	{PHY_DDR4_LPDDR3_RTT_973ohm, 973},
638 	{PHY_DDR4_LPDDR3_RTT_493ohm, 493},
639 	{PHY_DDR4_LPDDR3_RTT_327ohm, 327},
640 	{PHY_DDR4_LPDDR3_RTT_247ohm, 247},
641 	{PHY_DDR4_LPDDR3_RTT_197ohm, 197},
642 	{PHY_DDR4_LPDDR3_RTT_164ohm, 164},
643 	{PHY_DDR4_LPDDR3_RTT_141ohm, 141},
644 	{PHY_DDR4_LPDDR3_RTT_123ohm, 123},
645 	{PHY_DDR4_LPDDR3_RTT_109ohm, 109},
646 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
647 	{PHY_DDR4_LPDDR3_RTT_90ohm, 90},
648 	{PHY_DDR4_LPDDR3_RTT_82ohm, 82},
649 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
650 	{PHY_DDR4_LPDDR3_RTT_70ohm, 70},
651 	{PHY_DDR4_LPDDR3_RTT_66ohm, 66},
652 	{PHY_DDR4_LPDDR3_RTT_62ohm, 62},
653 	{PHY_DDR4_LPDDR3_RTT_58ohm, 58},
654 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
655 	{PHY_DDR4_LPDDR3_RTT_52ohm, 52},
656 	{PHY_DDR4_LPDDR3_RTT_49ohm, 49},
657 	{PHY_DDR4_LPDDR3_RTT_47ohm, 47},
658 	{PHY_DDR4_LPDDR3_RTT_45ohm, 45},
659 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43}
660 };
661 
662 static u16 lp4_phy_drv_2_ohm[][2] = {
663 	{PHY_LPDDR4_RON_606ohm, 606},
664 	{PHY_LPDDR4_RON_303ohm, 303},
665 	{PHY_LPDDR4_RON_202ohm, 202},
666 	{PHY_LPDDR4_RON_152ohm, 153},
667 	{PHY_LPDDR4_RON_121ohm, 121},
668 	{PHY_LPDDR4_RON_101ohm, 101},
669 	{PHY_LPDDR4_RON_87ohm, 87},
670 	{PHY_LPDDR4_RON_76ohm, 76},
671 	{PHY_LPDDR4_RON_67ohm, 67},
672 	{PHY_LPDDR4_RON_61ohm, 61},
673 	{PHY_LPDDR4_RON_55ohm, 55},
674 	{PHY_LPDDR4_RON_51ohm, 51},
675 	{PHY_LPDDR4_RON_47ohm, 47},
676 	{PHY_LPDDR4_RON_43ohm, 43},
677 	{PHY_LPDDR4_RON_40ohm, 40},
678 	{PHY_LPDDR4_RON_38ohm, 38},
679 	{PHY_LPDDR4_RON_36ohm, 36},
680 	{PHY_LPDDR4_RON_34ohm, 34},
681 	{PHY_LPDDR4_RON_32ohm, 32},
682 	{PHY_LPDDR4_RON_30ohm, 30},
683 	{PHY_LPDDR4_RON_29ohm, 29},
684 	{PHY_LPDDR4_RON_28ohm, 28},
685 	{PHY_LPDDR4_RON_26ohm, 26}
686 };
687 
688 static u16 lp4_phy_odt_2_ohm[][2] = {
689 	{PHY_LPDDR4_RTT_DISABLE, 0},
690 	{PHY_LPDDR4_RTT_998ohm, 998},
691 	{PHY_LPDDR4_RTT_506ohm, 506},
692 	{PHY_LPDDR4_RTT_336ohm, 336},
693 	{PHY_LPDDR4_RTT_253ohm, 253},
694 	{PHY_LPDDR4_RTT_202ohm, 202},
695 	{PHY_LPDDR4_RTT_169ohm, 169},
696 	{PHY_LPDDR4_RTT_144ohm, 144},
697 	{PHY_LPDDR4_RTT_127ohm, 127},
698 	{PHY_LPDDR4_RTT_112ohm, 112},
699 	{PHY_LPDDR4_RTT_101ohm, 101},
700 	{PHY_LPDDR4_RTT_92ohm, 92},
701 	{PHY_LPDDR4_RTT_84ohm, 84},
702 	{PHY_LPDDR4_RTT_78ohm, 78},
703 	{PHY_LPDDR4_RTT_72ohm, 72},
704 	{PHY_LPDDR4_RTT_67ohm, 67},
705 	{PHY_LPDDR4_RTT_63ohm, 63},
706 	{PHY_LPDDR4_RTT_60ohm, 60},
707 	{PHY_LPDDR4_RTT_56ohm, 56},
708 	{PHY_LPDDR4_RTT_53ohm, 53},
709 	{PHY_LPDDR4_RTT_51ohm, 51},
710 	{PHY_LPDDR4_RTT_48ohm, 48},
711 	{PHY_LPDDR4_RTT_46ohm, 46},
712 	{PHY_LPDDR4_RTT_44ohm, 44}
713 };
714 
715 static u32 lp4_odt_calc(u32 odt_ohm)
716 {
717 	u32 odt;
718 
719 	if (odt_ohm == 0)
720 		odt = LPDDR4_DQODT_DIS;
721 	else if (odt_ohm <= 40)
722 		odt = LPDDR4_DQODT_40;
723 	else if (odt_ohm <= 48)
724 		odt = LPDDR4_DQODT_48;
725 	else if (odt_ohm <= 60)
726 		odt = LPDDR4_DQODT_60;
727 	else if (odt_ohm <= 80)
728 		odt = LPDDR4_DQODT_80;
729 	else if (odt_ohm <= 120)
730 		odt = LPDDR4_DQODT_120;
731 	else
732 		odt = LPDDR4_DQODT_240;
733 
734 	return odt;
735 }
736 
737 static void *get_ddr_drv_odt_info(u32 dramtype)
738 {
739 	struct sdram_head_info_index_v2 *index =
740 		(struct sdram_head_info_index_v2 *)common_info;
741 	void *ddr_info = 0;
742 
743 	if (dramtype == DDR4)
744 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
745 	else if (dramtype == DDR3)
746 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
747 	else if (dramtype == LPDDR3)
748 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
749 	else if (dramtype == LPDDR4)
750 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
751 	else
752 		printascii("unsupported dram type\n");
753 	return ddr_info;
754 }
755 
756 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
757 			 u32 freq_mhz, u32 dst_fsp)
758 {
759 	void __iomem *pctl_base = dram->pctl;
760 	u32 ca_vref, dq_vref;
761 
762 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
763 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
764 	else
765 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
766 
767 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
768 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
769 	else
770 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
771 
772 	if (ca_vref < 100)
773 		ca_vref = 100;
774 	if (ca_vref > 420)
775 		ca_vref = 420;
776 
777 	if (ca_vref <= 300)
778 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
779 	else
780 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
781 
782 	if (dq_vref < 100)
783 		dq_vref = 100;
784 	if (dq_vref > 420)
785 		dq_vref = 420;
786 
787 	if (dq_vref <= 300)
788 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
789 	else
790 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
791 
792 	sw_set_req(dram);
793 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
794 			DDR_PCTL2_INIT6,
795 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
796 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
797 
798 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
799 			DDR_PCTL2_INIT7,
800 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
801 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
802 	sw_set_ack(dram);
803 }
804 
805 static void set_ds_odt(struct dram_info *dram,
806 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
807 {
808 	void __iomem *phy_base = dram->phy;
809 	void __iomem *pctl_base = dram->pctl;
810 	u32 dramtype = sdram_params->base.dramtype;
811 	struct ddr2_3_4_lp2_3_info *ddr_info;
812 	struct lp4_info *lp4_info;
813 	u32 i, j, tmp;
814 	const u16 (*p_drv)[2];
815 	const u16 (*p_odt)[2];
816 	u32 drv_info, sr_info;
817 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
818 	u32 phy_odt_ohm, dram_odt_ohm;
819 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
820 	u32 phy_odt_up_en, phy_odt_dn_en;
821 	u32 sr_dq, sr_clk;
822 	u32 freq = sdram_params->base.ddr_freq;
823 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
824 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
825 	u32 phy_dq_drv = 0;
826 	u32 phy_odt_up = 0, phy_odt_dn = 0;
827 
828 	ddr_info = get_ddr_drv_odt_info(dramtype);
829 	lp4_info = (void *)ddr_info;
830 
831 	if (!ddr_info)
832 		return;
833 
834 	/* dram odt en freq control phy drv, dram odt and phy sr */
835 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
836 		drv_info = ddr_info->drv_when_odtoff;
837 		dram_odt_ohm = 0;
838 		sr_info = ddr_info->sr_when_odtoff;
839 		phy_lp4_drv_pd_en =
840 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
841 	} else {
842 		drv_info = ddr_info->drv_when_odten;
843 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
844 		sr_info = ddr_info->sr_when_odten;
845 		phy_lp4_drv_pd_en =
846 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
847 	}
848 	phy_dq_drv_ohm =
849 		DRV_INFO_PHY_DQ_DRV(drv_info);
850 	phy_clk_drv_ohm =
851 		DRV_INFO_PHY_CLK_DRV(drv_info);
852 	phy_ca_drv_ohm =
853 		DRV_INFO_PHY_CA_DRV(drv_info);
854 
855 	sr_dq = DQ_SR_INFO(sr_info);
856 	sr_clk = CLK_SR_INFO(sr_info);
857 
858 	/* phy odt en freq control dram drv and phy odt */
859 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
860 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
861 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
862 		phy_odt_ohm = 0;
863 		phy_odt_up_en = 0;
864 		phy_odt_dn_en = 0;
865 	} else {
866 		dram_drv_ohm =
867 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
868 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
869 		phy_odt_up_en =
870 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
871 		phy_odt_dn_en =
872 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
873 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
874 	}
875 
876 	if (dramtype == LPDDR4) {
877 		if (phy_odt_ohm) {
878 			phy_odt_up_en = 0;
879 			phy_odt_dn_en = 1;
880 		}
881 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
882 			dram_caodt_ohm = 0;
883 		else
884 			dram_caodt_ohm =
885 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
886 	}
887 
888 	if (dramtype == DDR3) {
889 		p_drv = d3_phy_drv_2_ohm;
890 		p_odt = d3_phy_odt_2_ohm;
891 	} else if (dramtype == LPDDR4) {
892 		p_drv = lp4_phy_drv_2_ohm;
893 		p_odt = lp4_phy_odt_2_ohm;
894 	} else {
895 		p_drv = d4lp3_phy_drv_2_ohm;
896 		p_odt = d4lp3_phy_odt_2_ohm;
897 	}
898 
899 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
900 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
901 			phy_dq_drv = **(p_drv + i);
902 			break;
903 		}
904 		if (i == 0)
905 			break;
906 	}
907 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
908 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
909 			phy_clk_drv = **(p_drv + i);
910 			break;
911 		}
912 		if (i == 0)
913 			break;
914 	}
915 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
916 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
917 			phy_ca_drv = **(p_drv + i);
918 			break;
919 		}
920 		if (i == 0)
921 			break;
922 	}
923 	if (!phy_odt_ohm)
924 		phy_odt = 0;
925 	else
926 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
927 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
928 				phy_odt = **(p_odt + i);
929 				break;
930 			}
931 			if (i == 0)
932 				break;
933 		}
934 
935 	if (dramtype != LPDDR4) {
936 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
937 			vref_inner = 0x80;
938 		else if (phy_odt_up_en)
939 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
940 				     (dram_drv_ohm + phy_odt_ohm);
941 		else
942 			vref_inner = phy_odt_ohm * 128 /
943 				(phy_odt_ohm + dram_drv_ohm);
944 
945 		if (dramtype != DDR3 && dram_odt_ohm)
946 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
947 				   (phy_dq_drv_ohm + dram_odt_ohm);
948 		else
949 			vref_out = 0x80;
950 	} else {
951 		/* for lp4 */
952 		if (phy_odt_ohm)
953 			vref_inner =
954 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
955 				 256) / 1000;
956 		else
957 			vref_inner =
958 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
959 				 256) / 1000;
960 
961 		vref_out = 0x80;
962 	}
963 
964 	/* default ZQCALIB bypass mode */
965 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
966 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
967 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
968 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
969 	/* clk / cmd slew rate */
970 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
971 
972 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
973 	if (phy_odt_up_en)
974 		phy_odt_up = phy_odt;
975 	if (phy_odt_dn_en)
976 		phy_odt_dn = phy_odt;
977 
978 	for (i = 0; i < 4; i++) {
979 		j = 0x110 + i * 0x10;
980 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
981 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
982 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
983 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
984 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
985 
986 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
987 				1 << 3, phy_lp4_drv_pd_en << 3);
988 		/* dq slew rate */
989 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
990 				0x1f, sr_dq);
991 	}
992 
993 	/* reg_rx_vref_value_update */
994 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
995 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
996 
997 	/* RAM VREF */
998 	writel(vref_out, PHY_REG(phy_base, 0x105));
999 	if (dramtype == LPDDR3)
1000 		udelay(100);
1001 
1002 	if (dramtype == LPDDR4)
1003 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1004 
1005 	if (dramtype == DDR3 || dramtype == DDR4) {
1006 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1007 				DDR_PCTL2_INIT3);
1008 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1009 	} else {
1010 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1011 				DDR_PCTL2_INIT4);
1012 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1013 	}
1014 
1015 	if (dramtype == DDR3) {
1016 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1017 		if (dram_drv_ohm == 34)
1018 			mr1_mr3 |= DDR3_DS_34;
1019 
1020 		if (dram_odt_ohm == 0)
1021 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1022 		else if (dram_odt_ohm <= 40)
1023 			mr1_mr3 |= DDR3_RTT_NOM_40;
1024 		else if (dram_odt_ohm <= 60)
1025 			mr1_mr3 |= DDR3_RTT_NOM_60;
1026 		else
1027 			mr1_mr3 |= DDR3_RTT_NOM_120;
1028 
1029 	} else if (dramtype == DDR4) {
1030 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1031 		if (dram_drv_ohm == 48)
1032 			mr1_mr3 |= DDR4_DS_48;
1033 
1034 		if (dram_odt_ohm == 0)
1035 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1036 		else if (dram_odt_ohm <= 34)
1037 			mr1_mr3 |= DDR4_RTT_NOM_34;
1038 		else if (dram_odt_ohm <= 40)
1039 			mr1_mr3 |= DDR4_RTT_NOM_40;
1040 		else if (dram_odt_ohm <= 48)
1041 			mr1_mr3 |= DDR4_RTT_NOM_48;
1042 		else if (dram_odt_ohm <= 60)
1043 			mr1_mr3 |= DDR4_RTT_NOM_60;
1044 		else
1045 			mr1_mr3 |= DDR4_RTT_NOM_120;
1046 
1047 	} else if (dramtype == LPDDR3) {
1048 		if (dram_drv_ohm <= 34)
1049 			mr1_mr3 |= LPDDR3_DS_34;
1050 		else if (dram_drv_ohm <= 40)
1051 			mr1_mr3 |= LPDDR3_DS_40;
1052 		else if (dram_drv_ohm <= 48)
1053 			mr1_mr3 |= LPDDR3_DS_48;
1054 		else if (dram_drv_ohm <= 60)
1055 			mr1_mr3 |= LPDDR3_DS_60;
1056 		else if (dram_drv_ohm <= 80)
1057 			mr1_mr3 |= LPDDR3_DS_80;
1058 
1059 		if (dram_odt_ohm == 0)
1060 			lp3_odt_value = LPDDR3_ODT_DIS;
1061 		else if (dram_odt_ohm <= 60)
1062 			lp3_odt_value = LPDDR3_ODT_60;
1063 		else if (dram_odt_ohm <= 120)
1064 			lp3_odt_value = LPDDR3_ODT_120;
1065 		else
1066 			lp3_odt_value = LPDDR3_ODT_240;
1067 	} else {/* for lpddr4 */
1068 		/* MR3 for lp4 PU-CAL and PDDS */
1069 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1070 		mr1_mr3 |= lp4_pu_cal;
1071 
1072 		tmp = lp4_odt_calc(dram_drv_ohm);
1073 		if (!tmp)
1074 			tmp = LPDDR4_PDDS_240;
1075 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1076 
1077 		/* MR11 for lp4 ca odt, dq odt set */
1078 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 			     DDR_PCTL2_INIT6);
1080 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1081 
1082 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1083 
1084 		tmp = lp4_odt_calc(dram_odt_ohm);
1085 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1086 
1087 		tmp = lp4_odt_calc(dram_caodt_ohm);
1088 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1089 		sw_set_req(dram);
1090 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1091 				DDR_PCTL2_INIT6,
1092 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1093 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1094 		sw_set_ack(dram);
1095 
1096 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1097 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1098 			     DDR_PCTL2_INIT7);
1099 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1100 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1101 
1102 		tmp = lp4_odt_calc(phy_odt_ohm);
1103 		mr22 |= tmp;
1104 		mr22 = mr22 |
1105 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1106 			LPDDR4_ODTE_CK_SHIFT) |
1107 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1108 			LPDDR4_ODTE_CS_SHIFT) |
1109 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1110 			LPDDR4_ODTD_CA_SHIFT);
1111 
1112 		sw_set_req(dram);
1113 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1114 				DDR_PCTL2_INIT7,
1115 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1116 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1117 		sw_set_ack(dram);
1118 	}
1119 
1120 	if (dramtype == DDR4 || dramtype == DDR3) {
1121 		sw_set_req(dram);
1122 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1123 				DDR_PCTL2_INIT3,
1124 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1125 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1126 		sw_set_ack(dram);
1127 	} else {
1128 		sw_set_req(dram);
1129 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1130 				DDR_PCTL2_INIT4,
1131 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1132 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1133 		sw_set_ack(dram);
1134 	}
1135 }
1136 
1137 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1138 				   struct rv1126_sdram_params *sdram_params)
1139 {
1140 	void __iomem *phy_base = dram->phy;
1141 	u32 dramtype = sdram_params->base.dramtype;
1142 	struct sdram_head_info_index_v2 *index =
1143 		(struct sdram_head_info_index_v2 *)common_info;
1144 	struct dq_map_info *map_info;
1145 
1146 	map_info = (struct dq_map_info *)((void *)common_info +
1147 		index->dq_map_index.offset * 4);
1148 
1149 	if (dramtype <= LPDDR4)
1150 		writel((map_info->byte_map[dramtype / 4] >>
1151 			((dramtype % 4) * 8)) & 0xff,
1152 		       PHY_REG(phy_base, 0x4f));
1153 
1154 	return 0;
1155 }
1156 
/*
 * Base PHY configuration: apply the byte-lane remap, set the PHY PLL to
 * the target frequency, load the per-board PHY register table and enable
 * only the DQ byte lanes needed for the detected bus width.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the 0xFFFFFFFF-terminated {offset, value} PHY table */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes carry logical bytes 0 and 1 (2 bits each) */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* bw == 2: 32 bit, all lanes; bw == 1: 16 bit; else: 8 bit */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1202 
1203 static int update_refresh_reg(struct dram_info *dram)
1204 {
1205 	void __iomem *pctl_base = dram->pctl;
1206 	u32 ret;
1207 
1208 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1209 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1210 
1211 	return 0;
1212 }
1213 
1214 /*
1215  * rank = 1: cs0
1216  * rank = 2: cs1
1217  */
1218 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1219 {
1220 	u32 ret;
1221 	u32 i, temp;
1222 	u32 dqmap;
1223 
1224 	void __iomem *pctl_base = dram->pctl;
1225 	struct sdram_head_info_index_v2 *index =
1226 		(struct sdram_head_info_index_v2 *)common_info;
1227 	struct dq_map_info *map_info;
1228 
1229 	map_info = (struct dq_map_info *)((void *)common_info +
1230 		index->dq_map_index.offset * 4);
1231 
1232 	if (dramtype == LPDDR2)
1233 		dqmap = map_info->lp2_dq0_7_map;
1234 	else
1235 		dqmap = map_info->lp3_dq0_7_map;
1236 
1237 	pctl_read_mr(pctl_base, rank, mr_num);
1238 
1239 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1240 
1241 	if (dramtype != LPDDR4) {
1242 		temp = 0;
1243 		for (i = 0; i < 8; i++) {
1244 			temp = temp | (((ret >> i) & 0x1) <<
1245 				       ((dqmap >> (i * 4)) & 0xf));
1246 		}
1247 	} else {
1248 		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1249 	}
1250 
1251 	return ret;
1252 }
1253 
1254 /* before call this function autorefresh should be disabled */
1255 void send_a_refresh(struct dram_info *dram)
1256 {
1257 	void __iomem *pctl_base = dram->pctl;
1258 
1259 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1260 		continue;
1261 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1262 }
1263 
/*
 * Snapshot the PHY's per-bit delay search results: for each of the four
 * byte groups, select every entry of dq_sel[] and copy the left/right
 * loop inverse-delay read-back into its matching de-skew register.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
1287 
/*
 * Latch the RX per-bit de-skew values into the PHY by pulsing
 * PHY_0x70[4] with bits 1 and 6 held clear.
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1297 
/*
 * Latch the TX per-bit de-skew values into the PHY: with write training
 * disabled (PHY_0x7a[1] clear) and PHY_0x2[3] set, pulse PHY_0xc[6].
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1308 
/*
 * Latch the CA (command/address) de-skew values into the PHY by pulsing
 * PHY_0x22[6] with PHY_0x25[2] held clear.
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1318 
1319 /*
1320  * dir: 0: de-skew = delta_*
1321  *	1: de-skew = reg val - delta_*
1322  * delta_dir: value for differential signal: clk/
1323  * delta_sig: value for single signal: ca/cmd
1324  */
1325 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1326 			     int delta_sig, u32 cs, u32 dramtype)
1327 {
1328 	void __iomem *phy_base = dram->phy;
1329 	u32 i, cs_en, tmp;
1330 
1331 	if (cs == 0)
1332 		cs_en = 1;
1333 	else if (cs == 2)
1334 		cs_en = 2;
1335 	else
1336 		cs_en = 3;
1337 
1338 	for (i = 0; i < 0x20; i++) {
1339 		if (dir == DESKEW_MDF_ABS_VAL)
1340 			tmp = delta_sig;
1341 		else
1342 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1343 			      delta_sig;
1344 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1345 	}
1346 
1347 	if (dir == DESKEW_MDF_ABS_VAL)
1348 		tmp = delta_dif;
1349 	else
1350 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1351 		       delta_sig + delta_dif;
1352 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1353 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1354 	if (dramtype == LPDDR4) {
1355 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1356 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1357 
1358 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1359 		update_ca_prebit(dram);
1360 	}
1361 }
1362 
1363 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1364 {
1365 	u32 i, j, offset = 0;
1366 	u32 min = 0x3f;
1367 	void __iomem *phy_base = dram->phy;
1368 	u32 byte_en;
1369 
1370 	if (signal == SKEW_TX_SIGNAL)
1371 		offset = 8;
1372 
1373 	if (signal == SKEW_CA_SIGNAL) {
1374 		for (i = 0; i < 0x20; i++)
1375 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1376 	} else {
1377 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1378 		for (j = offset; j < offset + rank * 4; j++) {
1379 			if (!((byte_en >> (j % 4)) & 1))
1380 				continue;
1381 			for (i = 0; i < 11; i++)
1382 				min = MIN(min,
1383 					  readl(PHY_REG(phy_base,
1384 							dqs_dq_skew_adr[j] +
1385 							i)));
1386 		}
1387 	}
1388 
1389 	return min;
1390 }
1391 
1392 static u32 low_power_update(struct dram_info *dram, u32 en)
1393 {
1394 	void __iomem *pctl_base = dram->pctl;
1395 	u32 lp_stat = 0;
1396 
1397 	if (en) {
1398 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1399 	} else {
1400 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1401 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1402 	}
1403 
1404 	return lp_stat;
1405 }
1406 
1407 /*
1408  * signal:
1409  * dir: 0: de-skew = delta_*
1410  *	1: de-skew = reg val - delta_*
1411  * delta_dir: value for differential signal: dqs
1412  * delta_sig: value for single signal: dq/dm
1413  */
1414 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1415 			     int delta_dif, int delta_sig, u32 rank)
1416 {
1417 	void __iomem *phy_base = dram->phy;
1418 	u32 i, j, tmp, offset;
1419 	u32 byte_en;
1420 
1421 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1422 
1423 	if (signal == SKEW_RX_SIGNAL)
1424 		offset = 0;
1425 	else
1426 		offset = 8;
1427 
1428 	for (j = offset; j < (offset + rank * 4); j++) {
1429 		if (!((byte_en >> (j % 4)) & 1))
1430 			continue;
1431 		for (i = 0; i < 0x9; i++) {
1432 			if (dir == DESKEW_MDF_ABS_VAL)
1433 				tmp = delta_sig;
1434 			else
1435 				tmp = delta_sig + readl(PHY_REG(phy_base,
1436 							dqs_dq_skew_adr[j] +
1437 							i));
1438 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1439 		}
1440 		if (dir == DESKEW_MDF_ABS_VAL)
1441 			tmp = delta_dif;
1442 		else
1443 			tmp = delta_dif + readl(PHY_REG(phy_base,
1444 						dqs_dq_skew_adr[j] + 9));
1445 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1446 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1447 	}
1448 	if (signal == SKEW_RX_SIGNAL)
1449 		update_dq_rx_prebit(dram);
1450 	else
1451 		update_dq_tx_prebit(dram);
1452 }
1453 
/*
 * Run read-gate training for chip-select @cs.
 *
 * For non-LPDDR4 types the per-lane PHY ODT is temporarily forced to
 * 247 ohm (pull-up disabled) and restored afterwards. Returns 0 when
 * every enabled byte lane trained, non-zero (or -1) on failure.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save current lane-0 ODT so all lanes can be restored on exit */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_247ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/*
	 * 0x91[5] appears to be an error flag; otherwise the low nibble
	 * (per-lane done bits) must match the enabled-lane mask in 0xf —
	 * any mismatch is returned as a non-zero value.
	 */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1506 
/*
 * Run write-leveling for chip-select @cs.
 *
 * For DDR3/DDR4 with two ranks, the other rank's DQ output is disabled
 * via its MR1 while leveling runs and re-enabled on exit.
 * NOTE(review): on timeout this hangs forever after printing the error,
 * unlike data_training_rd() which returns -1 — confirm this is intended.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* forward the MR value from INIT3[15:0] to the PHY (regs 0x3/0x4) */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until all enabled lanes report leveling done (0x92 vs 0xf) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1569 
/*
 * Data pattern for training; presumably consumed by training code
 * outside this view — TODO confirm the consumer.
 * NOTE(review): plain char makes values >= 0x80 depend on the
 * implementation-defined signedness of char; u8 would be unambiguous.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1576 
/*
 * Run read (eye) training for chip-select @cs.
 *
 * The controller's refresh timing (RFSHTMG) is mirrored into the PHY so
 * it can issue refreshes while training runs. For DDR3 with the inner
 * Vref at its 0x80 default, the Vref is lowered by 0xa for the duration
 * of training and restored afterwards.
 * Returns 0 on success, -1 on timeout or training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* temporarily lower DDR3 inner Vref from its 0x80 default */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the DDR3 inner Vref changed above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1693 
/*
 * Run write (eye) training for chip-select @cs at @mhz.
 *
 * For LPDDR3 at <= 400 MHz the PHY CL/CWL are temporarily forced to 8/4
 * with MR2 updated to match, and restored on exit. For LPDDR4 the
 * trained DQ Vref is saved into fsp_param[@dst_fsp] for later DFS use.
 * Returns 0 on success, -1 on training error.
 * NOTE(review): on timeout this hangs forever after printing the error,
 * unlike data_training_rd() which returns -1 — confirm this is intended.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: save CL/CWL, force 8/4 during training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait for write-train done flag */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the LPDDR3 CL/CWL and MR2 changed above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1806 
1807 static int data_training(struct dram_info *dram, u32 cs,
1808 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1809 			 u32 training_flag)
1810 {
1811 	u32 ret = 0;
1812 
1813 	if (training_flag == FULL_TRAINING)
1814 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1815 				WRITE_TRAINING | READ_TRAINING;
1816 
1817 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1818 		ret = data_training_wl(dram, cs,
1819 				       sdram_params->base.dramtype,
1820 				       sdram_params->ch.cap_info.rank);
1821 		if (ret != 0)
1822 			goto out;
1823 	}
1824 
1825 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1826 		ret = data_training_rg(dram, cs,
1827 				       sdram_params->base.dramtype);
1828 		if (ret != 0)
1829 			goto out;
1830 	}
1831 
1832 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1833 		ret = data_training_rd(dram, cs,
1834 				       sdram_params->base.dramtype,
1835 				       sdram_params->base.ddr_freq);
1836 		if (ret != 0)
1837 			goto out;
1838 	}
1839 
1840 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1841 		ret = data_training_wr(dram, cs,
1842 				       sdram_params->base.dramtype,
1843 				       sdram_params->base.ddr_freq, dst_fsp);
1844 		if (ret != 0)
1845 			goto out;
1846 	}
1847 
1848 out:
1849 	return ret;
1850 }
1851 
1852 static int get_wrlvl_val(struct dram_info *dram,
1853 			 struct rv1126_sdram_params *sdram_params)
1854 {
1855 	u32 i, j, clk_skew;
1856 	void __iomem *phy_base = dram->phy;
1857 	u32 lp_stat;
1858 	int ret;
1859 
1860 	lp_stat = low_power_update(dram, 0);
1861 
1862 	clk_skew = readl(PHY_REG(phy_base, 0x150 + 0x17));
1863 
1864 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1865 	if (sdram_params->ch.cap_info.rank == 2)
1866 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1867 
1868 	for (j = 0; j < 2; j++)
1869 		for (i = 0; i < 4; i++)
1870 			wrlvl_result[j][i] =
1871 				readl(PHY_REG(phy_base,
1872 					      wrlvl_result_offset[j][i])) -
1873 				clk_skew;
1874 
1875 	low_power_update(dram, lp_stat);
1876 
1877 	return ret;
1878 }
1879 
/*
 * Full training sequence for the high-frequency operating point @fsp:
 * 1) centre the write-leveling results so the average DQS skew is 0x20
 *    and derive the matching CLK/CA skews per DRAM type;
 * 2) run read-gate / read / write training on each rank;
 * 3) normalise RX, TX and CA de-skews so the smallest value becomes 0;
 * 4) re-run read-gate training with the final skews.
 * Returns 0 on success.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	u32 dqs_skew, clk_skew, ca_skew;
	int ret;

	/* average write-leveling result over all ranks and byte lanes */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
			dqs_skew += wrlvl_result[j][i];
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       ARRAY_SIZE(wrlvl_result[0]));

	/* shift CLK so the average DQS skew lands mid-range at 0x20 */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* seed per-byte write-train DQS defaults from rank 0 leveling */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2) {
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift RX de-skews down so the smallest becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* shift TX and CA de-skews together by the common minimum */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
1956 
1957 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
1958 {
1959 	writel(ddrconfig, &dram->msch->deviceconf);
1960 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
1961 }
1962 
1963 static void update_noc_timing(struct dram_info *dram,
1964 			      struct rv1126_sdram_params *sdram_params)
1965 {
1966 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
1967 	       &dram->msch->ddrtiminga0);
1968 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
1969 	       &dram->msch->ddrtimingb0);
1970 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
1971 	       &dram->msch->ddrtimingc0);
1972 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
1973 	       &dram->msch->devtodev0);
1974 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
1975 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
1976 	       &dram->msch->ddr4timing);
1977 }
1978 
1979 static void dram_all_config(struct dram_info *dram,
1980 			    struct rv1126_sdram_params *sdram_params)
1981 {
1982 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1983 	u32 dram_type = sdram_params->base.dramtype;
1984 	void __iomem *pctl_base = dram->pctl;
1985 	u32 sys_reg2 = 0;
1986 	u32 sys_reg3 = 0;
1987 	u64 cs_cap[2];
1988 	u32 cs_pst;
1989 
1990 	set_ddrconfig(dram, cap_info->ddrconfig);
1991 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
1992 			 &sys_reg3, 0);
1993 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
1994 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
1995 
1996 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
1997 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
1998 
1999 	if (cap_info->rank == 2) {
2000 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2001 			6 + 2;
2002 		if (cs_pst > 28)
2003 			cs_cap[0] = 1 << cs_pst;
2004 	}
2005 
2006 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2007 			(((cs_cap[0] >> 20) / 64) & 0xff),
2008 			&dram->msch->devicesize);
2009 	update_noc_timing(dram, sdram_params);
2010 }
2011 
/*
 * Enable hardware low-power features (auto power-down, auto
 * self-refresh, clock gating) according to the DRAM type and the
 * sr_idle/pd_idle values held in @dram.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	/* DDR GRF low-power idle configuration */
	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* select the low-power request enable bit matching the DRAM type */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	/* PWRCTL bit 3: presumably en_dfi_dram_clk_disable — confirm vs uMCTL2 TRM */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2042 
2043 static void ddr_set_atags(struct dram_info *dram,
2044 			  struct rv1126_sdram_params *sdram_params)
2045 {
2046 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2047 	u32 dram_type = sdram_params->base.dramtype;
2048 	void __iomem *pctl_base = dram->pctl;
2049 	struct tag_serial t_serial;
2050 	struct tag_ddr_mem t_ddrmem;
2051 	struct tag_soc_info t_socinfo;
2052 	u64 cs_cap[2];
2053 	u32 cs_pst = 0;
2054 
2055 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2056 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2057 
2058 	memset(&t_serial, 0, sizeof(struct tag_serial));
2059 
2060 	t_serial.version = 0;
2061 	t_serial.enable = 1;
2062 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2063 	t_serial.baudrate = CONFIG_BAUDRATE;
2064 	t_serial.m_mode = SERIAL_M_MODE_M0;
2065 	t_serial.id = 2;
2066 
2067 	atags_destroy();
2068 	atags_set_tag(ATAG_SERIAL,  &t_serial);
2069 
2070 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2071 	if (cap_info->row_3_4) {
2072 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2073 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2074 	}
2075 	t_ddrmem.version = 0;
2076 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2077 	if (cs_cap[1]) {
2078 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2079 			6 + 2;
2080 	}
2081 
2082 	if (cs_cap[1] && cs_pst > 27) {
2083 		t_ddrmem.count = 2;
2084 		t_ddrmem.bank[1] = 1 << cs_pst;
2085 		t_ddrmem.bank[2] = cs_cap[0];
2086 		t_ddrmem.bank[3] = cs_cap[1];
2087 	} else {
2088 		t_ddrmem.count = 1;
2089 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
2090 	}
2091 
2092 	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);
2093 
2094 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2095 	t_socinfo.version = 0;
2096 	t_socinfo.name = 0x1126;
2097 }
2098 
2099 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2100 {
2101 	u32 split;
2102 
2103 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2104 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2105 		split = 0;
2106 	else
2107 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2108 			SPLIT_SIZE_MASK;
2109 
2110 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2111 			     &sdram_params->base, split);
2112 }
2113 
/*
 * Bring the DRAM subsystem up once: clocks, resets, PHY and controller
 * configuration, mode-register programming and read-gate training.
 *
 * @post_init: 0 for the first (probing) pass, non-zero for the final
 *             pass — only then are training errors reported and CS1
 *             trained, and the scheduler/low-power setup applied.
 * Returns 0 on success, -1 on training failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* staged de-assertion of the DDR reset chain between config steps */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait for the controller to leave the init state (STAT[2:0] != 0) */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* replay the LPDDR4 MR values stored in INIT6/INIT7 */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	/* read-gate training on CS0 is mandatory on both passes */
	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/* CS1 is only trained on the final pass, once rank is known */
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* scale PHY vref (reg 0x105) to the controller's VrefDQ unit */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2202 
/*
 * Probe the attached DRAM geometry (column/bank/row counts, bus width,
 * rank) by test patterns and training, filling in cap_info.
 *
 * LPDDR4 reads the density directly from MR8 instead of probing.
 * Returns 0 on success, -1 when column/row detection fails.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe with maximum geometry */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank layout, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: density field comes from MR8 — no probing needed */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		/* density -> rows mapping; odd codes are 3/4-row devices */
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* disable low-power transitions while training probes run */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank detection: CS1 training succeeds only if a second rank exists */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/* bus-width detection: retrain CS0 with all byte lanes on */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		phy_soft_reset(dram);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0)
			cap_info->bw = 2;
		else
			cap_info->bw = 1;
	}

	/* restore the saved low-power configuration */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* assume CS1 mirrors CS0 for now; refined by dram_detect_cs1_row() */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2298 
2299 static int dram_detect_cs1_row(struct dram_info *dram,
2300 			       struct rv1126_sdram_params *sdram_params,
2301 			       unsigned char channel)
2302 {
2303 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2304 	void __iomem *pctl_base = dram->pctl;
2305 	u32 ret = 0;
2306 	void __iomem *test_addr;
2307 	u32 row, bktmp, coltmp, bw;
2308 	u64 cs0_cap;
2309 	u32 byte_mask;
2310 	u32 cs_pst;
2311 	u32 cs_add = 0;
2312 	u32 max_row;
2313 
2314 	if (cap_info->rank == 2) {
2315 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2316 			6 + 2;
2317 		if (cs_pst < 28)
2318 			cs_add = 1;
2319 
2320 		cs0_cap = 1 << cs_pst;
2321 
2322 		if (sdram_params->base.dramtype == DDR4) {
2323 			if (cap_info->dbw == 0)
2324 				bktmp = cap_info->bk + 2;
2325 			else
2326 				bktmp = cap_info->bk + 1;
2327 		} else {
2328 			bktmp = cap_info->bk;
2329 		}
2330 		bw = cap_info->bw;
2331 		coltmp = cap_info->col;
2332 
2333 		if (bw == 2)
2334 			byte_mask = 0xFFFF;
2335 		else
2336 			byte_mask = 0xFF;
2337 
2338 		max_row = (cs_pst == 31) ? 30 : 31;
2339 
2340 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2341 
2342 		row = (cap_info->cs0_row > max_row) ? max_row :
2343 			cap_info->cs0_row;
2344 
2345 		for (; row > 12; row--) {
2346 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2347 				    (u32)cs0_cap +
2348 				    (1ul << (row + bktmp + coltmp +
2349 					     cs_add + bw - 1ul)));
2350 
2351 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2352 			writel(PATTERN, test_addr);
2353 
2354 			if (((readl(test_addr) & byte_mask) ==
2355 			     (PATTERN & byte_mask)) &&
2356 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2357 			      byte_mask) == 0)) {
2358 				ret = row;
2359 				break;
2360 			}
2361 		}
2362 	}
2363 
2364 	return ret;
2365 }
2366 
2367 /* return: 0 = success, other = fail */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;

	/* first pass: bring the DRAM up with the default (maximal) layout */
	if (sdram_init_(dram, sdram_params, 0) != 0)
		return -1;

	/* quick sanity write/read before probing DDR3 */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* second pass: re-init with the controller mapped to the
	 * detected geometry
	 */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	/* refine CS1 row count and patch it into the OS registers */
	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info);

out:
	return ret;
}
2410 
2411 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2412 {
2413 	u32 i;
2414 	u32 offset = 0;
2415 	struct ddr2_3_4_lp2_3_info *ddr_info;
2416 
2417 	if (!freq_mhz) {
2418 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2419 		if (ddr_info)
2420 			freq_mhz =
2421 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2422 				DDR_FREQ_MASK;
2423 		else
2424 			freq_mhz = 0;
2425 	}
2426 
2427 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2428 		if (sdram_configs[i].base.ddr_freq == 0 ||
2429 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2430 			break;
2431 	}
2432 	offset = i == 0 ? 0 : i - 1;
2433 
2434 	return &sdram_configs[offset];
2435 }
2436 
/*
 * Controller registers whose values are frequency-dependent and must be
 * re-written into the target FSP register set by pre_set_rate().
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2460 
/*
 * PHY registers (CL/CWL/AL related, per the comment in pre_set_rate())
 * that must be re-written at the per-FSP offset on a frequency change.
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2466 
/*
 * Prepare the controller and PHY for a frequency switch to @dst_fsp:
 * copy the frequency-dependent controller/PHY timing registers into the
 * target FSP register set, program drive strength/ODT, and (LPDDR4)
 * replay the mode registers, mirroring them into the PHY MR shadow
 * registers (0x17..0x1d).
 *
 * @dst_fsp_lp4: LPDDR4 FSP-WR selector folded into MR13.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		/* pctl_regs is sorted, so resume the scan at "find" */
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}
	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		/* per-FSP PHY register banks start at 0x382, 3 regs apart */
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13: clear FSP-OP/FSP-WR bits, then select dst_fsp_lp4 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2582 
/*
 * Snapshot the settings of frequency set point @dst_fsp (frequency,
 * ODT/drive strength decoded from the mode-register images, vref, NoC
 * timings) into fsp_param[dst_fsp] so later stages can switch
 * frequencies without redoing the full derivation.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4 uses pull-down-only read ODT; others follow the info table */
	if (sdram_params->base.dramtype == LPDDR4) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* capture the PHY's current ODT/drive/vref register values */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* decode DRAM-side drive strength and ODT from the stored MR images */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		/* DDR3 MR1: drive = {A1,A5}, ODT (Rtt_Nom) = {A2,A6,A9} */
		p_fsp_param->ds_pdds = ((temp >> 1) & 0x1) |
				       (((temp >> 5) & 0x1) << 1);
		p_fsp_param->dq_odt = ((temp >> 2) & 0x1) |
				      (((temp >> 6) & 0x1) << 1) |
				      (((temp >> 9) & 0x1) << 2);
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 1) & 0x3;
		p_fsp_param->dq_odt = (temp >> 8) & 0x7;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 3) & 0x7;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & 0x7;
		p_fsp_param->ca_odt = (temp >> 4) & 0x7;

		/*
		 * Average the trained CA vref windows (midpoint of the
		 * widest max / narrowest min across the two channels).
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		/* carry over the vref range select bit from PHY reg 0x1e */
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* save the NoC timing set that matches this frequency */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark this FSP slot as valid */
	p_fsp_param->flag = FSP_FLAG;
}
2699 
2700 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
/*
 * Mirror the in-SRAM fsp_param table to its DDR shadow address so the
 * next boot stage can pick up the saved frequency set points.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
2706 #endif
2707 
2708 void ddr_set_rate(struct dram_info *dram,
2709 		  struct rv1126_sdram_params *sdram_params,
2710 		  u32 freq, u32 cur_freq, u32 dst_fsp,
2711 		  u32 dst_fsp_lp4, u32 training_en)
2712 {
2713 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
2714 	u32 mr_tmp;
2715 	u32 lp_stat;
2716 	u32 dramtype = sdram_params->base.dramtype;
2717 	struct rv1126_sdram_params *sdram_params_new;
2718 	void __iomem *pctl_base = dram->pctl;
2719 	void __iomem *phy_base = dram->phy;
2720 
2721 	lp_stat = low_power_update(dram, 0);
2722 	sdram_params_new = get_default_sdram_config(freq);
2723 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
2724 
2725 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
2726 
2727 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
2728 			 PCTL2_OPERATING_MODE_MASK) ==
2729 			 PCTL2_OPERATING_MODE_SR)
2730 		continue;
2731 
2732 	dest_dll_off = 0;
2733 	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2734 			  DDR_PCTL2_INIT3);
2735 	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
2736 	    (dramtype == DDR4 && !(dst_init3 & 1)))
2737 		dest_dll_off = 1;
2738 
2739 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
2740 	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
2741 			  DDR_PCTL2_INIT3);
2742 	cur_init3 &= PCTL2_MR_MASK;
2743 	cur_dll_off = 1;
2744 	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
2745 	    (dramtype == DDR4 && (cur_init3 & 1)))
2746 		cur_dll_off = 0;
2747 
2748 	if (!cur_dll_off) {
2749 		if (dramtype == DDR3)
2750 			cur_init3 |= 1;
2751 		else
2752 			cur_init3 &= ~1;
2753 		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
2754 	}
2755 
2756 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
2757 		     PCTL2_DIS_AUTO_REFRESH);
2758 	update_refresh_reg(dram);
2759 
2760 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
2761 	while (1) {
2762 		if (((readl(pctl_base + DDR_PCTL2_STAT) &
2763 		      PCTL2_SELFREF_TYPE_MASK) ==
2764 		     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
2765 		    ((readl(pctl_base + DDR_PCTL2_STAT) &
2766 		      PCTL2_OPERATING_MODE_MASK) ==
2767 		     PCTL2_OPERATING_MODE_SR)) {
2768 			break;
2769 		}
2770 	}
2771 
2772 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
2773 	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
2774 	       dram->pmugrf->soc_con[0]);
2775 	sw_set_req(dram);
2776 	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
2777 		     PCTL2_DFI_INIT_COMPLETE_EN);
2778 	sw_set_ack(dram);
2779 
2780 	sw_set_req(dram);
2781 	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
2782 		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
2783 	else
2784 		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
2785 
2786 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
2787 		     PCTL2_DIS_SRX_ZQCL);
2788 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
2789 		     PCTL2_DIS_SRX_ZQCL);
2790 	sw_set_ack(dram);
2791 
2792 	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
2793 	       dram->cru->clkgate_con[2]);
2794 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
2795 					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
2796 					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
2797 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
2798 
2799 	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
2800 	rkclk_set_dpll(dram, freq * MHz / 2);
2801 	phy_pll_set(dram, freq * MHz, 0);
2802 	phy_pll_set(dram, freq * MHz, 1);
2803 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
2804 
2805 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
2806 			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
2807 			dram->pmugrf->soc_con[0]);
2808 	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
2809 	       dram->cru->clkgate_con[2]);
2810 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
2811 					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
2812 					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
2813 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
2814 	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
2815 	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
2816 		continue;
2817 
2818 	sw_set_req(dram);
2819 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2820 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
2821 	sw_set_ack(dram);
2822 	update_refresh_reg(dram);
2823 	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
2824 
2825 	clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
2826 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
2827 	       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
2828 		continue;
2829 
2830 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
2831 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
2832 
2833 	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
2834 	if (dramtype == LPDDR3) {
2835 		pctl_write_mr(dram->pctl, 3, 1,
2836 			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
2837 			      PCTL2_MR_MASK,
2838 			      dramtype);
2839 		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
2840 			      dramtype);
2841 		pctl_write_mr(dram->pctl, 3, 3,
2842 			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
2843 			      PCTL2_MR_MASK,
2844 			      dramtype);
2845 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
2846 	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
2847 		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
2848 			      dramtype);
2849 		if (!dest_dll_off) {
2850 			pctl_write_mr(dram->pctl, 3, 0,
2851 				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
2852 				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
2853 				      dramtype);
2854 			udelay(2);
2855 		}
2856 		pctl_write_mr(dram->pctl, 3, 0,
2857 			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
2858 			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
2859 			      dramtype);
2860 		pctl_write_mr(dram->pctl, 3, 2,
2861 			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
2862 			       PCTL2_MR_MASK), dramtype);
2863 		if (dramtype == DDR4) {
2864 			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
2865 				      dramtype);
2866 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2867 				       DDR_PCTL2_INIT6);
2868 			pctl_write_mr(dram->pctl, 3, 4,
2869 				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
2870 				       PCTL2_MR_MASK,
2871 				      dramtype);
2872 			pctl_write_mr(dram->pctl, 3, 5,
2873 				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
2874 				      PCTL2_MR_MASK,
2875 				      dramtype);
2876 
2877 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2878 				       DDR_PCTL2_INIT7);
2879 			pctl_write_mr(dram->pctl, 3, 6,
2880 				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
2881 				      PCTL2_MR_MASK,
2882 				      dramtype);
2883 		}
2884 	} else if (dramtype == LPDDR4) {
2885 		pctl_write_mr(dram->pctl, 3, 13,
2886 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2887 			       PCTL2_MR_MASK) & (~(BIT(7)))) |
2888 			      dst_fsp_lp4 << 7, dramtype);
2889 	}
2890 
2891 	/* training */
2892 	high_freq_training(dram, sdram_params_new, dst_fsp);
2893 
2894 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
2895 		     PCTL2_DIS_AUTO_REFRESH);
2896 	low_power_update(dram, lp_stat);
2897 
2898 	save_fsp_param(dram, dst_fsp, sdram_params_new);
2899 }
2900 
2901 static void ddr_set_rate_for_fsp(struct dram_info *dram,
2902 				 struct rv1126_sdram_params *sdram_params)
2903 {
2904 	struct ddr2_3_4_lp2_3_info *ddr_info;
2905 	u32 f0;
2906 	u32 dramtype = sdram_params->base.dramtype;
2907 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2908 	u32 f1, f2, f3;
2909 #endif
2910 
2911 	ddr_info = get_ddr_drv_odt_info(dramtype);
2912 	if (!ddr_info)
2913 		return;
2914 
2915 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2916 	     DDR_FREQ_MASK;
2917 
2918 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2919 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
2920 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
2921 
2922 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
2923 	     DDR_FREQ_MASK;
2924 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
2925 	     DDR_FREQ_MASK;
2926 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
2927 	     DDR_FREQ_MASK;
2928 #endif
2929 
2930 	if (get_wrlvl_val(dram, sdram_params))
2931 		printascii("get wrlvl value fail\n");
2932 
2933 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2934 	printascii("change to: ");
2935 	printdec(f1);
2936 	printascii("MHz\n");
2937 	ddr_set_rate(&dram_info, sdram_params, f1,
2938 		     sdram_params->base.ddr_freq, 1, 1, 1);
2939 	printascii("change to: ");
2940 	printdec(f2);
2941 	printascii("MHz\n");
2942 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
2943 	printascii("change to: ");
2944 	printdec(f3);
2945 	printascii("MHz\n");
2946 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
2947 #endif
2948 	printascii("change to: ");
2949 	printdec(f0);
2950 	printascii("MHz(final freq)\n");
2951 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2952 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
2953 #else
2954 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
2955 #endif
2956 }
2957 
2958 int get_uart_config(void)
2959 {
2960 	struct sdram_head_info_index_v2 *index =
2961 		(struct sdram_head_info_index_v2 *)common_info;
2962 	struct global_info *gbl_info;
2963 
2964 	gbl_info = (struct global_info *)((void *)common_info +
2965 		index->global_index.offset * 4);
2966 
2967 	return gbl_info->uart_info;
2968 }
2969 
2970 /* return: 0 = success, other = fail */
2971 int sdram_init(void)
2972 {
2973 	struct rv1126_sdram_params *sdram_params;
2974 	int ret = 0;
2975 	struct sdram_head_info_index_v2 *index =
2976 		(struct sdram_head_info_index_v2 *)common_info;
2977 	struct global_info *gbl_info;
2978 
2979 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
2980 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
2981 	dram_info.grf = (void *)GRF_BASE_ADDR;
2982 	dram_info.cru = (void *)CRU_BASE_ADDR;
2983 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
2984 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
2985 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
2986 
2987 	if (index->version_info != 2 ||
2988 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
2989 	    (index->ddr3_index.size !=
2990 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
2991 	    (index->ddr4_index.size !=
2992 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
2993 	    (index->lp3_index.size !=
2994 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
2995 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
2996 	    index->global_index.offset == 0 ||
2997 	    index->ddr3_index.offset == 0 ||
2998 	    index->ddr4_index.offset == 0 ||
2999 	    index->lp3_index.offset == 0 ||
3000 	    index->lp4_index.offset == 0) {
3001 		printascii("common info error\n");
3002 		goto error;
3003 	}
3004 
3005 	gbl_info = (struct global_info *)((void *)common_info +
3006 		index->global_index.offset * 4);
3007 
3008 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3009 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3010 
3011 	sdram_params = &sdram_configs[0];
3012 
3013 	if (sdram_params->base.dramtype == DDR3 ||
3014 	    sdram_params->base.dramtype == DDR4) {
3015 		if (DDR_2T_INFO(gbl_info->info_2t))
3016 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3017 		else
3018 			sdram_params->pctl_regs.pctl[0][1] &=
3019 				~(0x1 << 10);
3020 	}
3021 	ret = sdram_init_detect(&dram_info, sdram_params);
3022 	if (ret) {
3023 		sdram_print_dram_type(sdram_params->base.dramtype);
3024 		printascii(", ");
3025 		printdec(sdram_params->base.ddr_freq);
3026 		printascii("MHz\n");
3027 		goto error;
3028 	}
3029 	print_ddr_info(sdram_params);
3030 
3031 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3032 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
3033 	copy_fsp_param_to_ddr();
3034 #endif
3035 
3036 	ddr_set_atags(&dram_info, sdram_params);
3037 
3038 	printascii("out\n");
3039 
3040 	return ret;
3041 error:
3042 	printascii("error\n");
3043 	return (-1);
3044 }
3045 #endif /* CONFIG_TPL_BUILD */
3046