xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision d5bb9a92b151a7b9342d4099f6793e68d524f8fe)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/* Runtime state for the RV1126 DRAM driver. */
struct dram_info {
	void __iomem *pctl;		/* DDR controller (uPCTL2) register base */
	void __iomem *phy;		/* DDR PHY register base */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler registers */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system general register file */
	struct ram_info info;		/* base/size reported to the RAM uclass */
	struct rv1126_pmugrf *pmugrf;	/* PMU general register file */
	u32 sr_idle;			/* self-refresh idle timeout (units per consumer -- not visible here) */
	u32 pd_idle;			/* power-down idle timeout (units per consumer -- not visible here) */
};
55 
/* Fixed peripheral physical base addresses used during DRAM init. */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* Secure GRF offsets; CON13 carries the controller reset requests
 * (see rkclk_ddr_reset()). */
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* Global driver state shared by the helpers in this file. */
struct dram_info dram_info;
69 
/*
 * Frequency-ordered parameter sets for the configured DRAM type.
 * CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE selects the table: 3 = DDR3,
 * 0 = DDR4, 6 = LPDDR3, 7 = LPDDR4 (matching the .inc files below,
 * one entry per supported frequency from 328 to 1056 MHz).
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
111 
/* Loader parameter blob; laid out per struct sdram_head_info_index_v2
 * (see get_ddr_drv_odt_info()). */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

/* Per frequency-set-point parameters captured during init. */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* Cached LPDDR3 ODT value; consumer not visible in this chunk. */
static u8 lp3_odt_value;

/* Write-leveling results; presumably [rank][byte lane], mirroring
 * wrlvl_result_offset[2][4] -- TODO confirm. */
static s8 wrlvl_result[2][4];
121 
/*
 * DDR configuration 0-9 (DDR3/LPDDR3/LPDDR4 style encoding).
 * Field layout, as matched in calculate_ddrconfig():
 *   bit [8]    rank - 1
 *   bits[7:5]  cs0 row bits - 13
 *   bit [4]    meaning not visible in this chunk -- TODO confirm
 *   bit [3]    set when bank bits == 3 (8 banks)
 *   bits[2:0]  bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
135 
/*
 * DDR configuration 10-21 (DDR4 encoding).
 * Field layout, as matched in calculate_ddrconfig():
 *   bit [7]    rank - 1
 *   bits[6:4]  cs0 row bits - 13
 *   bit [3]    dual-rank/equal-row variant flag (set when matching the
 *              cs==2, row==cs1_row case)
 *   bits[2:1]  bus width code (bw)
 *   bit [0]    die width code (dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
151 
/*
 * DDR configuration 22-28, part 2 of the DDR3-style table.
 * Same bit layout as ddr_cfg_2_rbc[]; these indices are produced via
 * the DDR4 -> DDR3 remap (d4_rbc_2_d3_rbc[]) and the config-23
 * fallback rather than by direct matching.
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
162 
/*
 * Mapping between DDR4 config indices (10-21, column 0) and the
 * equivalent DDR3-style config index (column 1).  Applied forward in
 * calculate_ddrconfig() and in reverse in set_ctl_address_map().
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
177 
/*
 * uPCTL2 address-map register values, one row per ddrconfig index
 * (0-22).  Each row is copied verbatim into ADDRMAP0..ADDRMAP8
 * (9 consecutive registers) by set_ctl_address_map(); DDR4 configs are
 * remapped back through d4_rbc_2_d3_rbc[] before the lookup.
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
228 
/*
 * DQ selector triplets, one row per DQ line.  The consumer is not
 * visible in this chunk; presumably column 0 identifies the DQ and
 * columns 1-2 are per-direction PHY register selectors -- TODO confirm
 * against the routines that index this table.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
253 
/* Skew-group base addresses for the CS0/CS1 A/B address groups. */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
260 
/* PHY register offsets holding write-leveling results; indexed like
 * wrlvl_result[2][4] (presumably [rank][byte lane] -- TODO confirm). */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
265 
/* PHY skew-update register address for each RX/TX CSn/DQSn group
 * (see the per-entry SKEW_UPDATE_* comments). */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
284 
/*
 * Drive the DDR controller and PHY reset lines.
 * Each argument is the requested reset state for the corresponding
 * line (passed straight into the *_REQ() encodings).  The controller
 * core/AXI resets (AXI tied to ctl_srstn) go through the secure GRF;
 * the PHY resets live in CRU softrst_con[12].
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
296 
297 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
298 {
299 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
300 	int delay = 1000;
301 	u32 mhz = hz / MHz;
302 
303 	refdiv = 1;
304 	if (mhz <= 100) {
305 		postdiv1 = 6;
306 		postdiv2 = 4;
307 	} else if (mhz <= 150) {
308 		postdiv1 = 4;
309 		postdiv2 = 4;
310 	} else if (mhz <= 200) {
311 		postdiv1 = 6;
312 		postdiv2 = 2;
313 	} else if (mhz <= 300) {
314 		postdiv1 = 4;
315 		postdiv2 = 2;
316 	} else if (mhz <= 400) {
317 		postdiv1 = 6;
318 		postdiv2 = 1;
319 	} else {
320 		postdiv1 = 4;
321 		postdiv2 = 1;
322 	}
323 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
324 
325 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
326 
327 	writel(0x1f000000, &dram->cru->clksel_con[64]);
328 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
329 	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
330 	       &dram->cru->pll[1].con1);
331 
332 	while (delay > 0) {
333 		udelay(1);
334 		if (LOCK(readl(&dram->cru->pll[1].con1)))
335 			break;
336 		delay--;
337 	}
338 
339 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
340 }
341 
/*
 * Set the DPLL for the selected parameter set.  The inno DDR PHY
 * clocks at half the DRAM data rate, hence the divide by 2.
 * NOTE(review): this uses MHZ while rkclk_set_dpll() divides by MHz --
 * presumably both are 1000000; confirm they match.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
348 
/*
 * Pulse the PHY soft reset: clear, then re-set the analog and digital
 * de-reset bits in PHY register 0 with a 1 us settle on each edge.
 * NOTE(review): the clear uses a raw 0x3 << 2 mask -- presumably the
 * same bits as ANALOG_DERESET | DIGITAL_DERESET; confirm.
 */
static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	udelay(1);
}
358 
/*
 * Select the ddrconfig index covering the detected DRAM geometry
 * (rank/row/bank/column/width from @sdram_params->ch.cap_info).
 *
 * DDR4 is matched against ddr4_cfg_2_rbc[] (configs 10-21) and the
 * winner is then remapped to a DDR3-style index via d4_rbc_2_d3_rbc[];
 * other types match ddr_cfg_2_rbc[] (configs 0-9) with config 23 as a
 * single-rank fallback.
 *
 * Returns the selected index.  If nothing matches, ddrconf keeps its
 * initial (u32)-1: the "> 28" check prints an error, but the bogus
 * value is still returned to the caller.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* Dual rank with equal row counts: prefer configs 17-20. */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* General match: exact low nibble, row field (and rank
		 * bit) may be covered by a larger config. */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* Dual rank, equal rows, 8 banks: try configs 5-7 first. */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}
		/* Fallback: single rank, 8 banks, 12 column+width bits. */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* Remap DDR4 configs to the DDR3-style index space. */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
452 
/*
 * Open a quasi-dynamic register programming window on the controller
 * by clearing sw_done; pair with sw_set_ack().
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
460 
461 static void sw_set_ack(struct dram_info *dram)
462 {
463 	void __iomem *pctl_base = dram->pctl;
464 
465 	/* set sw_done=1 */
466 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
467 	while (1) {
468 		/* wait programming done */
469 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
470 				PCTL2_SW_DONE_ACK)
471 			break;
472 	}
473 }
474 
475 static void set_ctl_address_map(struct dram_info *dram,
476 				struct rv1126_sdram_params *sdram_params)
477 {
478 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
479 	void __iomem *pctl_base = dram->pctl;
480 	u32 ddrconf = cap_info->ddrconfig;
481 	u32 i, row;
482 
483 	row = cap_info->cs0_row;
484 	if (sdram_params->base.dramtype == DDR4) {
485 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
486 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
487 				ddrconf = d4_rbc_2_d3_rbc[i][0];
488 				break;
489 			}
490 		}
491 	}
492 
493 	if (ddrconf > ARRAY_SIZE(addrmap)) {
494 		printascii("set ctl address map fail\n");
495 		return;
496 	}
497 
498 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
499 			  &addrmap[ddrconf][0], 9 * 4);
500 
501 	/* unused row set to 0xf */
502 	for (i = 17; i >= row; i--)
503 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
504 			((i - 12) * 8 / 32) * 4,
505 			0xf << ((i - 12) * 8 % 32));
506 
507 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
508 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
509 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
510 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
511 
512 	if (cap_info->rank == 1)
513 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
514 }
515 
/*
 * Configure or wait on the PHY PLL.
 * @wait != 0: power the PLL up (clear PD) and spin until the lock bit
 * in PHY reg 0x90 is set; @freq is ignored.
 * @wait == 0: program prediv/fbdiv/postdiv for @freq in Hz, with the
 * divider pair chosen per frequency band (postdiv bypassed above
 * 456 MHz).
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is 9 bits: low byte in reg 0x50, bit 8 in 0x51. */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
554 
/* DDR3 PHY drive-strength code -> output impedance (ohms), strongest
 * drive last; scanned bottom-up by set_ds_odt() to round the requested
 * impedance up to the nearest available setting. */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_506ohm, 506},
	{PHY_DDR3_RON_253ohm, 253},
	{PHY_DDR3_RON_169hm, 169},
	{PHY_DDR3_RON_127ohm, 127},
	{PHY_DDR3_RON_101ohm, 101},
	{PHY_DDR3_RON_84ohm, 84},
	{PHY_DDR3_RON_72ohm, 72},
	{PHY_DDR3_RON_63ohm, 63},
	{PHY_DDR3_RON_56ohm, 56},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_42ohm, 42},
	{PHY_DDR3_RON_39ohm, 39},
	{PHY_DDR3_RON_36ohm, 36},
	{PHY_DDR3_RON_34ohm, 34},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22}
};
580 
581 static u16 d3_phy_odt_2_ohm[][2] = {
582 	{PHY_DDR3_RTT_DISABLE, 0},
583 	{PHY_DDR3_RTT_953ohm, 953},
584 	{PHY_DDR3_RTT_483ohm, 483},
585 	{PHY_DDR3_RTT_320ohm, 320},
586 	{PHY_DDR3_RTT_241ohm, 241},
587 	{PHY_DDR3_RTT_193ohm, 193},
588 	{PHY_DDR3_RTT_161ohm, 161},
589 	{PHY_DDR3_RTT_138ohm, 138},
590 	{PHY_DDR3_RTT_121ohm, 121},
591 	{PHY_DDR3_RTT_107ohm, 107},
592 	{PHY_DDR3_RTT_97ohm, 97},
593 	{PHY_DDR3_RTT_88ohm, 88},
594 	{PHY_DDR3_RTT_80ohm, 80},
595 	{PHY_DDR3_RTT_74ohm, 74},
596 	{PHY_DDR3_RTT_69ohm, 69},
597 	{PHY_DDR3_RTT_64ohm, 64},
598 	{PHY_DDR3_RTT_60ohm, 60},
599 	{PHY_DDR3_RTT_57ohm, 57},
600 	{PHY_DDR3_RTT_54ohm, 54},
601 	{PHY_DDR3_RTT_51ohm, 51},
602 	{PHY_DDR3_RTT_48ohm, 48},
603 	{PHY_DDR3_RTT_46ohm, 46},
604 	{PHY_DDR3_RTT_44ohm, 44},
605 	{PHY_DDR3_RTT_42ohm, 42}
606 };
607 
608 static u16 d4lp3_phy_drv_2_ohm[][2] = {
609 	{PHY_DDR4_LPDDR3_RON_570ohm, 570},
610 	{PHY_DDR4_LPDDR3_RON_285ohm, 285},
611 	{PHY_DDR4_LPDDR3_RON_190ohm, 190},
612 	{PHY_DDR4_LPDDR3_RON_142ohm, 142},
613 	{PHY_DDR4_LPDDR3_RON_114ohm, 114},
614 	{PHY_DDR4_LPDDR3_RON_95ohm, 95},
615 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
616 	{PHY_DDR4_LPDDR3_RON_71ohm, 71},
617 	{PHY_DDR4_LPDDR3_RON_63ohm, 63},
618 	{PHY_DDR4_LPDDR3_RON_57ohm, 57},
619 	{PHY_DDR4_LPDDR3_RON_52ohm, 52},
620 	{PHY_DDR4_LPDDR3_RON_47ohm, 47},
621 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
622 	{PHY_DDR4_LPDDR3_RON_41ohm, 41},
623 	{PHY_DDR4_LPDDR3_RON_38ohm, 38},
624 	{PHY_DDR4_LPDDR3_RON_36ohm, 36},
625 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
626 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
627 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
628 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
629 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
630 	{PHY_DDR4_LPDDR3_RON_26ohm, 26},
631 	{PHY_DDR4_LPDDR3_RON_25ohm, 25}
632 };
633 
634 static u16 d4lp3_phy_odt_2_ohm[][2] = {
635 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
636 	{PHY_DDR4_LPDDR3_RTT_973ohm, 973},
637 	{PHY_DDR4_LPDDR3_RTT_493ohm, 493},
638 	{PHY_DDR4_LPDDR3_RTT_327ohm, 327},
639 	{PHY_DDR4_LPDDR3_RTT_247ohm, 247},
640 	{PHY_DDR4_LPDDR3_RTT_197ohm, 197},
641 	{PHY_DDR4_LPDDR3_RTT_164ohm, 164},
642 	{PHY_DDR4_LPDDR3_RTT_141ohm, 141},
643 	{PHY_DDR4_LPDDR3_RTT_123ohm, 123},
644 	{PHY_DDR4_LPDDR3_RTT_109ohm, 109},
645 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
646 	{PHY_DDR4_LPDDR3_RTT_90ohm, 90},
647 	{PHY_DDR4_LPDDR3_RTT_82ohm, 82},
648 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
649 	{PHY_DDR4_LPDDR3_RTT_70ohm, 70},
650 	{PHY_DDR4_LPDDR3_RTT_66ohm, 66},
651 	{PHY_DDR4_LPDDR3_RTT_62ohm, 62},
652 	{PHY_DDR4_LPDDR3_RTT_58ohm, 58},
653 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
654 	{PHY_DDR4_LPDDR3_RTT_52ohm, 52},
655 	{PHY_DDR4_LPDDR3_RTT_49ohm, 49},
656 	{PHY_DDR4_LPDDR3_RTT_47ohm, 47},
657 	{PHY_DDR4_LPDDR3_RTT_45ohm, 45},
658 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43}
659 };
660 
661 static u16 lp4_phy_drv_2_ohm[][2] = {
662 	{PHY_LPDDR4_RON_606ohm, 606},
663 	{PHY_LPDDR4_RON_303ohm, 303},
664 	{PHY_LPDDR4_RON_202ohm, 202},
665 	{PHY_LPDDR4_RON_152ohm, 153},
666 	{PHY_LPDDR4_RON_121ohm, 121},
667 	{PHY_LPDDR4_RON_101ohm, 101},
668 	{PHY_LPDDR4_RON_87ohm, 87},
669 	{PHY_LPDDR4_RON_76ohm, 76},
670 	{PHY_LPDDR4_RON_67ohm, 67},
671 	{PHY_LPDDR4_RON_61ohm, 61},
672 	{PHY_LPDDR4_RON_55ohm, 55},
673 	{PHY_LPDDR4_RON_51ohm, 51},
674 	{PHY_LPDDR4_RON_47ohm, 47},
675 	{PHY_LPDDR4_RON_43ohm, 43},
676 	{PHY_LPDDR4_RON_40ohm, 40},
677 	{PHY_LPDDR4_RON_38ohm, 38},
678 	{PHY_LPDDR4_RON_36ohm, 36},
679 	{PHY_LPDDR4_RON_34ohm, 34},
680 	{PHY_LPDDR4_RON_32ohm, 32},
681 	{PHY_LPDDR4_RON_30ohm, 30},
682 	{PHY_LPDDR4_RON_29ohm, 29},
683 	{PHY_LPDDR4_RON_28ohm, 28},
684 	{PHY_LPDDR4_RON_26ohm, 26}
685 };
686 
687 static u16 lp4_phy_odt_2_ohm[][2] = {
688 	{PHY_LPDDR4_RTT_DISABLE, 0},
689 	{PHY_LPDDR4_RTT_998ohm, 998},
690 	{PHY_LPDDR4_RTT_506ohm, 506},
691 	{PHY_LPDDR4_RTT_336ohm, 336},
692 	{PHY_LPDDR4_RTT_253ohm, 253},
693 	{PHY_LPDDR4_RTT_202ohm, 202},
694 	{PHY_LPDDR4_RTT_169ohm, 169},
695 	{PHY_LPDDR4_RTT_144ohm, 144},
696 	{PHY_LPDDR4_RTT_127ohm, 127},
697 	{PHY_LPDDR4_RTT_112ohm, 112},
698 	{PHY_LPDDR4_RTT_101ohm, 101},
699 	{PHY_LPDDR4_RTT_92ohm, 92},
700 	{PHY_LPDDR4_RTT_84ohm, 84},
701 	{PHY_LPDDR4_RTT_78ohm, 78},
702 	{PHY_LPDDR4_RTT_72ohm, 72},
703 	{PHY_LPDDR4_RTT_67ohm, 67},
704 	{PHY_LPDDR4_RTT_63ohm, 63},
705 	{PHY_LPDDR4_RTT_60ohm, 60},
706 	{PHY_LPDDR4_RTT_56ohm, 56},
707 	{PHY_LPDDR4_RTT_53ohm, 53},
708 	{PHY_LPDDR4_RTT_51ohm, 51},
709 	{PHY_LPDDR4_RTT_48ohm, 48},
710 	{PHY_LPDDR4_RTT_46ohm, 46},
711 	{PHY_LPDDR4_RTT_44ohm, 44}
712 };
713 
714 static u32 lp4_odt_calc(u32 odt_ohm)
715 {
716 	u32 odt;
717 
718 	if (odt_ohm == 0)
719 		odt = LPDDR4_DQODT_DIS;
720 	else if (odt_ohm <= 40)
721 		odt = LPDDR4_DQODT_40;
722 	else if (odt_ohm <= 48)
723 		odt = LPDDR4_DQODT_48;
724 	else if (odt_ohm <= 60)
725 		odt = LPDDR4_DQODT_60;
726 	else if (odt_ohm <= 80)
727 		odt = LPDDR4_DQODT_80;
728 	else if (odt_ohm <= 120)
729 		odt = LPDDR4_DQODT_120;
730 	else
731 		odt = LPDDR4_DQODT_240;
732 
733 	return odt;
734 }
735 
736 static void *get_ddr_drv_odt_info(u32 dramtype)
737 {
738 	struct sdram_head_info_index_v2 *index =
739 		(struct sdram_head_info_index_v2 *)common_info;
740 	void *ddr_info = 0;
741 
742 	if (dramtype == DDR4)
743 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
744 	else if (dramtype == DDR3)
745 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
746 	else if (dramtype == LPDDR3)
747 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
748 	else if (dramtype == LPDDR4)
749 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
750 	else
751 		printascii("unsupported dram type\n");
752 	return ddr_info;
753 }
754 
755 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
756 			 u32 freq_mhz, u32 dst_fsp)
757 {
758 	void __iomem *pctl_base = dram->pctl;
759 	u32 ca_vref, dq_vref;
760 
761 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
762 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
763 	else
764 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
765 
766 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
767 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
768 	else
769 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
770 
771 	if (ca_vref < 100)
772 		ca_vref = 100;
773 	if (ca_vref > 420)
774 		ca_vref = 420;
775 
776 	if (ca_vref <= 300)
777 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
778 	else
779 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
780 
781 	if (dq_vref < 100)
782 		dq_vref = 100;
783 	if (dq_vref > 420)
784 		dq_vref = 420;
785 
786 	if (dq_vref <= 300)
787 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
788 	else
789 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
790 
791 	sw_set_req(dram);
792 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
793 			DDR_PCTL2_INIT6,
794 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
795 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
796 
797 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
798 			DDR_PCTL2_INIT7,
799 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
800 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
801 	sw_set_ack(dram);
802 }
803 
804 static void set_ds_odt(struct dram_info *dram,
805 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
806 {
807 	void __iomem *phy_base = dram->phy;
808 	void __iomem *pctl_base = dram->pctl;
809 	u32 dramtype = sdram_params->base.dramtype;
810 	struct ddr2_3_4_lp2_3_info *ddr_info;
811 	struct lp4_info *lp4_info;
812 	u32 i, j, tmp;
813 	const u16 (*p_drv)[2];
814 	const u16 (*p_odt)[2];
815 	u32 drv_info, sr_info;
816 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
817 	u32 phy_odt_ohm, dram_odt_ohm;
818 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
819 	u32 phy_odt_up_en, phy_odt_dn_en;
820 	u32 sr_dq, sr_clk;
821 	u32 freq = sdram_params->base.ddr_freq;
822 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
823 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
824 	u32 phy_dq_drv = 0;
825 	u32 phy_odt_up = 0, phy_odt_dn = 0;
826 
827 	ddr_info = get_ddr_drv_odt_info(dramtype);
828 	lp4_info = (void *)ddr_info;
829 
830 	if (!ddr_info)
831 		return;
832 
833 	/* dram odt en freq control phy drv, dram odt and phy sr */
834 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
835 		drv_info = ddr_info->drv_when_odtoff;
836 		dram_odt_ohm = 0;
837 		sr_info = ddr_info->sr_when_odtoff;
838 		phy_lp4_drv_pd_en =
839 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
840 	} else {
841 		drv_info = ddr_info->drv_when_odten;
842 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
843 		sr_info = ddr_info->sr_when_odten;
844 		phy_lp4_drv_pd_en =
845 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
846 	}
847 	phy_dq_drv_ohm =
848 		DRV_INFO_PHY_DQ_DRV(drv_info);
849 	phy_clk_drv_ohm =
850 		DRV_INFO_PHY_CLK_DRV(drv_info);
851 	phy_ca_drv_ohm =
852 		DRV_INFO_PHY_CA_DRV(drv_info);
853 
854 	sr_dq = DQ_SR_INFO(sr_info);
855 	sr_clk = CLK_SR_INFO(sr_info);
856 
857 	/* phy odt en freq control dram drv and phy odt */
858 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
859 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
860 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
861 		phy_odt_ohm = 0;
862 		phy_odt_up_en = 0;
863 		phy_odt_dn_en = 0;
864 	} else {
865 		dram_drv_ohm =
866 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
867 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
868 		phy_odt_up_en =
869 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
870 		phy_odt_dn_en =
871 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
872 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
873 	}
874 
875 	if (dramtype == LPDDR4) {
876 		if (phy_odt_ohm) {
877 			phy_odt_up_en = 0;
878 			phy_odt_dn_en = 1;
879 		}
880 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
881 			dram_caodt_ohm = 0;
882 		else
883 			dram_caodt_ohm =
884 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
885 	}
886 
887 	if (dramtype == DDR3) {
888 		p_drv = d3_phy_drv_2_ohm;
889 		p_odt = d3_phy_odt_2_ohm;
890 	} else if (dramtype == LPDDR4) {
891 		p_drv = lp4_phy_drv_2_ohm;
892 		p_odt = lp4_phy_odt_2_ohm;
893 	} else {
894 		p_drv = d4lp3_phy_drv_2_ohm;
895 		p_odt = d4lp3_phy_odt_2_ohm;
896 	}
897 
898 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
899 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
900 			phy_dq_drv = **(p_drv + i);
901 			break;
902 		}
903 		if (i == 0)
904 			break;
905 	}
906 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
907 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
908 			phy_clk_drv = **(p_drv + i);
909 			break;
910 		}
911 		if (i == 0)
912 			break;
913 	}
914 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
915 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
916 			phy_ca_drv = **(p_drv + i);
917 			break;
918 		}
919 		if (i == 0)
920 			break;
921 	}
922 	if (!phy_odt_ohm)
923 		phy_odt = 0;
924 	else
925 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
926 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
927 				phy_odt = **(p_odt + i);
928 				break;
929 			}
930 			if (i == 0)
931 				break;
932 		}
933 
934 	if (dramtype != LPDDR4) {
935 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
936 			vref_inner = 0x80;
937 		else if (phy_odt_up_en)
938 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
939 				     (dram_drv_ohm + phy_odt_ohm);
940 		else
941 			vref_inner = phy_odt_ohm * 128 /
942 				(phy_odt_ohm + dram_drv_ohm);
943 
944 		if (dramtype != DDR3 && dram_odt_ohm)
945 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
946 				   (phy_dq_drv_ohm + dram_odt_ohm);
947 		else
948 			vref_out = 0x80;
949 	} else {
950 		/* for lp4 */
951 		if (phy_odt_ohm)
952 			vref_inner =
953 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
954 				 256) / 1000;
955 		else
956 			vref_inner =
957 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
958 				 256) / 1000;
959 
960 		vref_out = 0x80;
961 	}
962 
963 	/* default ZQCALIB bypass mode */
964 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
965 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
966 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
967 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
968 	/* clk / cmd slew rate */
969 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
970 
971 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
972 	if (phy_odt_up_en)
973 		phy_odt_up = phy_odt;
974 	if (phy_odt_dn_en)
975 		phy_odt_dn = phy_odt;
976 
977 	for (i = 0; i < 4; i++) {
978 		j = 0x110 + i * 0x10;
979 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
980 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
981 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
982 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
983 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
984 
985 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
986 				1 << 3, phy_lp4_drv_pd_en << 3);
987 		/* dq slew rate */
988 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
989 				0x1f, sr_dq);
990 	}
991 
992 	/* reg_rx_vref_value_update */
993 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
994 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
995 
996 	/* RAM VREF */
997 	writel(vref_out, PHY_REG(phy_base, 0x105));
998 	if (dramtype == LPDDR3)
999 		udelay(100);
1000 
1001 	if (dramtype == LPDDR4)
1002 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1003 
1004 	if (dramtype == DDR3 || dramtype == DDR4) {
1005 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1006 				DDR_PCTL2_INIT3);
1007 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1008 	} else {
1009 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1010 				DDR_PCTL2_INIT4);
1011 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1012 	}
1013 
1014 	if (dramtype == DDR3) {
1015 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1016 		if (dram_drv_ohm == 34)
1017 			mr1_mr3 |= DDR3_DS_34;
1018 
1019 		if (dram_odt_ohm == 0)
1020 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1021 		else if (dram_odt_ohm <= 40)
1022 			mr1_mr3 |= DDR3_RTT_NOM_40;
1023 		else if (dram_odt_ohm <= 60)
1024 			mr1_mr3 |= DDR3_RTT_NOM_60;
1025 		else
1026 			mr1_mr3 |= DDR3_RTT_NOM_120;
1027 
1028 	} else if (dramtype == DDR4) {
1029 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1030 		if (dram_drv_ohm == 48)
1031 			mr1_mr3 |= DDR4_DS_48;
1032 
1033 		if (dram_odt_ohm == 0)
1034 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1035 		else if (dram_odt_ohm <= 34)
1036 			mr1_mr3 |= DDR4_RTT_NOM_34;
1037 		else if (dram_odt_ohm <= 40)
1038 			mr1_mr3 |= DDR4_RTT_NOM_40;
1039 		else if (dram_odt_ohm <= 48)
1040 			mr1_mr3 |= DDR4_RTT_NOM_48;
1041 		else if (dram_odt_ohm <= 60)
1042 			mr1_mr3 |= DDR4_RTT_NOM_60;
1043 		else
1044 			mr1_mr3 |= DDR4_RTT_NOM_120;
1045 
1046 	} else if (dramtype == LPDDR3) {
1047 		if (dram_drv_ohm <= 34)
1048 			mr1_mr3 |= LPDDR3_DS_34;
1049 		else if (dram_drv_ohm <= 40)
1050 			mr1_mr3 |= LPDDR3_DS_40;
1051 		else if (dram_drv_ohm <= 48)
1052 			mr1_mr3 |= LPDDR3_DS_48;
1053 		else if (dram_drv_ohm <= 60)
1054 			mr1_mr3 |= LPDDR3_DS_60;
1055 		else if (dram_drv_ohm <= 80)
1056 			mr1_mr3 |= LPDDR3_DS_80;
1057 
1058 		if (dram_odt_ohm == 0)
1059 			lp3_odt_value = LPDDR3_ODT_DIS;
1060 		else if (dram_odt_ohm <= 60)
1061 			lp3_odt_value = LPDDR3_ODT_60;
1062 		else if (dram_odt_ohm <= 120)
1063 			lp3_odt_value = LPDDR3_ODT_120;
1064 		else
1065 			lp3_odt_value = LPDDR3_ODT_240;
1066 	} else {/* for lpddr4 */
1067 		/* MR3 for lp4 PU-CAL and PDDS */
1068 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1069 		mr1_mr3 |= lp4_pu_cal;
1070 
1071 		tmp = lp4_odt_calc(dram_drv_ohm);
1072 		if (!tmp)
1073 			tmp = LPDDR4_PDDS_240;
1074 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1075 
1076 		/* MR11 for lp4 ca odt, dq odt set */
1077 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1078 			     DDR_PCTL2_INIT6);
1079 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1080 
1081 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1082 
1083 		tmp = lp4_odt_calc(dram_odt_ohm);
1084 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1085 
1086 		tmp = lp4_odt_calc(dram_caodt_ohm);
1087 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1088 		sw_set_req(dram);
1089 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1090 				DDR_PCTL2_INIT6,
1091 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1092 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1093 		sw_set_ack(dram);
1094 
1095 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1096 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1097 			     DDR_PCTL2_INIT7);
1098 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1099 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1100 
1101 		tmp = lp4_odt_calc(phy_odt_ohm);
1102 		mr22 |= tmp;
1103 		mr22 = mr22 |
1104 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1105 			LPDDR4_ODTE_CK_SHIFT) |
1106 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1107 			LPDDR4_ODTE_CS_SHIFT) |
1108 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1109 			LPDDR4_ODTD_CA_SHIFT);
1110 
1111 		sw_set_req(dram);
1112 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1113 				DDR_PCTL2_INIT7,
1114 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1115 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1116 		sw_set_ack(dram);
1117 	}
1118 
1119 	if (dramtype == DDR4 || dramtype == DDR3) {
1120 		sw_set_req(dram);
1121 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1122 				DDR_PCTL2_INIT3,
1123 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1124 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1125 		sw_set_ack(dram);
1126 	} else {
1127 		sw_set_req(dram);
1128 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1129 				DDR_PCTL2_INIT4,
1130 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1131 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1132 		sw_set_ack(dram);
1133 	}
1134 }
1135 
1136 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1137 				   struct rv1126_sdram_params *sdram_params)
1138 {
1139 	void __iomem *phy_base = dram->phy;
1140 	u32 dramtype = sdram_params->base.dramtype;
1141 	struct sdram_head_info_index_v2 *index =
1142 		(struct sdram_head_info_index_v2 *)common_info;
1143 	struct dq_map_info *map_info;
1144 
1145 	map_info = (struct dq_map_info *)((void *)common_info +
1146 		index->dq_map_index.offset * 4);
1147 
1148 	if (dramtype <= LPDDR4)
1149 		writel((map_info->byte_map[dramtype / 4] >>
1150 			((dramtype % 4) * 8)) & 0xff,
1151 		       PHY_REG(phy_base, 0x4f));
1152 
1153 	return 0;
1154 }
1155 
/*
 * Apply the static PHY configuration for the target frequency: DQ byte
 * remap, PLL setup, the board-supplied PHY register table, byte-lane
 * enables matching the detected bus width, and training presets.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* program the 0xFFFFFFFF-terminated phy register table */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/*
	 * Find which physical lanes carry logical bytes 0 and 1
	 * (2 bits per lane in the remap register written above).
	 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* enable only the byte lanes needed for the detected bus width */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1201 
1202 static int update_refresh_reg(struct dram_info *dram)
1203 {
1204 	void __iomem *pctl_base = dram->pctl;
1205 	u32 ret;
1206 
1207 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1208 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1209 
1210 	return 0;
1211 }
1212 
1213 /*
1214  * rank = 1: cs0
1215  * rank = 2: cs1
1216  */
1217 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1218 {
1219 	u32 ret;
1220 	u32 i, temp;
1221 	u32 dqmap;
1222 
1223 	void __iomem *pctl_base = dram->pctl;
1224 	struct sdram_head_info_index_v2 *index =
1225 		(struct sdram_head_info_index_v2 *)common_info;
1226 	struct dq_map_info *map_info;
1227 
1228 	map_info = (struct dq_map_info *)((void *)common_info +
1229 		index->dq_map_index.offset * 4);
1230 
1231 	if (dramtype == LPDDR2)
1232 		dqmap = map_info->lp2_dq0_7_map;
1233 	else
1234 		dqmap = map_info->lp3_dq0_7_map;
1235 
1236 	pctl_read_mr(pctl_base, rank, mr_num);
1237 
1238 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1239 
1240 	if (dramtype != LPDDR4) {
1241 		temp = 0;
1242 		for (i = 0; i < 8; i++) {
1243 			temp = temp | (((ret >> i) & 0x1) <<
1244 				       ((dqmap >> (i * 4)) & 0xf));
1245 		}
1246 	} else {
1247 		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1248 	}
1249 
1250 	return ret;
1251 }
1252 
1253 /* before call this function autorefresh should be disabled */
1254 void send_a_refresh(struct dram_info *dram)
1255 {
1256 	void __iomem *pctl_base = dram->pctl;
1257 
1258 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1259 		continue;
1260 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1261 }
1262 
1263 void record_dq_prebit(struct dram_info *dram)
1264 {
1265 	u32 group, i, tmp;
1266 	void __iomem *phy_base = dram->phy;
1267 
1268 	for (group = 0; group < 4; group++) {
1269 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1270 			/* l_loop_invdelaysel */
1271 			writel(dq_sel[i][0], PHY_REG(phy_base,
1272 						     grp_addr[group] + 0x2c));
1273 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1274 			writel(tmp, PHY_REG(phy_base,
1275 					    grp_addr[group] + dq_sel[i][1]));
1276 
1277 			/* r_loop_invdelaysel */
1278 			writel(dq_sel[i][0], PHY_REG(phy_base,
1279 						     grp_addr[group] + 0x2d));
1280 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1281 			writel(tmp, PHY_REG(phy_base,
1282 					    grp_addr[group] + dq_sel[i][2]));
1283 		}
1284 	}
1285 }
1286 
/*
 * Latch the recorded RX per-bit de-skew values into the PHY by pulsing
 * PHY_0x70[4].  Bit 1 (the read-train enable used in data_training_rd)
 * and bit 6 are cleared at the same time; bit 6's exact meaning is not
 * visible here -- NOTE(review): confirm against the PHY manual.
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1296 
/*
 * Latch the recorded TX per-bit de-skew values into the PHY: write
 * training is disabled first (PHY_0x7a[1], see data_training_wr), then
 * PHY_0xc[6] is pulsed as the update strobe with PHY_0x2[3] set.
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* PHY_0x7a[1]: reg_dq_wr_train_en off */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	/* pulse the update strobe */
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1307 
/*
 * Latch the CA de-skew values written by modify_ca_deskew() into the
 * PHY: clear PHY_0x25[2], then pulse PHY_0x22[6] as the update strobe.
 * NOTE(review): bit meanings inferred from the rx/tx variants above --
 * confirm against the PHY manual.
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1317 
1318 /*
1319  * dir: 0: de-skew = delta_*
1320  *	1: de-skew = reg val - delta_*
1321  * delta_dir: value for differential signal: clk/
1322  * delta_sig: value for single signal: ca/cmd
1323  */
1324 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1325 			     int delta_sig, u32 cs, u32 dramtype)
1326 {
1327 	void __iomem *phy_base = dram->phy;
1328 	u32 i, cs_en, tmp;
1329 
1330 	if (cs == 0)
1331 		cs_en = 1;
1332 	else if (cs == 2)
1333 		cs_en = 2;
1334 	else
1335 		cs_en = 3;
1336 
1337 	for (i = 0; i < 0x20; i++) {
1338 		if (dir == DESKEW_MDF_ABS_VAL)
1339 			tmp = delta_sig;
1340 		else
1341 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1342 			      delta_sig;
1343 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1344 	}
1345 
1346 	if (dir == DESKEW_MDF_ABS_VAL)
1347 		tmp = delta_dif;
1348 	else
1349 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1350 		       delta_sig + delta_dif;
1351 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1352 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1353 	if (dramtype == LPDDR4) {
1354 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1355 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1356 
1357 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1358 		update_ca_prebit(dram);
1359 	}
1360 }
1361 
1362 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1363 {
1364 	u32 i, j, offset = 0;
1365 	u32 min = 0x3f;
1366 	void __iomem *phy_base = dram->phy;
1367 	u32 byte_en;
1368 
1369 	if (signal == SKEW_TX_SIGNAL)
1370 		offset = 8;
1371 
1372 	if (signal == SKEW_CA_SIGNAL) {
1373 		for (i = 0; i < 0x20; i++)
1374 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1375 	} else {
1376 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1377 		for (j = offset; j < offset + rank * 4; j++) {
1378 			if (!((byte_en >> (j % 4)) & 1))
1379 				continue;
1380 			for (i = 0; i < 11; i++)
1381 				min = MIN(min,
1382 					  readl(PHY_REG(phy_base,
1383 							dqs_dq_skew_adr[j] +
1384 							i)));
1385 		}
1386 	}
1387 
1388 	return min;
1389 }
1390 
1391 static u32 low_power_update(struct dram_info *dram, u32 en)
1392 {
1393 	void __iomem *pctl_base = dram->pctl;
1394 	u32 lp_stat = 0;
1395 
1396 	if (en) {
1397 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1398 	} else {
1399 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1400 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1401 	}
1402 
1403 	return lp_stat;
1404 }
1405 
1406 /*
1407  * signal:
1408  * dir: 0: de-skew = delta_*
1409  *	1: de-skew = reg val - delta_*
1410  * delta_dir: value for differential signal: dqs
1411  * delta_sig: value for single signal: dq/dm
1412  */
1413 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1414 			     int delta_dif, int delta_sig, u32 rank)
1415 {
1416 	void __iomem *phy_base = dram->phy;
1417 	u32 i, j, tmp, offset;
1418 	u32 byte_en;
1419 
1420 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1421 
1422 	if (signal == SKEW_RX_SIGNAL)
1423 		offset = 0;
1424 	else
1425 		offset = 8;
1426 
1427 	for (j = offset; j < (offset + rank * 4); j++) {
1428 		if (!((byte_en >> (j % 4)) & 1))
1429 			continue;
1430 		for (i = 0; i < 0x9; i++) {
1431 			if (dir == DESKEW_MDF_ABS_VAL)
1432 				tmp = delta_sig;
1433 			else
1434 				tmp = delta_sig + readl(PHY_REG(phy_base,
1435 							dqs_dq_skew_adr[j] +
1436 							i));
1437 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1438 		}
1439 		if (dir == DESKEW_MDF_ABS_VAL)
1440 			tmp = delta_dif;
1441 		else
1442 			tmp = delta_dif + readl(PHY_REG(phy_base,
1443 						dqs_dq_skew_adr[j] + 9));
1444 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1445 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1446 	}
1447 	if (signal == SKEW_RX_SIGNAL)
1448 		update_dq_rx_prebit(dram);
1449 	else
1450 		update_dq_tx_prebit(dram);
1451 }
1452 
/*
 * Read gate training for one chip select.
 *
 * For non-LPDDR4 types the per-byte PHY ODT is temporarily forced to
 * 247 ohm pull-down with pull-up disabled, and restored afterwards.
 *
 * Returns -1 if the PHY reports a gate-training fault (PHY_0x91[5]),
 * otherwise a bitmask of enabled byte lanes that did NOT complete
 * (0 means success).
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save byte-0 ODT values; they are restored to all bytes below */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_247ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* DDR4 re-asserts PHY_0xc[1] for the training */
	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* 0x91[5]: fault flag; 0x91[3:0]: per-byte done flags */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		/* restore the saved ODT to all four byte lanes */
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1505 
/*
 * Write leveling for one chip select.
 *
 * The MR1 value of the current fsp (from the controller INIT3 shadow)
 * is handed to the PHY via regs 0x3/0x4 so it can drive the leveling
 * MRS itself; reg 0x4 carries MR1[13:8] plus a type flag (0x40 for
 * DDR3/DDR4, 0x80 otherwise).  On dual-rank DDR3/DDR4 the other rank's
 * output is disabled through MR1 bit 12 for the duration.
 *
 * Always returns 0; on timeout it prints an error and spins forever.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* hand MR1 of the active fsp to the PHY */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until every enabled byte lane (PHY_0xf) reports done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1568 
/*
 * Fixed 32-byte 0xaa/0x55 data pattern.  Presumably used as test data
 * during training/capacity checks -- no user is visible in this part
 * of the file; confirm before changing.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1575 
/*
 * Automatic read training for one chip select (cs must be 0 or 1).
 *
 * Controller refresh timings are mirrored into the PHY so it can keep
 * the DRAM refreshed while training.  For DDR3 with the rx vref still
 * at its 0x80 default, the vref is temporarily lowered by 0xa and
 * restored afterwards; for DDR4 the per-cs dq map is programmed first.
 *
 * Returns 0 on success, -1 on bad cs, timeout or training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* DDR3 at default vref: temporarily lower rx vref by 0xa */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the default rx vref if it was lowered above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1692 
/*
 * Automatic write training for one chip select.
 *
 * For LPDDR3 at <= 400 MHz the PHY CL/CWL fields are temporarily forced
 * to 8/4 and MR2 is set to 0x6 (both restored at the end).  Controller
 * refresh timings are mirrored into the PHY, a manual refresh is sent,
 * and the PHY's auto write train is started.  For LPDDR4 the trained
 * write vref (plus its range bit) is saved into fsp_param[] for dfs.
 *
 * Returns 0 on success, -1 if the PHY flags a training error; on
 * timeout it prints an error and spins forever.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: force CL=8/CWL=4 and MR2=0x6 for training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the CL/CWL and MR2 values saved above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1805 
1806 static int data_training(struct dram_info *dram, u32 cs,
1807 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1808 			 u32 training_flag)
1809 {
1810 	u32 ret = 0;
1811 
1812 	if (training_flag == FULL_TRAINING)
1813 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1814 				WRITE_TRAINING | READ_TRAINING;
1815 
1816 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1817 		ret = data_training_wl(dram, cs,
1818 				       sdram_params->base.dramtype,
1819 				       sdram_params->ch.cap_info.rank);
1820 		if (ret != 0)
1821 			goto out;
1822 	}
1823 
1824 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1825 		ret = data_training_rg(dram, cs,
1826 				       sdram_params->base.dramtype);
1827 		if (ret != 0)
1828 			goto out;
1829 	}
1830 
1831 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1832 		ret = data_training_rd(dram, cs,
1833 				       sdram_params->base.dramtype,
1834 				       sdram_params->base.ddr_freq);
1835 		if (ret != 0)
1836 			goto out;
1837 	}
1838 
1839 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1840 		ret = data_training_wr(dram, cs,
1841 				       sdram_params->base.dramtype,
1842 				       sdram_params->base.ddr_freq, dst_fsp);
1843 		if (ret != 0)
1844 			goto out;
1845 	}
1846 
1847 out:
1848 	return ret;
1849 }
1850 
1851 static int get_wrlvl_val(struct dram_info *dram,
1852 			 struct rv1126_sdram_params *sdram_params)
1853 {
1854 	u32 i, j, clk_skew;
1855 	void __iomem *phy_base = dram->phy;
1856 	u32 lp_stat;
1857 	int ret;
1858 
1859 	lp_stat = low_power_update(dram, 0);
1860 
1861 	clk_skew = 0x1f;
1862 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1863 			 sdram_params->base.dramtype);
1864 
1865 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1866 	if (sdram_params->ch.cap_info.rank == 2)
1867 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1868 
1869 	for (j = 0; j < 2; j++)
1870 		for (i = 0; i < 4; i++)
1871 			wrlvl_result[j][i] =
1872 				readl(PHY_REG(phy_base,
1873 					      wrlvl_result_offset[j][i])) -
1874 				clk_skew;
1875 
1876 	low_power_update(dram, lp_stat);
1877 
1878 	return ret;
1879 }
1880 
/*
 * Full training sequence at the target frequency for fsp:
 *
 * 1. Center clk/ca de-skew around the average of the write-leveling
 *    results previously collected into wrlvl_result[][].
 * 2. Per rank: seed the write-train dqs defaults (regs 0x233/0x237/
 *    0x2b3/0x2b7) with the leveled values and run gate/read/write
 *    training.
 * 3. Normalize: shift all rx, then tx/ca, de-skew values down by the
 *    minimum found, and redo gate training.
 *
 * Returns 0 on success, non-zero on any training failure.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	int ret;

	/* average leveled dqs delay over all ranks and bytes */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
			dqs_skew += wrlvl_result[j][i];
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       ARRAY_SIZE(wrlvl_result[0]));

	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	/* per-type ca skew relative to clk */
	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* rank 0: seed write-train dqs defaults, then train */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2) {
		/* rank 1: same, with rank-1 leveled values */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift rx de-skew down so the smallest value becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* same for tx/ca, using the smaller of the two minima */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);

	/* redo gate training with the shifted de-skew values */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
1957 
/*
 * Program the detected address-mapping configuration into the memory
 * scheduler (msch) and clear bits [1:0] of GRF noc_con0.
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	/* ddrconfig index selected by calculate_ddrconfig() */
	writel(ddrconfig, &dram->msch->deviceconf);
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}
1963 
/*
 * Copy the per-channel NOC timing parameters from sdram_params into
 * the memory-scheduler (msch) timing registers.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
1979 
1980 static void dram_all_config(struct dram_info *dram,
1981 			    struct rv1126_sdram_params *sdram_params)
1982 {
1983 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1984 	u32 dram_type = sdram_params->base.dramtype;
1985 	void __iomem *pctl_base = dram->pctl;
1986 	u32 sys_reg2 = 0;
1987 	u32 sys_reg3 = 0;
1988 	u64 cs_cap[2];
1989 	u32 cs_pst;
1990 
1991 	set_ddrconfig(dram, cap_info->ddrconfig);
1992 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
1993 			 &sys_reg3, 0);
1994 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
1995 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
1996 
1997 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
1998 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
1999 
2000 	if (cap_info->rank == 2) {
2001 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2002 			6 + 2;
2003 		if (cs_pst > 28)
2004 			cs_cap[0] = 1 << cs_pst;
2005 	}
2006 
2007 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2008 			(((cs_cap[0] >> 20) / 64) & 0xff),
2009 			&dram->msch->devicesize);
2010 	update_noc_timing(dram, sdram_params);
2011 }
2012 
2013 static void enable_low_power(struct dram_info *dram,
2014 			     struct rv1126_sdram_params *sdram_params)
2015 {
2016 	void __iomem *pctl_base = dram->pctl;
2017 	u32 grf_lp_con;
2018 
2019 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2020 
2021 	if (sdram_params->base.dramtype == DDR4)
2022 		grf_lp_con = (0x7 << 16) | (1 << 1);
2023 	else if (sdram_params->base.dramtype == DDR3)
2024 		grf_lp_con = (0x7 << 16) | (1 << 0);
2025 	else
2026 		grf_lp_con = (0x7 << 16) | (1 << 2);
2027 
2028 	/* en lpckdis_en */
2029 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2030 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2031 
2032 	/* enable sr, pd */
2033 	if (dram->pd_idle == 0)
2034 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2035 	else
2036 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2037 	if (dram->sr_idle == 0)
2038 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2039 	else
2040 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2041 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2042 }
2043 
2044 static void ddr_set_atags(struct dram_info *dram,
2045 			  struct rv1126_sdram_params *sdram_params)
2046 {
2047 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2048 	u32 dram_type = sdram_params->base.dramtype;
2049 	void __iomem *pctl_base = dram->pctl;
2050 	struct tag_serial t_serial;
2051 	struct tag_ddr_mem t_ddrmem;
2052 	struct tag_soc_info t_socinfo;
2053 	u64 cs_cap[2];
2054 	u32 cs_pst = 0;
2055 
2056 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2057 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2058 
2059 	memset(&t_serial, 0, sizeof(struct tag_serial));
2060 
2061 	t_serial.version = 0;
2062 	t_serial.enable = 1;
2063 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2064 	t_serial.baudrate = CONFIG_BAUDRATE;
2065 	t_serial.m_mode = SERIAL_M_MODE_M0;
2066 	t_serial.id = 2;
2067 
2068 	atags_destroy();
2069 	atags_set_tag(ATAG_SERIAL,  &t_serial);
2070 
2071 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2072 	if (cap_info->row_3_4) {
2073 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2074 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2075 	}
2076 	t_ddrmem.version = 0;
2077 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2078 	if (cs_cap[1]) {
2079 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2080 			6 + 2;
2081 	}
2082 
2083 	if (cs_cap[1] && cs_pst > 27) {
2084 		t_ddrmem.count = 2;
2085 		t_ddrmem.bank[1] = 1 << cs_pst;
2086 		t_ddrmem.bank[2] = cs_cap[0];
2087 		t_ddrmem.bank[3] = cs_cap[1];
2088 	} else {
2089 		t_ddrmem.count = 1;
2090 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
2091 	}
2092 
2093 	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);
2094 
2095 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2096 	t_socinfo.version = 0;
2097 	t_socinfo.name = 0x1126;
2098 }
2099 
2100 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2101 {
2102 	u32 split;
2103 
2104 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2105 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2106 		split = 0;
2107 	else
2108 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2109 			SPLIT_SIZE_MASK;
2110 
2111 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2112 			     &sdram_params->base, split);
2113 }
2114 
/*
 * One full pass of DDR initialization with the given parameter set:
 * clock/reset staging, PHY and controller (uMCTL2) programming,
 * mode-register writes and initial read-gate training.
 *
 * @post_init: 0 on the first, capacity-probing pass (errors silent,
 *	       CS1 not trained); non-zero on the final pass, where CS1
 *	       is also trained and failures are printed.
 * Return: 0 on success, -1 on training failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* staged reset release: assert all, then drop one per step */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* busy-wait until the controller leaves its init state */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* replay the MR values captured in INIT6/INIT7 to the DRAM */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	/* read-gate training on CS0 must always succeed */
	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/* CS1 is only verified on the final pass, when rank is known */
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* derive VrefDQ from PHY reg 0x105 (39 units per step) */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2203 
/*
 * Probe the DRAM geometry (column/bank/row counts, rank, bus width)
 * and fill sdram_params->ch.cap_info.  For LPDDR4 the density is read
 * from MR8 instead of probed.  Rank and (non-LPDDR4) bus width are
 * detected by attempting read-gate training.
 *
 * Return: 0 on success, -1 if column or row detection fails.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR2/DDR3/LPDDR2/LPDDR3: probe col, bank, dbw */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: MR8[5:2] encodes density (per JEDEC MR8 layout) */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		/* odd density codes are 3/4-row (e.g. 6Gb/12Gb) parts */
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* disable low-power states while training probes run */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank detection: CS1 exists iff its read-gate training passes */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/* bus-width detection: retrain CS0 after PHY soft reset */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		phy_soft_reset(dram);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0)
			cap_info->bw = 2;
		else
			cap_info->bw = 1;
	}

	/* restore the saved low-power configuration */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* assume CS1 mirrors CS0 geometry; refined later if needed */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2299 
2300 static int dram_detect_cs1_row(struct dram_info *dram,
2301 			       struct rv1126_sdram_params *sdram_params,
2302 			       unsigned char channel)
2303 {
2304 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2305 	void __iomem *pctl_base = dram->pctl;
2306 	u32 ret = 0;
2307 	void __iomem *test_addr;
2308 	u32 row, bktmp, coltmp, bw;
2309 	u64 cs0_cap;
2310 	u32 byte_mask;
2311 	u32 cs_pst;
2312 	u32 cs_add = 0;
2313 	u32 max_row;
2314 
2315 	if (cap_info->rank == 2) {
2316 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2317 			6 + 2;
2318 		if (cs_pst < 28)
2319 			cs_add = 1;
2320 
2321 		cs0_cap = 1 << cs_pst;
2322 
2323 		if (sdram_params->base.dramtype == DDR4) {
2324 			if (cap_info->dbw == 0)
2325 				bktmp = cap_info->bk + 2;
2326 			else
2327 				bktmp = cap_info->bk + 1;
2328 		} else {
2329 			bktmp = cap_info->bk;
2330 		}
2331 		bw = cap_info->bw;
2332 		coltmp = cap_info->col;
2333 
2334 		if (bw == 2)
2335 			byte_mask = 0xFFFF;
2336 		else
2337 			byte_mask = 0xFF;
2338 
2339 		max_row = (cs_pst == 31) ? 30 : 31;
2340 
2341 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2342 
2343 		row = (cap_info->cs0_row > max_row) ? max_row :
2344 			cap_info->cs0_row;
2345 
2346 		for (; row > 12; row--) {
2347 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2348 				    (u32)cs0_cap +
2349 				    (1ul << (row + bktmp + coltmp +
2350 					     cs_add + bw - 1ul)));
2351 
2352 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2353 			writel(PATTERN, test_addr);
2354 
2355 			if (((readl(test_addr) & byte_mask) ==
2356 			     (PATTERN & byte_mask)) &&
2357 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2358 			      byte_mask) == 0)) {
2359 				ret = row;
2360 				break;
2361 			}
2362 		}
2363 	}
2364 
2365 	return ret;
2366 }
2367 
/*
 * Two-pass DRAM bring-up: a first sdram_init_() with default geometry,
 * capacity detection, then a second sdram_init_() with the detected
 * parameters, followed by CS1 row refinement.
 *
 * return: 0 = success, other = fail
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;

	/* pass 1: probe-mode init (post_init = 0, errors silent) */
	if (sdram_init_(dram, sdram_params, 0) != 0)
		return -1;

	/* DDR3 sanity check: a simple write/read at the SDRAM base */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* rebuild controller register set for the detected geometry */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	/* pass 2: final init (post_init = 1, CS1 trained, errors printed) */
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* re-encode the refined CS1 row count into the OS regs */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info);

out:
	return ret;
}
2411 
2412 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2413 {
2414 	u32 i;
2415 	u32 offset = 0;
2416 	struct ddr2_3_4_lp2_3_info *ddr_info;
2417 
2418 	if (!freq_mhz) {
2419 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2420 		if (ddr_info)
2421 			freq_mhz =
2422 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2423 				DDR_FREQ_MASK;
2424 		else
2425 			freq_mhz = 0;
2426 	}
2427 
2428 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2429 		if (sdram_configs[i].base.ddr_freq == 0 ||
2430 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2431 			break;
2432 	}
2433 	offset = i == 0 ? 0 : i - 1;
2434 
2435 	return &sdram_configs[offset];
2436 }
2437 
/*
 * Controller (uMCTL2) register offsets that must be re-programmed into
 * the target frequency set-point's register copy before a rate switch;
 * consumed by pre_set_rate().
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2461 
/*
 * PHY register offsets (CL/CWL/AL timing) updated per frequency
 * set-point in pre_set_rate().
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2467 
/*
 * Prepare the target frequency set-point (dst_fsp) before switching:
 * mirror the relevant controller and PHY timing registers from the
 * parameter tables into the FSP's register copy, program drive/ODT,
 * and (for LPDDR4) write the mode registers and shadow them into the
 * PHY MR mirror registers.
 *
 * @dst_fsp_lp4: LPDDR4 FSP operating-point selector used in MR13.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		/* table is 0xFFFFFFFF-terminated; resume scan at 'find' */
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}
	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/* write LPDDR4 MRs from INIT regs; mirror each into the PHY */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13: clear FSP bits [7:6], then select the target FSP */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2583 
/*
 * Capture the effective settings of frequency set-point dst_fsp
 * (ODT/drive strength from the PHY, mode-register fields from the
 * controller's INIT registers, NOC timings) into fsp_param[dst_fsp]
 * so later boot stages can restore them.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4 uses pull-down-only read ODT; others follow the table */
	if (sdram_params->base.dramtype == LPDDR4) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* read back the active ODT/drive values from the PHY registers */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* extract DRAM-side drive/ODT from the MR fields kept in INIT regs */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		/* DDR3 MR1: drive = {A1,A5}, ODT (Rtt_Nom) = {A2,A6,A9} */
		p_fsp_param->ds_pdds = ((temp >> 1) & 0x1) |
				       (((temp >> 5) & 0x1) << 1);
		p_fsp_param->dq_odt = ((temp >> 2) & 0x1) |
				      (((temp >> 6) & 0x1) << 1) |
				      (((temp >> 9) & 0x1) << 2);
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 1) & 0x3;
		p_fsp_param->dq_odt = (temp >> 8) & 0x7;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = (temp >> 3) & 0x7;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & 0x7;
		p_fsp_param->ca_odt = (temp >> 4) & 0x7;

		/* average the trained CA vref windows (per-rank mid-point) */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		/* carry over the range-select bit from PHY reg 0x1e */
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* snapshot the NOC timings used with this set-point */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark this slot as populated */
	p_fsp_param->flag = FSP_FLAG;
}
2700 
2701 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
/*
 * Copy the collected fsp_param[] table into DRAM at
 * FSP_PARAM_STORE_ADDR so later boot stages can pick it up.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
2707 #endif
2708 
/*
 * Switch the DDR to a new frequency using controller set-point
 * dst_fsp: program the target FSP registers, enter software
 * self-refresh, re-lock the DPLL and PHY PLL at the new rate, switch
 * the active set-point, rewrite the mode registers, retrain, and save
 * the resulting parameters.
 *
 * @freq/@cur_freq: target and current frequency in MHz.
 * @dst_fsp_lp4:    LPDDR4 FSP operating point to select via MR13.
 * @training_en:    currently unused in this body (training always runs
 *		    via high_freq_training()) -- NOTE(review): confirm.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* suspend low-power features; remember state to restore later */
	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;

	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* make sure the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/* DDR3: MR1 bit0 set means DLL off; DDR4: inverted polarity */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* if the DLL is currently on, turn it off via MR1 before switching */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* enter software-requested self-refresh and wait until effective */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
	while (1) {
		if (((readl(pctl_base + DDR_PCTL2_STAT) &
		      PCTL2_SELFREF_TYPE_MASK) ==
		     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
		    ((readl(pctl_base + DDR_PCTL2_STAT) &
		      PCTL2_OPERATING_MODE_MASK) ==
		     PCTL2_OPERATING_MODE_SR)) {
			break;
		}
	}

	/* hold DFI signals through the PLL change via the PHY buffer */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both set-points */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate msch and upctl clocks while re-locking the PLLs */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* reset the PHY, set the new clock, and de-reset */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* release the buffer and un-gate the clocks */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* switch the controller and PHY to the target set-point */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	/* leave software self-refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
	       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
		continue;

	/* pulse PHY reg 0x71 bit 5 */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* rewrite the mode registers for the new set-point */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* DLL staying on: issue a DLL reset through MR0 first */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: select the FSP operating point via bit 7 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);

	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
2901 
2902 static void ddr_set_rate_for_fsp(struct dram_info *dram,
2903 				 struct rv1126_sdram_params *sdram_params)
2904 {
2905 	struct ddr2_3_4_lp2_3_info *ddr_info;
2906 	u32 f0;
2907 	u32 dramtype = sdram_params->base.dramtype;
2908 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2909 	u32 f1, f2, f3;
2910 #endif
2911 
2912 	ddr_info = get_ddr_drv_odt_info(dramtype);
2913 	if (!ddr_info)
2914 		return;
2915 
2916 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2917 	     DDR_FREQ_MASK;
2918 
2919 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2920 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
2921 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
2922 
2923 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
2924 	     DDR_FREQ_MASK;
2925 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
2926 	     DDR_FREQ_MASK;
2927 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
2928 	     DDR_FREQ_MASK;
2929 #endif
2930 
2931 	if (get_wrlvl_val(dram, sdram_params))
2932 		printascii("get wrlvl value fail\n");
2933 
2934 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2935 	printascii("change to: ");
2936 	printdec(f1);
2937 	printascii("MHz\n");
2938 	ddr_set_rate(&dram_info, sdram_params, f1,
2939 		     sdram_params->base.ddr_freq, 1, 1, 1);
2940 	printascii("change to: ");
2941 	printdec(f2);
2942 	printascii("MHz\n");
2943 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
2944 	printascii("change to: ");
2945 	printdec(f3);
2946 	printascii("MHz\n");
2947 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
2948 #endif
2949 	printascii("change to: ");
2950 	printdec(f0);
2951 	printascii("MHz(final freq)\n");
2952 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
2953 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
2954 #else
2955 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
2956 #endif
2957 }
2958 
2959 int get_uart_config(void)
2960 {
2961 	struct sdram_head_info_index_v2 *index =
2962 		(struct sdram_head_info_index_v2 *)common_info;
2963 	struct global_info *gbl_info;
2964 
2965 	gbl_info = (struct global_info *)((void *)common_info +
2966 		index->global_index.offset * 4);
2967 
2968 	return gbl_info->uart_info;
2969 }
2970 
2971 /* return: 0 = success, other = fail */
2972 int sdram_init(void)
2973 {
2974 	struct rv1126_sdram_params *sdram_params;
2975 	int ret = 0;
2976 	struct sdram_head_info_index_v2 *index =
2977 		(struct sdram_head_info_index_v2 *)common_info;
2978 	struct global_info *gbl_info;
2979 
2980 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
2981 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
2982 	dram_info.grf = (void *)GRF_BASE_ADDR;
2983 	dram_info.cru = (void *)CRU_BASE_ADDR;
2984 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
2985 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
2986 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
2987 
2988 	if (index->version_info != 2 ||
2989 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
2990 	    (index->ddr3_index.size !=
2991 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
2992 	    (index->ddr4_index.size !=
2993 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
2994 	    (index->lp3_index.size !=
2995 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
2996 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
2997 	    index->global_index.offset == 0 ||
2998 	    index->ddr3_index.offset == 0 ||
2999 	    index->ddr4_index.offset == 0 ||
3000 	    index->lp3_index.offset == 0 ||
3001 	    index->lp4_index.offset == 0) {
3002 		printascii("common info error\n");
3003 		goto error;
3004 	}
3005 
3006 	gbl_info = (struct global_info *)((void *)common_info +
3007 		index->global_index.offset * 4);
3008 
3009 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3010 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3011 
3012 	sdram_params = &sdram_configs[0];
3013 
3014 	if (sdram_params->base.dramtype == DDR3 ||
3015 	    sdram_params->base.dramtype == DDR4) {
3016 		if (DDR_2T_INFO(gbl_info->info_2t))
3017 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3018 		else
3019 			sdram_params->pctl_regs.pctl[0][1] &=
3020 				~(0x1 << 10);
3021 	}
3022 	ret = sdram_init_detect(&dram_info, sdram_params);
3023 	if (ret) {
3024 		sdram_print_dram_type(sdram_params->base.dramtype);
3025 		printascii(", ");
3026 		printdec(sdram_params->base.ddr_freq);
3027 		printascii("MHz\n");
3028 		goto error;
3029 	}
3030 	print_ddr_info(sdram_params);
3031 
3032 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3033 #ifndef CONFIG_ROCKCHIP_THUNDER_BOOT
3034 	copy_fsp_param_to_ddr();
3035 #endif
3036 
3037 	ddr_set_atags(&dram_info, sdram_params);
3038 
3039 	printascii("out\n");
3040 
3041 	return ret;
3042 error:
3043 	printascii("error\n");
3044 	return (-1);
3045 }
3046 #endif /* CONFIG_TPL_BUILD */
3047