// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/hardware.h>
#include <asm/arch/rk_atags.h>
#include <asm/arch/cru_rv1126.h>
#include <asm/arch/grf_rv1126.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_rv1126.h>

/* define training flag */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif

#ifdef CONFIG_TPL_BUILD

struct dram_info {
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;
	u32 pd_idle;
};

#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

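/*
 * Each u16 entry below packs a row/bank/column topology.  The layout is
 * inferred from the matching logic in calculate_ddrconfig():
 *   bit [8]    rank - 1
 *   bits[7:5]  cs0_row - 13
 *   bit [4]    appears to select an alternate address-mapping variant
 *              (only entries 5-7 and 25-26 set it)
 *   bit [3]    1 when the device has 8 banks (bk == 3)
 *   bits[2:0]  bw + col - 10
 */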
/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};

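/*
 * For DDR4 the packing differs (again inferred from calculate_ddrconfig()):
 *   bit [7]    rank - 1
 *   bits[6:4]  cs0_row - 13
 *   bit [3]    set when both ranks share the same row count
 *   bits[2:1]  bus width (bw)
 *   bit [0]    die width (dbw)
 */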
/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

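/*
 * Program the DPLL from the 24 MHz crystal:
 *   Fout = 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2)
 * so with refdiv fixed at 1 the feedback divider below reduces to
 * fbdiv = mhz * postdiv1 * postdiv2 / 24.  For example, a 528 MHz
 * request (1056 MT/s DDR) selects postdiv1 = 4, postdiv2 = 1 and
 * fbdiv = 528 * 4 / 24 = 88, giving 24 * 88 / 4 = 528 MHz.
 */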
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;

	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the inno ddr phy needs freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

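/*
 * The uMCTL2 "quasi-dynamic" registers may only be changed while
 * SWCTL.sw_done is 0.  sw_set_req() opens such a programming window and
 * sw_set_ack() closes it again, polling SWSTAT.sw_done_ack until the
 * controller has taken over the new values.
 */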
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done = 0 to unlock register programming */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done = 1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait for programming done */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
				PCTL2_SW_DONE_ACK)
			break;
	}
}

static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], 9 * 4);

	/* set the unused row bits to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

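/*
 * Configure the PHY PLL.  With wait set, just power the PLL up and spin
 * until the lock bit in PHY register 0x90 is reported; otherwise program
 * the feedback/pre/post dividers for the requested frequency band.
 */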
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

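/*
 * Translate an ODT target in ohms into the LPDDR4 mode-register ODT
 * code, rounding up to the next supported value (e.g. 45 ohm -> 48 ohm).
 */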
static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

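/*
 * Locate the per-DRAM-type drive-strength/ODT parameter block inside the
 * loader-provided common_info blob.  The v2 index header stores offsets
 * in 32-bit words, hence the "* 4" when converting to a byte offset.
 */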
static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

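/*
 * Patch the LPDDR4 VREF(ca) (MR12) and VREF(dq) (MR14) values kept in
 * the controller INIT6/INIT7 registers for the target frequency set
 * point.  The vref numbers from the parameter blob appear to be in
 * 0.1 %VDDQ units (clamped to 100..420, i.e. 10.0%..42.0%); bit 6 of
 * the encoded value selects the VREF range and the remaining bits are
 * the 0.4% step index within that range.
 */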
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (ca_vref < 100)
		ca_vref = 100;
	if (ca_vref > 420)
		ca_vref = 420;

	if (ca_vref <= 300)
		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
	else
		ca_vref = (1 << 6) | (ca_vref - 220) / 4;

	if (dq_vref < 100)
		dq_vref = 100;
	if (dq_vref > 420)
		dq_vref = 420;

	if (dq_vref <= 300)
		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
	else
		dq_vref = (1 << 6) | (dq_vref - 220) / 4;

	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

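/*
 * Convert the drive-strength/ODT targets (in ohms) from the parameter
 * blob into PHY register codes and DRAM mode-register fields for the
 * given frequency set point.  The receiver reference voltages are
 * modeled as a resistor divider: with only the pull-down ODT enabled,
 * vref = odt / (odt + drv) of full scale (128 = mid-rail, i.e. VDDQ/2),
 * while a centered (pull-up plus pull-down) termination keeps the
 * mid-rail default of 0x80.
 */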
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* the dram-odt-enable freq selects phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* the phy-odt-enable freq selects dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	if (dramtype != LPDDR4) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				(phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else { /* for lpddr4 */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

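/*
 * Tell the PHY how the board wires its byte lanes: the byte map for the
 * current DRAM type is pulled out of the dq_map block of common_info and
 * written to PHY register 0x4f (two bits per byte lane, as decoded again
 * in phy_cfg() below).
 */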
static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt is controlled by the phy; enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training, ca vref chooses range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training, PHY_0x7c[5] chooses range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

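/*
 * Toggling RFSHCTL3.refresh_update_level (bit 1) tells the controller to
 * latch the refresh-timing registers into its internal copy.
 */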
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return ret;
}

/* auto-refresh must be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

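/*
 * Software-controlled self-refresh: set or clear PWRCTL.selfref_sw and
 * poll STAT until the DRAM has actually entered (or left) self-refresh.
 */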
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

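/*
 * Copy the per-bit delay values measured by training from the read-back
 * ports (0x2c..0x2f of each address group) into the writable skew
 * registers, using the dq_sel table to pair each readout index with its
 * left/right destination register.
 */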
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for the differential signal: clk
 * delta_sig: value for single-ended signals: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for the differential signal: dqs
 * delta_sig: value for single-ended signals: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

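/*
 * Read-gate training.  For non-LPDDR4 parts the byte ODT is temporarily
 * forced to a known value, gate training is kicked off via PHY register
 * 2 for the selected rank, and the result is read from register 0x91:
 * bit 5 appears to flag a hard error, while the low bits report per-byte
 * results that are XORed against the active byte-lane mask.
 */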
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}

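/*
 * Write leveling for one rank.  MR1 is mirrored into the PHY so it can
 * issue the write-leveling MRS itself; on dual-rank DDR3/DDR4 the other
 * rank's outputs are disabled (MR1 bit 12, Qoff) for the duration of
 * training.
 */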
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable the other cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* re-enable the other cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};

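/*
 * Auto read (eye) training.  The PHY issues refreshes itself while
 * training, so the controller's tREFI/tRFC are mirrored into the PHY
 * first; register 0x93 bit 7 signals completion and 0x240/0x2c0 hold
 * per-channel error flags.
 */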
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only one cs at a time; 0: cs0, 1: cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* enable the auto read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* wait for training done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* check the read training result */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* exit read training */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}

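/*
 * Auto write (eye) training.  A fixed training address is programmed,
 * refresh timings are mirrored into the PHY as for read training, and
 * completion/error state is read from registers 0x92 and 0x90.  For
 * LPDDR3 at <= 400 MHz the PHY's CL/CWL fields are temporarily forced to
 * 8/4 (with MR2 updated to match) and restored once training finishes.
 */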
1726 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1727 			    u32 mhz, u32 dst_fsp)
1728 {
1729 	void __iomem *pctl_base = dram->pctl;
1730 	void __iomem *phy_base = dram->phy;
1731 	u32 trefi_1x, trfc_1x;
1732 	u32 dis_auto_zq = 0;
1733 	u32 timeout_us = 1000;
1734 	u32 cur_fsp;
1735 	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1736 
1737 	if (dramtype == LPDDR3 && mhz <= 400) {
1738 		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1739 		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1740 		cl = readl(PHY_REG(phy_base, offset));
1741 		cwl = readl(PHY_REG(phy_base, offset + 2));
1742 
1743 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1744 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1745 		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1746 	}
1747 
1748 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1749 
1750 	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1751 	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1752 	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1753 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1754 	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1755 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1756 	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1757 	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1758 	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1759 	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1760 
1761 	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1762 	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1763 
1764 	/* config refresh timing */
1765 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1766 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1767 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1768 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1769 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1770 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1771 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1772 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1773 	/* reg_phy_trfc */
1774 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1775 	/* reg_max_refi_cnt */
1776 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1777 
1778 	/* choose training cs: 0x7c[7:6] = 2'b10 for cs0, 2'b01 for cs1 */
1779 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1780 
1781 	/* PHY_0x7a[4] reg_wr_train_dqs_default_bypass */
1782 	/* 0: use the write-leveling value */
1783 	/* 1: use regs 0x233, 0x237, 0x2b3, 0x2b7 */
1784 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1785 
1786 	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1787 	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1788 
1789 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1790 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1791 
1792 	send_a_refresh(dram);
1793 
1794 	while (1) {
1795 		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1796 			break;
1797 
1798 		udelay(1);
1799 		if (timeout_us-- == 0) {
1800 			printascii("error: write training timeout\n");
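			/* note: hangs instead of returning -1 as the read path does */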
1801 			while (1)
1802 				;
1803 		}
1804 	}
1805 
1806 	/* Check the write train state */
1807 	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1808 		printascii("error: write training error\n");
1809 		return -1;
1810 	}
1811 
1812 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1813 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1814 
1815 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1816 
1817 	/* save LPDDR4 write vref to fsp_param for dfs */
1818 	if (dramtype == LPDDR4) {
1819 		fsp_param[dst_fsp].vref_dq[cs] =
1820 			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1821 			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1822 		/* add range info */
1823 		fsp_param[dst_fsp].vref_dq[cs] |=
1824 			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1825 	}
1826 
1827 	if (dramtype == LPDDR3 && mhz <= 400) {
1828 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1829 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1830 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1831 			       DDR_PCTL2_INIT3);
1832 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1833 			      dramtype);
1834 	}
1835 
1836 	return 0;
1837 }
1838 
1839 static int data_training(struct dram_info *dram, u32 cs,
1840 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1841 			 u32 training_flag)
1842 {
1843 	u32 ret = 0;
1844 
1845 	if (training_flag == FULL_TRAINING)
1846 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1847 				WRITE_TRAINING | READ_TRAINING;
1848 
1849 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1850 		ret = data_training_wl(dram, cs,
1851 				       sdram_params->base.dramtype,
1852 				       sdram_params->ch.cap_info.rank);
1853 		if (ret != 0)
1854 			goto out;
1855 	}
1856 
1857 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1858 		ret = data_training_rg(dram, cs,
1859 				       sdram_params->base.dramtype);
1860 		if (ret != 0)
1861 			goto out;
1862 	}
1863 
1864 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1865 		ret = data_training_rd(dram, cs,
1866 				       sdram_params->base.dramtype,
1867 				       sdram_params->base.ddr_freq);
1868 		if (ret != 0)
1869 			goto out;
1870 	}
1871 
1872 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1873 		ret = data_training_wr(dram, cs,
1874 				       sdram_params->base.dramtype,
1875 				       sdram_params->base.ddr_freq, dst_fsp);
1876 		if (ret != 0)
1877 			goto out;
1878 	}
1879 
1880 out:
1881 	return ret;
1882 }
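/*
 * Example: FULL_TRAINING expands to WL + RG + RD + WR, and each chip
 * select is trained in turn on a dual-rank part, e.g.:
 *   data_training(dram, 0, sdram_params, 0, FULL_TRAINING);
 *   data_training(dram, 1, sdram_params, 0, FULL_TRAINING);
 */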
1883 
1884 static int get_wrlvl_val(struct dram_info *dram,
1885 			 struct rv1126_sdram_params *sdram_params)
1886 {
1887 	u32 i, j, clk_skew;
1888 	void __iomem *phy_base = dram->phy;
1889 	u32 lp_stat;
1890 	int ret;
1891 
1892 	lp_stat = low_power_update(dram, 0);
1893 
1894 	clk_skew = 0x1f;
1895 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1896 			 sdram_params->base.dramtype);
1897 
1898 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1899 	if (sdram_params->ch.cap_info.rank == 2)
1900 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1901 
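	/* store results relative to the 0x1f clock-skew seed applied above */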
1902 	for (j = 0; j < 2; j++)
1903 		for (i = 0; i < 4; i++)
1904 			wrlvl_result[j][i] =
1905 				readl(PHY_REG(phy_base,
1906 					      wrlvl_result_offset[j][i])) -
1907 				clk_skew;
1908 
1909 	low_power_update(dram, lp_stat);
1910 
1911 	return ret;
1912 }
1913 
1914 static int high_freq_training(struct dram_info *dram,
1915 			      struct rv1126_sdram_params *sdram_params,
1916 			      u32 fsp)
1917 {
1918 	u32 i, j;
1919 	void __iomem *phy_base = dram->phy;
1920 	u32 dramtype = sdram_params->base.dramtype;
1921 	int min_val;
1922 	int dqs_skew, clk_skew, ca_skew;
1923 	int ret;
1924 
1925 	dqs_skew = 0;
1926 	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
1927 		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
1928 			dqs_skew += wrlvl_result[j][i];
1929 	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
1930 			       ARRAY_SIZE(wrlvl_result[0]));
1931 
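	/*
	 * Re-center: map the average write-leveling result to mid-scale
	 * (0x20) by folding the difference into the clock skew.
	 */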
1932 	clk_skew = 0x20 - dqs_skew;
1933 	dqs_skew = 0x20;
1934 
1935 	if (dramtype == LPDDR4) {
1936 		clk_skew = 0;
1937 		ca_skew = 0;
1938 	} else if (dramtype == LPDDR3) {
1939 		ca_skew = clk_skew - 4;
1940 	} else {
1941 		ca_skew = clk_skew;
1942 	}
1943 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
1944 			 dramtype);
1945 
1946 	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
1947 	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
1948 	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
1949 	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
1950 	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
1951 			    READ_TRAINING | WRITE_TRAINING);
1952 	if (sdram_params->ch.cap_info.rank == 2) {
1953 		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
1954 		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
1955 		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
1956 		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
1957 		ret |= data_training(dram, 1, sdram_params, fsp,
1958 				     READ_GATE_TRAINING | READ_TRAINING |
1959 				     WRITE_TRAINING);
1960 	}
1961 	if (ret)
1962 		goto out;
1963 
1964 	record_dq_prebit(dram);
1965 
1966 	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
1967 				sdram_params->ch.cap_info.rank) * -1;
1968 	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
1969 			 min_val, min_val, sdram_params->ch.cap_info.rank);
1970 
1971 	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
1972 				    sdram_params->ch.cap_info.rank),
1973 		      get_min_value(dram, SKEW_CA_SIGNAL,
1974 				    sdram_params->ch.cap_info.rank)) * -1;
1975 
1976 	/* clk = 0; rx: all skews -7; tx: -min_value */
1977 	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
1978 			 dramtype);
1979 
1980 	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
1981 			 min_val, min_val, sdram_params->ch.cap_info.rank);
1982 
1983 	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
1984 	if (sdram_params->ch.cap_info.rank == 2)
1985 		ret |= data_training(dram, 1, sdram_params, 0,
1986 				     READ_GATE_TRAINING);
1987 out:
1988 	return ret;
1989 }
1990 
1991 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
1992 {
1993 	writel(ddrconfig, &dram->msch->deviceconf);
1994 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
1995 }
1996 
1997 static void update_noc_timing(struct dram_info *dram,
1998 			      struct rv1126_sdram_params *sdram_params)
1999 {
2000 	void __iomem *pctl_base = dram->pctl;
2001 	u32 bw, bl;
2002 
2003 	bw = 8 << sdram_params->ch.cap_info.bw;
2004 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
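	/* MSTR[19:16] burst_rdwr holds BL/2, hence the "* 2" */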
2005 
2006 	/* NoC burstsize = log2(bytes per DRAM burst / 16): bw/8 bytes x bl beats */
2007 	if ((bw / 8 * bl) == 16)
2008 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2009 	else if ((bw / 8 * bl) == 32)
2010 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2011 	else if ((bw / 8 * bl) == 64)
2012 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2013 	else
2014 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2015 
2016 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2017 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2018 
2019 	if (sdram_params->base.dramtype == LPDDR4) {
2020 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2021 			(bw == 16) ? 0x1 : 0x2;
2022 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2023 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2024 	}
2025 
2026 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2027 	       &dram->msch->ddrtiminga0);
2028 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2029 	       &dram->msch->ddrtimingb0);
2030 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2031 	       &dram->msch->ddrtimingc0);
2032 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2033 	       &dram->msch->devtodev0);
2034 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2035 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2036 	       &dram->msch->ddr4timing);
2037 }
2038 
2039 static void dram_all_config(struct dram_info *dram,
2040 			    struct rv1126_sdram_params *sdram_params)
2041 {
2042 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2043 	u32 dram_type = sdram_params->base.dramtype;
2044 	void __iomem *pctl_base = dram->pctl;
2045 	u32 sys_reg2 = 0;
2046 	u32 sys_reg3 = 0;
2047 	u64 cs_cap[2];
2048 	u32 cs_pst;
2049 
2050 	set_ddrconfig(dram, cap_info->ddrconfig);
2051 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2052 			 &sys_reg3, 0);
2053 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2054 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2055 
2056 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2057 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2058 
2059 	if (cap_info->rank == 2) {
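		/*
		 * cs_pst: byte-address bit that selects rank 1, i.e.
		 * ADDRMAP0.cs_bit0 plus the HIF internal base (6) plus 2
		 * for the word-to-byte conversion.
		 */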
2060 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2061 			6 + 2;
2062 		if (cs_pst > 28)
2063 			cs_cap[0] = 1 << cs_pst;
2064 	}
2065 
2066 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2067 			(((cs_cap[0] >> 20) / 64) & 0xff),
2068 			&dram->msch->devicesize);
2069 	update_noc_timing(dram, sdram_params);
2070 }
2071 
2072 static void enable_low_power(struct dram_info *dram,
2073 			     struct rv1126_sdram_params *sdram_params)
2074 {
2075 	void __iomem *pctl_base = dram->pctl;
2076 	u32 grf_lp_con;
2077 
2078 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2079 
2080 	if (sdram_params->base.dramtype == DDR4)
2081 		grf_lp_con = (0x7 << 16) | (1 << 1);
2082 	else if (sdram_params->base.dramtype == DDR3)
2083 		grf_lp_con = (0x7 << 16) | (1 << 0);
2084 	else
2085 		grf_lp_con = (0x7 << 16) | (1 << 2);
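	/*
	 * GRF write-mask convention: the high 16 bits gate writes to the
	 * corresponding low bits; [2:0] selects the low-power-mode bit
	 * matching the DRAM type.
	 */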
2086 
2087 	/* also enable lpckdis_en (bit 9) */
2088 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2089 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2090 
2091 	/* enable self-refresh and power-down according to the idle settings */
2092 	if (dram->pd_idle == 0)
2093 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2094 	else
2095 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2096 	if (dram->sr_idle == 0)
2097 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2098 	else
2099 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2100 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2101 }
2102 
2103 static void ddr_set_atags(struct dram_info *dram,
2104 			  struct rv1126_sdram_params *sdram_params)
2105 {
2106 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2107 	u32 dram_type = sdram_params->base.dramtype;
2108 	void __iomem *pctl_base = dram->pctl;
2109 	struct tag_serial t_serial;
2110 	struct tag_ddr_mem t_ddrmem;
2111 	struct tag_soc_info t_socinfo;
2112 	u64 cs_cap[2];
2113 	u32 cs_pst = 0;
2114 
2115 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2116 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2117 
2118 	memset(&t_serial, 0, sizeof(struct tag_serial));
2119 
2120 	t_serial.version = 0;
2121 	t_serial.enable = 1;
2122 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2123 	t_serial.baudrate = CONFIG_BAUDRATE;
2124 	t_serial.m_mode = SERIAL_M_MODE_M0;
2125 	t_serial.id = 2;
2126 
2127 	atags_destroy();
2128 	atags_set_tag(ATAG_SERIAL, &t_serial);
2129 
2130 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2131 	if (cap_info->row_3_4) {
2132 		cs_cap[0] = cs_cap[0] * 3 / 4;
2133 		cs_cap[1] = cs_cap[1] * 3 / 4;
2134 	}
2135 	t_ddrmem.version = 0;
2136 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2137 	if (cs_cap[1]) {
2138 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2139 			6 + 2;
2140 	}
2141 
2142 	if (cs_cap[1] && cs_pst > 27) {
2143 		t_ddrmem.count = 2;
2144 		t_ddrmem.bank[1] = 1 << cs_pst;
2145 		t_ddrmem.bank[2] = cs_cap[0];
2146 		t_ddrmem.bank[3] = cs_cap[1];
2147 	} else {
2148 		t_ddrmem.count = 1;
2149 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
2150 	}
2151 
2152 	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);
2153 
2154 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2155 	t_socinfo.version = 0;
2156 	t_socinfo.name = 0x1126;
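	/* note: t_socinfo is populated but never passed to atags_set_tag() */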
2157 }
2158 
2159 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2160 {
2161 	u32 split;
2162 
2163 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2164 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2165 		split = 0;
2166 	else
2167 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2168 			SPLIT_SIZE_MASK;
2169 
2170 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2171 			     &sdram_params->base, split);
2172 }
2173 
2174 static int sdram_init_(struct dram_info *dram,
2175 		       struct rv1126_sdram_params *sdram_params, u32 post_init)
2176 {
2177 	void __iomem *pctl_base = dram->pctl;
2178 	void __iomem *phy_base = dram->phy;
2179 	u32 ddr4_vref;
2180 	u32 mr_tmp;
2181 
2182 	rkclk_configure_ddr(dram, sdram_params);
2183 
2184 	rkclk_ddr_reset(dram, 1, 1, 1, 1);
2185 	udelay(10);
2186 
2187 	rkclk_ddr_reset(dram, 1, 1, 1, 0);
2188 	phy_cfg(dram, sdram_params);
2189 
2190 	rkclk_ddr_reset(dram, 1, 1, 0, 0);
2191 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2192 
2193 	rkclk_ddr_reset(dram, 1, 0, 0, 0);
2194 	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2195 		 dram->sr_idle, dram->pd_idle);
2196 
2197 	if (sdram_params->ch.cap_info.bw == 2)
2198 		/* 32-bit interface uses pageclose */
2199 		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2200 	else
2201 		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2202 
2203 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2204 	u32 tmp, trefi;
2205 
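	/* halve tREFI (double the refresh rate) for extended temperature */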
2206 	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2207 	trefi = (tmp >> 16) & 0xfff;
2208 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2209 	       pctl_base + DDR_PCTL2_RFSHTMG);
2210 #endif
2211 
2212 	/* set frequency_mode */
2213 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2214 	/* set target_frequency to Frequency 0 */
2215 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2216 
2217 	set_ds_odt(dram, sdram_params, 0);
2218 	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2219 	set_ctl_address_map(dram, sdram_params);
2220 
2221 	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2222 
2223 	rkclk_ddr_reset(dram, 0, 0, 0, 0);
2224 
2225 	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
2226 		continue;
2227 
2228 	if (sdram_params->base.dramtype == LPDDR3) {
2229 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2230 	} else if (sdram_params->base.dramtype == LPDDR4) {
2231 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2232 		/* MR11 */
2233 		pctl_write_mr(dram->pctl, 3, 11,
2234 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2235 			      LPDDR4);
2236 		/* MR12 */
2237 		pctl_write_mr(dram->pctl, 3, 12,
2238 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2239 			      LPDDR4);
2240 
2241 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2242 		/* MR22 */
2243 		pctl_write_mr(dram->pctl, 3, 22,
2244 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2245 			      LPDDR4);
2246 		/* MR14 */
2247 		pctl_write_mr(dram->pctl, 3, 14,
2248 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2249 			      LPDDR4);
2250 	}
2251 
2252 	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
2253 		if (post_init != 0)
2254 			printascii("DTT cs0 error\n");
2255 		return -1;
2256 	}
2257 
2258 	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2259 		if (data_training(dram, 1, sdram_params, 0,
2260 				  READ_GATE_TRAINING) != 0) {
2261 			printascii("DTT cs1 error\n");
2262 			return -1;
2263 		}
2264 	}
2265 
2266 	if (sdram_params->base.dramtype == DDR4) {
2267 		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2268 		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2269 				  sdram_params->base.dramtype);
2270 	}
2271 
2272 	dram_all_config(dram, sdram_params);
2273 	enable_low_power(dram, sdram_params);
2274 
2275 	return 0;
2276 }
2277 
2278 static u64 dram_detect_cap(struct dram_info *dram,
2279 			   struct rv1126_sdram_params *sdram_params,
2280 			   unsigned char channel)
2281 {
2282 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2283 	void __iomem *pctl_base = dram->pctl;
2284 	void __iomem *phy_base = dram->phy;
2285 	u32 mr8;
2286 
2287 	u32 bktmp;
2288 	u32 coltmp;
2289 	u32 rowtmp;
2290 	u32 cs;
2291 	u32 bw = 1;
2292 	u32 dram_type = sdram_params->base.dramtype;
2293 	u32 pwrctl;
2294 
2295 	cap_info->bw = bw;
2296 	if (dram_type != LPDDR4) {
2297 		if (dram_type != DDR4) {
2298 			coltmp = 12;
2299 			bktmp = 3;
2300 			if (dram_type == LPDDR2)
2301 				rowtmp = 15;
2302 			else
2303 				rowtmp = 16;
2304 
2305 			if (sdram_detect_col(cap_info, coltmp) != 0)
2306 				goto cap_err;
2307 
2308 			sdram_detect_bank(cap_info, coltmp, bktmp);
2309 			sdram_detect_dbw(cap_info, dram_type);
2310 		} else {
2311 			coltmp = 10;
2312 			bktmp = 4;
2313 			rowtmp = 17;
2314 
2315 			cap_info->col = 10;
2316 			cap_info->bk = 2;
2317 			sdram_detect_bg(cap_info, coltmp);
2318 		}
2319 
2320 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2321 			goto cap_err;
2322 
2323 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2324 	} else {
2325 		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
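		/*
		 * Decode density from MR8 op[5:2]: every second code doubles
		 * the row count and odd codes are 3/4-row parts.
		 */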
2326 		cap_info->col = 10;
2327 		cap_info->bk = 3;
2328 		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2329 		if (mr8 % 2)
2330 			cap_info->row_3_4 = 1;
2331 		else
2332 			cap_info->row_3_4 = 0;
2333 		cap_info->dbw = 1;
2334 		cap_info->bw = 2;
2335 	}
2336 
2337 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2338 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2339 
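	/* probe cs1: if read-gate training passes, a second rank is present */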
2340 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2341 		cs = 1;
2342 	else
2343 		cs = 0;
2344 	cap_info->rank = cs + 1;
2345 
2346 	if (dram_type != LPDDR4) {
2347 		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
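		/*
		 * 0xf presumably re-enables all PHY byte lanes; retraining
		 * cs0 at full width then tells a 32-bit bus (bw = 2) from a
		 * 16-bit one.
		 */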
2348 
2349 		if (data_training(dram, 0, sdram_params, 0,
2350 				  READ_GATE_TRAINING) == 0)
2351 			cap_info->bw = 2;
2352 		else
2353 			cap_info->bw = 1;
2354 	}
2355 
2356 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2357 
2358 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2359 	if (cs) {
2360 		cap_info->cs1_row = cap_info->cs0_row;
2361 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2362 	} else {
2363 		cap_info->cs1_row = 0;
2364 		cap_info->cs1_high16bit_row = 0;
2365 	}
2366 
2367 	return 0;
2368 cap_err:
2369 	return -1;
2370 }
2371 
2372 static int dram_detect_cs1_row(struct dram_info *dram,
2373 			       struct rv1126_sdram_params *sdram_params,
2374 			       unsigned char channel)
2375 {
2376 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2377 	void __iomem *pctl_base = dram->pctl;
2378 	u32 ret = 0;
2379 	void __iomem *test_addr;
2380 	u32 row, bktmp, coltmp, bw;
2381 	u64 cs0_cap;
2382 	u32 byte_mask;
2383 	u32 cs_pst;
2384 	u32 cs_add = 0;
2385 	u32 max_row;
2386 
2387 	if (cap_info->rank == 2) {
2388 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2389 			6 + 2;
2390 		if (cs_pst < 28)
2391 			cs_add = 1;
2392 
2393 		cs0_cap = 1 << cs_pst;
2394 
2395 		if (sdram_params->base.dramtype == DDR4) {
2396 			if (cap_info->dbw == 0)
2397 				bktmp = cap_info->bk + 2;
2398 			else
2399 				bktmp = cap_info->bk + 1;
2400 		} else {
2401 			bktmp = cap_info->bk;
2402 		}
2403 		bw = cap_info->bw;
2404 		coltmp = cap_info->col;
2405 
2406 		if (bw == 2)
2407 			byte_mask = 0xFFFF;
2408 		else
2409 			byte_mask = 0xFF;
2410 
2411 		max_row = (cs_pst == 31) ? 30 : 31;
2412 
2413 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2414 
2415 		row = (cap_info->cs0_row > max_row) ? max_row :
2416 			cap_info->cs0_row;
2417 
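		/*
		 * Walk the top row bit down: write a pattern at
		 * cs1 base + 2^bit and zero at cs1 base; if both read back
		 * intact, that address bit is wired and gives the cs1 rows.
		 */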
2418 		for (; row > 12; row--) {
2419 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2420 				    (u32)cs0_cap +
2421 				    (1ul << (row + bktmp + coltmp +
2422 					     cs_add + bw - 1ul)));
2423 
2424 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2425 			writel(PATTERN, test_addr);
2426 
2427 			if (((readl(test_addr) & byte_mask) ==
2428 			     (PATTERN & byte_mask)) &&
2429 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2430 			      byte_mask) == 0)) {
2431 				ret = row;
2432 				break;
2433 			}
2434 		}
2435 	}
2436 
2437 	return ret;
2438 }
2439 
2440 /* return: 0 = success, other = fail */
2441 static int sdram_init_detect(struct dram_info *dram,
2442 			     struct rv1126_sdram_params *sdram_params)
2443 {
2444 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2445 	u32 ret;
2446 	u32 sys_reg = 0;
2447 	u32 sys_reg3 = 0;
2448 	struct sdram_head_info_index_v2 *index =
2449 		(struct sdram_head_info_index_v2 *)common_info;
2450 	struct dq_map_info *map_info;
2451 
2452 	map_info = (struct dq_map_info *)((void *)common_info +
2453 		index->dq_map_index.offset * 4);
2454 
2455 	if (sdram_init_(dram, sdram_params, 0)) {
2456 		if (sdram_params->base.dramtype == DDR3) {
2457 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2458 					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2459 					(0x0 << 0)) << 24);
2460 			if (sdram_init_(dram, sdram_params, 0))
2461 				return -1;
2462 		} else {
2463 			return -1;
2464 		}
2465 	}
2466 
2467 	if (sdram_params->base.dramtype == DDR3) {
2468 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2469 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2470 			return -1;
2471 	}
2472 
2473 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2474 		return -1;
2475 
2476 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2477 				   sdram_params->base.dramtype);
2478 	ret = sdram_init_(dram, sdram_params, 1);
2479 	if (ret != 0)
2480 		goto out;
2481 
2482 	cap_info->cs1_row =
2483 		dram_detect_cs1_row(dram, sdram_params, 0);
2484 	if (cap_info->cs1_row) {
2485 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2486 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2487 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2488 				    sys_reg, sys_reg3, 0);
2489 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2490 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2491 	}
2492 
2493 	sdram_detect_high_row(cap_info);
2494 
2495 out:
2496 	return ret;
2497 }
2498 
2499 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2500 {
2501 	u32 i;
2502 	u32 offset = 0;
2503 	struct ddr2_3_4_lp2_3_info *ddr_info;
2504 
2505 	if (!freq_mhz) {
2506 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2507 		if (ddr_info)
2508 			freq_mhz =
2509 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2510 				DDR_FREQ_MASK;
2511 		else
2512 			freq_mhz = 0;
2513 	}
2514 
2515 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2516 		if (sdram_configs[i].base.ddr_freq == 0 ||
2517 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2518 			break;
2519 	}
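	/* pick the fastest table entry not above the requested frequency */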
2520 	offset = i == 0 ? 0 : i - 1;
2521 
2522 	return &sdram_configs[offset];
2523 }
2524 
2525 static const u16 pctl_need_update_reg[] = {
2526 	DDR_PCTL2_RFSHTMG,
2527 	DDR_PCTL2_INIT3,
2528 	DDR_PCTL2_INIT4,
2529 	DDR_PCTL2_INIT6,
2530 	DDR_PCTL2_INIT7,
2531 	DDR_PCTL2_DRAMTMG0,
2532 	DDR_PCTL2_DRAMTMG1,
2533 	DDR_PCTL2_DRAMTMG2,
2534 	DDR_PCTL2_DRAMTMG3,
2535 	DDR_PCTL2_DRAMTMG4,
2536 	DDR_PCTL2_DRAMTMG5,
2537 	DDR_PCTL2_DRAMTMG6,
2538 	DDR_PCTL2_DRAMTMG7,
2539 	DDR_PCTL2_DRAMTMG8,
2540 	DDR_PCTL2_DRAMTMG9,
2541 	DDR_PCTL2_DRAMTMG12,
2542 	DDR_PCTL2_DRAMTMG13,
2543 	DDR_PCTL2_DRAMTMG14,
2544 	DDR_PCTL2_ZQCTL0,
2545 	DDR_PCTL2_DFITMG0,
2546 	DDR_PCTL2_ODTCFG
2547 };
2548 
2549 static const u16 phy_need_update_reg[] = {
2550 	0x14,
2551 	0x18,
2552 	0x1c
2553 };
2554 
2555 static void pre_set_rate(struct dram_info *dram,
2556 			 struct rv1126_sdram_params *sdram_params,
2557 			 u32 dst_fsp, u32 dst_fsp_lp4)
2558 {
2559 	u32 i, j, find;
2560 	void __iomem *pctl_base = dram->pctl;
2561 	void __iomem *phy_base = dram->phy;
2562 	u32 phy_offset;
2563 	u32 mr_tmp;
2564 	u32 dramtype = sdram_params->base.dramtype;
2565 
2566 	sw_set_req(dram);
2567 	/* pctl timing update */
2568 	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2569 		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2570 		     j++) {
2571 			if (sdram_params->pctl_regs.pctl[j][0] ==
2572 			    pctl_need_update_reg[i]) {
2573 				writel(sdram_params->pctl_regs.pctl[j][1],
2574 				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2575 				       pctl_need_update_reg[i]);
2576 				find = j;
2577 				break;
2578 			}
2579 		}
2580 	}
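	/* assumes both lists share the same ordering, so the scan resumes at "find" */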
2581 
2582 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2583 	u32 tmp, trefi;
2584 
2585 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2586 	trefi = (tmp >> 16) & 0xfff;
2587 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2588 	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2589 #endif
2590 
2591 	sw_set_ack(dram);
2592 
2593 	/* phy timing update */
2594 	if (dst_fsp == 0)
2595 		phy_offset = 0;
2596 	else
2597 		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2598 	/* CL/CWL/AL update */
2599 	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2600 		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2601 		     j++) {
2602 			if (sdram_params->phy_regs.phy[j][0] ==
2603 			    phy_need_update_reg[i]) {
2604 				writel(sdram_params->phy_regs.phy[j][1],
2605 				       phy_base + phy_offset +
2606 				       phy_need_update_reg[i]);
2607 				find = j;
2608 				break;
2609 			}
2610 		}
2611 	}
2612 
2613 	set_ds_odt(dram, sdram_params, dst_fsp);
2614 	if (dramtype == LPDDR4) {
2615 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2616 			       DDR_PCTL2_INIT4);
2617 		/* MR13 */
2618 		pctl_write_mr(dram->pctl, 3, 13,
2619 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2620 			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2621 			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2622 		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2623 				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2624 				      ((0x2 << 6) >> dst_fsp_lp4),
2625 				       PHY_REG(phy_base, 0x1b));
2626 		/* MR3 */
2627 		pctl_write_mr(dram->pctl, 3, 3,
2628 			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2629 			      PCTL2_MR_MASK,
2630 			      dramtype);
2631 		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2632 		       PHY_REG(phy_base, 0x19));
2633 
2634 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2635 			       DDR_PCTL2_INIT3);
2636 		/* MR1 */
2637 		pctl_write_mr(dram->pctl, 3, 1,
2638 			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
2639 			      PCTL2_MR_MASK,
2640 			      dramtype);
2641 		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
2642 		       PHY_REG(phy_base, 0x17));
2643 		/* MR2 */
2644 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
2645 			      dramtype);
2646 		writel(mr_tmp & PCTL2_MR_MASK,
2647 		       PHY_REG(phy_base, 0x18));
2648 
2649 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2650 			       DDR_PCTL2_INIT6);
2651 		/* MR11 */
2652 		pctl_write_mr(dram->pctl, 3, 11,
2653 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2654 			      dramtype);
2655 		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2656 		       PHY_REG(phy_base, 0x1a));
2657 		/* MR12 */
2658 		pctl_write_mr(dram->pctl, 3, 12,
2659 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2660 			      dramtype);
2661 
2662 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2663 			       DDR_PCTL2_INIT7);
2664 		/* MR22 */
2665 		pctl_write_mr(dram->pctl, 3, 22,
2666 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2667 			      dramtype);
2668 		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2669 		       PHY_REG(phy_base, 0x1d));
2670 		/* MR14 */
2671 		pctl_write_mr(dram->pctl, 3, 14,
2672 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2673 			      dramtype);
2674 		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2675 		       PHY_REG(phy_base, 0x1c));
2676 	}
2677 
2678 	update_noc_timing(dram, sdram_params);
2679 }
2680 
2681 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
2682 			   struct rv1126_sdram_params *sdram_params)
2683 {
2684 	void __iomem *pctl_base = dram->pctl;
2685 	void __iomem *phy_base = dram->phy;
2686 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
2687 	u32 temp, temp1;
2688 	struct ddr2_3_4_lp2_3_info *ddr_info;
2689 
2690 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
2691 
2692 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
2693 
2694 	if (sdram_params->base.dramtype == LPDDR4) {
2695 		p_fsp_param->rd_odt_up_en = 0;
2696 		p_fsp_param->rd_odt_down_en = 1;
2697 	} else {
2698 		p_fsp_param->rd_odt_up_en =
2699 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
2700 		p_fsp_param->rd_odt_down_en =
2701 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
2702 	}
2703 
2704 	if (p_fsp_param->rd_odt_up_en)
2705 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
2706 	else if (p_fsp_param->rd_odt_down_en)
2707 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
2708 	else
2709 		p_fsp_param->rd_odt = 0;
2710 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
2711 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
2712 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
2713 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
2714 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
2715 
2716 	if (sdram_params->base.dramtype == DDR3) {
2717 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2718 			     DDR_PCTL2_INIT3);
2719 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2720 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
2721 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
2722 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2723 	} else if (sdram_params->base.dramtype == DDR4) {
2724 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2725 			     DDR_PCTL2_INIT3);
2726 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2727 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
2728 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
2729 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2730 	} else if (sdram_params->base.dramtype == LPDDR3) {
2731 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2732 			     DDR_PCTL2_INIT4);
2733 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2734 		p_fsp_param->ds_pdds = temp & 0xf;
2735 
2736 		p_fsp_param->dq_odt = lp3_odt_value;
2737 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2738 	} else if (sdram_params->base.dramtype == LPDDR4) {
2739 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2740 			     DDR_PCTL2_INIT4);
2741 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2742 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
2743 
2744 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2745 			     DDR_PCTL2_INIT6);
2746 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
2747 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
2748 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
2749 
2750 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
2751 			   readl(PHY_REG(phy_base, 0x3ce)));
2752 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
2753 			    readl(PHY_REG(phy_base, 0x3de)));
2754 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
2755 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
2756 			   readl(PHY_REG(phy_base, 0x3cf)));
2757 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
2758 			    readl(PHY_REG(phy_base, 0x3df)));
2759 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
2760 		p_fsp_param->vref_ca[0] |=
2761 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2762 		p_fsp_param->vref_ca[1] |=
2763 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2764 
2765 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
2766 					      3) & 0x1;
2767 	}
2768 
2769 	p_fsp_param->noc_timings.ddrtiminga0 =
2770 		sdram_params->ch.noc_timings.ddrtiminga0;
2771 	p_fsp_param->noc_timings.ddrtimingb0 =
2772 		sdram_params->ch.noc_timings.ddrtimingb0;
2773 	p_fsp_param->noc_timings.ddrtimingc0 =
2774 		sdram_params->ch.noc_timings.ddrtimingc0;
2775 	p_fsp_param->noc_timings.devtodev0 =
2776 		sdram_params->ch.noc_timings.devtodev0;
2777 	p_fsp_param->noc_timings.ddrmode =
2778 		sdram_params->ch.noc_timings.ddrmode;
2779 	p_fsp_param->noc_timings.ddr4timing =
2780 		sdram_params->ch.noc_timings.ddr4timing;
2781 	p_fsp_param->noc_timings.agingx0 =
2782 		sdram_params->ch.noc_timings.agingx0;
2783 	p_fsp_param->noc_timings.aging0 =
2784 		sdram_params->ch.noc_timings.aging0;
2785 	p_fsp_param->noc_timings.aging1 =
2786 		sdram_params->ch.noc_timings.aging1;
2787 	p_fsp_param->noc_timings.aging2 =
2788 		sdram_params->ch.noc_timings.aging2;
2789 	p_fsp_param->noc_timings.aging3 =
2790 		sdram_params->ch.noc_timings.aging3;
2791 
2792 	p_fsp_param->flag = FSP_FLAG;
2793 }
2794 
2795 #ifndef CONFIG_SPL_KERNEL_BOOT
2796 static void copy_fsp_param_to_ddr(void)
2797 {
2798 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
2799 	       sizeof(fsp_param));
2800 }
2801 #endif
2802 
2803 void ddr_set_rate(struct dram_info *dram,
2804 		  struct rv1126_sdram_params *sdram_params,
2805 		  u32 freq, u32 cur_freq, u32 dst_fsp,
2806 		  u32 dst_fsp_lp4, u32 training_en)
2807 {
2808 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
2809 	u32 mr_tmp;
2810 	u32 lp_stat;
2811 	u32 dramtype = sdram_params->base.dramtype;
2812 	struct rv1126_sdram_params *sdram_params_new;
2813 	void __iomem *pctl_base = dram->pctl;
2814 	void __iomem *phy_base = dram->phy;
2815 
2816 	lp_stat = low_power_update(dram, 0);
2817 	sdram_params_new = get_default_sdram_config(freq);
2818 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
2819 	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
2820 
2821 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
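	/*
	 * Switch sequence: set DLL-off mode as needed, enter self-refresh,
	 * re-lock the DPLL and PHY PLL at the new rate, switch to the
	 * target FSP, exit self-refresh, rewrite the mode registers and
	 * retrain.
	 */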
2822 
2823 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
2824 			 PCTL2_OPERATING_MODE_MASK) ==
2825 			 PCTL2_OPERATING_MODE_SR)
2826 		continue;
2827 
2828 	dest_dll_off = 0;
2829 	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2830 			  DDR_PCTL2_INIT3);
2831 	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
2832 	    (dramtype == DDR4 && !(dst_init3 & 1)))
2833 		dest_dll_off = 1;
2834 
2835 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
2836 	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
2837 			  DDR_PCTL2_INIT3);
2838 	cur_init3 &= PCTL2_MR_MASK;
2839 	cur_dll_off = 1;
2840 	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
2841 	    (dramtype == DDR4 && (cur_init3 & 1)))
2842 		cur_dll_off = 0;
2843 
2844 	if (!cur_dll_off) {
2845 		if (dramtype == DDR3)
2846 			cur_init3 |= 1;
2847 		else
2848 			cur_init3 &= ~1;
2849 		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
2850 	}
2851 
2852 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
2853 		     PCTL2_DIS_AUTO_REFRESH);
2854 	update_refresh_reg(dram);
2855 
2856 	enter_sr(dram, 1);
2857 
2858 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
2859 	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
2860 	       &dram->pmugrf->soc_con[0]);
2861 	sw_set_req(dram);
2862 	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
2863 		     PCTL2_DFI_INIT_COMPLETE_EN);
2864 	sw_set_ack(dram);
2865 
2866 	sw_set_req(dram);
2867 	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
2868 		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
2869 	else
2870 		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
2871 
2872 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
2873 		     PCTL2_DIS_SRX_ZQCL);
2874 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
2875 		     PCTL2_DIS_SRX_ZQCL);
2876 	sw_set_ack(dram);
2877 
2878 	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
2879 	       &dram->cru->clkgate_con[21]);
2880 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
2881 					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
2882 					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
2883 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
2884 
2885 	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
2886 	rkclk_set_dpll(dram, freq * MHz / 2);
2887 	phy_pll_set(dram, freq * MHz, 0);
2888 	phy_pll_set(dram, freq * MHz, 1);
2889 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
2890 
2891 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
2892 			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
2893 			&dram->pmugrf->soc_con[0]);
2894 	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
2895 	       &dram->cru->clkgate_con[21]);
2896 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
2897 					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
2898 					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
2899 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
2900 	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
2901 	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
2902 		continue;
2903 
2904 	sw_set_req(dram);
2905 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2906 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
2907 	sw_set_ack(dram);
2908 	update_refresh_reg(dram);
2909 	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
2910 
2911 	enter_sr(dram, 0);
2912 
2913 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
2914 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
2915 
2916 	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
2917 	if (dramtype == LPDDR3) {
2918 		pctl_write_mr(dram->pctl, 3, 1,
2919 			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
2920 			      PCTL2_MR_MASK,
2921 			      dramtype);
2922 		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
2923 			      dramtype);
2924 		pctl_write_mr(dram->pctl, 3, 3,
2925 			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
2926 			      PCTL2_MR_MASK,
2927 			      dramtype);
2928 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
2929 	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
2930 		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
2931 			      dramtype);
2932 		if (!dest_dll_off) {
2933 			pctl_write_mr(dram->pctl, 3, 0,
2934 				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
2935 				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
2936 				      dramtype);
2937 			udelay(2);
2938 		}
2939 		pctl_write_mr(dram->pctl, 3, 0,
2940 			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
2941 			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
2942 			      dramtype);
2943 		pctl_write_mr(dram->pctl, 3, 2,
2944 			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
2945 			       PCTL2_MR_MASK), dramtype);
2946 		if (dramtype == DDR4) {
2947 			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
2948 				      dramtype);
2949 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2950 				       DDR_PCTL2_INIT6);
2951 			pctl_write_mr(dram->pctl, 3, 4,
2952 				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
2953 				       PCTL2_MR_MASK,
2954 				      dramtype);
2955 			pctl_write_mr(dram->pctl, 3, 5,
2956 				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
2957 				      PCTL2_MR_MASK,
2958 				      dramtype);
2959 
2960 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2961 				       DDR_PCTL2_INIT7);
2962 			pctl_write_mr(dram->pctl, 3, 6,
2963 				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
2964 				      PCTL2_MR_MASK,
2965 				      dramtype);
2966 		}
2967 	} else if (dramtype == LPDDR4) {
2968 		pctl_write_mr(dram->pctl, 3, 13,
2969 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2970 			       PCTL2_MR_MASK) & (~(BIT(7)))) |
2971 			      dst_fsp_lp4 << 7, dramtype);
2972 	}
2973 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
2974 		     PCTL2_DIS_AUTO_REFRESH);
2975 	update_refresh_reg(dram);
2976 
2977 	/* retrain at the new frequency */
2978 	high_freq_training(dram, sdram_params_new, dst_fsp);
2979 	low_power_update(dram, lp_stat);
2980 
2981 	save_fsp_param(dram, dst_fsp, sdram_params_new);
2982 }
2983 
2984 static void ddr_set_rate_for_fsp(struct dram_info *dram,
2985 				 struct rv1126_sdram_params *sdram_params)
2986 {
2987 	struct ddr2_3_4_lp2_3_info *ddr_info;
2988 	u32 f0;
2989 	u32 dramtype = sdram_params->base.dramtype;
2990 #ifndef CONFIG_SPL_KERNEL_BOOT
2991 	u32 f1, f2, f3;
2992 #endif
2993 
2994 	ddr_info = get_ddr_drv_odt_info(dramtype);
2995 	if (!ddr_info)
2996 		return;
2997 
2998 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2999 	     DDR_FREQ_MASK;
3000 
3001 #ifndef CONFIG_SPL_KERNEL_BOOT
3002 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3003 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
3004 
3005 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3006 	     DDR_FREQ_MASK;
3007 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3008 	     DDR_FREQ_MASK;
3009 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3010 	     DDR_FREQ_MASK;
3011 #endif
3012 
3013 	if (get_wrlvl_val(dram, sdram_params))
3014 		printascii("get wrlvl value failed\n");
3015 
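	/*
	 * Train at each set point (f1..f3 when built in, then f0) so that
	 * save_fsp_param() captures parameters for every FSP.
	 */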
3016 #ifndef CONFIG_SPL_KERNEL_BOOT
3017 	printascii("change to: ");
3018 	printdec(f1);
3019 	printascii("MHz\n");
3020 	ddr_set_rate(&dram_info, sdram_params, f1,
3021 		     sdram_params->base.ddr_freq, 1, 1, 1);
3022 	printascii("change to: ");
3023 	printdec(f2);
3024 	printascii("MHz\n");
3025 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3026 	printascii("change to: ");
3027 	printdec(f3);
3028 	printascii("MHz\n");
3029 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3030 #endif
3031 	printascii("change to: ");
3032 	printdec(f0);
3033 	printascii("MHz(final freq)\n");
3034 #ifndef CONFIG_SPL_KERNEL_BOOT
3035 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3036 #else
3037 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
3038 #endif
3039 }
3040 
3041 int get_uart_config(void)
3042 {
3043 	struct sdram_head_info_index_v2 *index =
3044 		(struct sdram_head_info_index_v2 *)common_info;
3045 	struct global_info *gbl_info;
3046 
3047 	gbl_info = (struct global_info *)((void *)common_info +
3048 		index->global_index.offset * 4);
3049 
3050 	return gbl_info->uart_info;
3051 }
3052 
3053 /* return: 0 = success, other = fail */
3054 int sdram_init(void)
3055 {
3056 	struct rv1126_sdram_params *sdram_params;
3057 	int ret = 0;
3058 	struct sdram_head_info_index_v2 *index =
3059 		(struct sdram_head_info_index_v2 *)common_info;
3060 	struct global_info *gbl_info;
3061 
3062 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3063 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3064 	dram_info.grf = (void *)GRF_BASE_ADDR;
3065 	dram_info.cru = (void *)CRU_BASE_ADDR;
3066 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3067 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3068 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3069 
3070 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3071 	printascii("extended temp support\n");
3072 #endif
3073 	if (index->version_info != 2 ||
3074 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3075 	    (index->ddr3_index.size !=
3076 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3077 	    (index->ddr4_index.size !=
3078 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3079 	    (index->lp3_index.size !=
3080 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3081 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3082 	    index->global_index.offset == 0 ||
3083 	    index->ddr3_index.offset == 0 ||
3084 	    index->ddr4_index.offset == 0 ||
3085 	    index->lp3_index.offset == 0 ||
3086 	    index->lp4_index.offset == 0) {
3087 		printascii("common info error\n");
3088 		goto error;
3089 	}
3090 
3091 	gbl_info = (struct global_info *)((void *)common_info +
3092 		index->global_index.offset * 4);
3093 
3094 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3095 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3096 
3097 	sdram_params = &sdram_configs[0];
3098 
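	/*
	 * Apply the 2T timing choice: pctl[0][1] is presumably the MSTR
	 * value, where bit 10 is en_2t_timing_mode.
	 */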
3099 	if (sdram_params->base.dramtype == DDR3 ||
3100 	    sdram_params->base.dramtype == DDR4) {
3101 		if (DDR_2T_INFO(gbl_info->info_2t))
3102 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3103 		else
3104 			sdram_params->pctl_regs.pctl[0][1] &=
3105 				~(0x1 << 10);
3106 	}
3107 	ret = sdram_init_detect(&dram_info, sdram_params);
3108 	if (ret) {
3109 		sdram_print_dram_type(sdram_params->base.dramtype);
3110 		printascii(", ");
3111 		printdec(sdram_params->base.ddr_freq);
3112 		printascii("MHz\n");
3113 		goto error;
3114 	}
3115 	print_ddr_info(sdram_params);
3116 
3117 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3118 #ifndef CONFIG_SPL_KERNEL_BOOT
3119 	copy_fsp_param_to_ddr();
3120 #endif
3121 
3122 	ddr_set_atags(&dram_info, sdram_params);
3123 
3124 	printascii("out\n");
3125 
3126 	return ret;
3127 error:
3128 	printascii("error\n");
3129 	return (-1);
3130 }
3131 #endif /* CONFIG_TPL_BUILD */
3132