xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision f627cf25e97b0bb49b3faf2118965c992c4dee8f)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
/* define training flag: bit-mask selecting which PHY training steps to run */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* signal class selectors for the de-skew helpers */
#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

/* de-skew update mode: write absolute value vs apply signed delta */
#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/* Run-time state for the DRAM init: MMIO bases plus idle-timer settings. */
struct dram_info {
	void __iomem *pctl;		/* uMCTL2 DDR controller base */
	void __iomem *phy;		/* DDR PHY base */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler (server MSCH) */
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;		/* base/size reported to the RAM uclass */
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;			/* self-refresh idle timeout */
	u32 pd_idle;			/* power-down idle timeout */
};
55 
/* Fixed RV1126 MMIO base addresses used before the device model is up */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* secure GRF register offsets (reset requests live in SOC_CON13) */
#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* Global DRAM state shared by all routines in this file. */
struct dram_info dram_info;
70 
/*
 * Per-frequency SDRAM parameter sets, selected at build time by DRAM type
 * (3 = DDR3, 0 = DDR4, 6 = LPDDR3, 7 = LPDDR4).  Each .inc file provides
 * one struct rv1126_sdram_params initializer for the frequency in its name.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

/*
 * Blob of loader parameters (drive strength / ODT / vref tables etc.)
 * accessed through struct sdram_head_info_index_v2, see
 * get_ddr_drv_odt_info() and rkclk_set_dpll().
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};
116 
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training results exported to the ddr test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* per-frequency-set-point parameters (one entry per FSP index) */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* LPDDR3 ODT value cached during init — NOTE(review): writer not in this chunk */
static u8 lp3_odt_value;

/* write-leveling results, indexed [rank][byte lane] */
static s8 wrlvl_result[2][4];
126 
/*
 * DDR configuration 0-9 (non-DDR4 rank/bank/column encodings).
 * Matched against the key built in calculate_ddrconfig():
 *   bit [8]    = rank - 1
 *   bits[7:5]  = row - 13
 *   bit [4]    = bank/row layout variant (see entries 5-7)
 *   bit [3]    = 1 when bank bits == 3 (8 banks)
 *   bits[2:0]  = col + bw - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
140 
/*
 * DDR configuration 10-21 (DDR4 encodings).
 * Matched against the key built in calculate_ddrconfig():
 *   bit [7]    = rank - 1
 *   bits[6:4]  = row - 13
 *   bit [3]    = bank-group layout variant
 *   bits[2:1]  = bus width code
 *   bit [0]    = die width (dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
156 
/*
 * DDR configuration 22-28 (second page of non-DDR4 encodings; same
 * bit layout as ddr_cfg_2_rbc, ddrconf index = array index + 22).
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
167 
/*
 * Mapping between DDR4 ddrconf numbers ([0], values 10-21) and the
 * equivalent non-DDR4 ddrconf ([1]) used for the MSCH/address map.
 * Translated forward in calculate_ddrconfig() and back in
 * set_ctl_address_map().
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
182 
/*
 * Controller address-map register values, one row per ddrconf (0-22).
 * Each row holds the 9 words copied verbatim into DDR_PCTL2_ADDRMAP0..
 * ADDRMAP8 by set_ctl_address_map().  Note: ddrconf values 23-28 have
 * no row here.
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
233 
/*
 * DQ signal selection table, one row per DQ-related signal.
 * NOTE(review): column semantics (select code vs per-direction register
 * index) are consumed by code outside this chunk — confirm before reuse.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
258 
/* Address-group bases per {CS0,CS1} x {A,B} — used for CA/address training */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
265 
/*
 * PHY register offsets holding write-leveling results,
 * indexed [rank][byte lane] to match wrlvl_result[][].
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
270 
/*
 * PHY de-skew register offsets for each DQS group: RX first (CS0 then
 * CS1), then TX — ordering matches the SKEW_UPDATE_* comments below.
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
289 
/*
 * Assert/deassert the DDR controller and PHY resets.
 * Each argument is 1 to assert the corresponding reset, 0 to release it.
 * Controller resets go through the secure GRF (SOC_CON13); the AXI reset
 * follows ctl_srstn.  PHY resets go through the CRU soft-reset register.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
301 
/*
 * Program the DPLL to output @hz.
 * Sequence: park the DPLL on the 24 MHz crystal, write the divider
 * settings (optionally enabling spread spectrum from the loader params),
 * wait for lock, then switch back to the PLL output.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock timeout, ~1000 us */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* integer mode unless SSMOD is enabled */

	/* offsets in common_info are in 32-bit words */
	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* pick postdivs so the VCO stays in range for the target band */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* fout = 24 MHz / refdiv * fbdiv / (postdiv1 * postdiv2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* fractional mode required for SSMOD */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* NOTE(review): on lock timeout we fall through silently */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
367 
/* Set the DDR clock from the selected parameter set (ddr_freq is in MHz). */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
374 
/*
 * Derive the ddrconf number (index into the rank/bank/col encoding and
 * addrmap tables) from the detected DRAM geometry.
 *
 * A key is packed from rank/row/bank/col/bw and compared against the
 * encoding tables; row bits match with "<=" so a smaller part can reuse
 * a larger layout.  DDR4 results (10-21) are translated to their
 * non-DDR4 equivalent via d4_rbc_2_d3_rbc before returning.
 *
 * Returns the ddrconf number; on failure prints an error and returns
 * the unmatched value ((u32)-1 stays > 28).
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* prefer the symmetric dual-rank encodings (17-20) */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8-bank variants (5-7) first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* second page of encodings maps to ddrconf 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort single-rank fallback */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
479 
/*
 * Open a quasi-dynamic register programming window on the uMCTL2
 * by clearing SWCTL.sw_done; pair with sw_set_ack().
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
487 
488 static void sw_set_ack(struct dram_info *dram)
489 {
490 	void __iomem *pctl_base = dram->pctl;
491 
492 	/* set sw_done=1 */
493 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
494 	while (1) {
495 		/* wait programming done */
496 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
497 				PCTL2_SW_DONE_ACK)
498 			break;
499 	}
500 }
501 
502 static void set_ctl_address_map(struct dram_info *dram,
503 				struct rv1126_sdram_params *sdram_params)
504 {
505 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
506 	void __iomem *pctl_base = dram->pctl;
507 	u32 ddrconf = cap_info->ddrconfig;
508 	u32 i, row;
509 
510 	row = cap_info->cs0_row;
511 	if (sdram_params->base.dramtype == DDR4) {
512 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
513 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
514 				ddrconf = d4_rbc_2_d3_rbc[i][0];
515 				break;
516 			}
517 		}
518 	}
519 
520 	if (ddrconf > ARRAY_SIZE(addrmap)) {
521 		printascii("set ctl address map fail\n");
522 		return;
523 	}
524 
525 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
526 			  &addrmap[ddrconf][0], 9 * 4);
527 
528 	/* unused row set to 0xf */
529 	for (i = 17; i >= row; i--)
530 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
531 			((i - 12) * 8 / 32) * 4,
532 			0xf << ((i - 12) * 8 % 32));
533 
534 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
535 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
536 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
537 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
538 
539 	if (cap_info->rank == 1)
540 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
541 }
542 
/*
 * Configure the DDR PHY PLL for @freq (Hz).
 * Called twice: first with wait=0 to program the dividers, then with
 * wait=1 to power the PLL up and poll for lock.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		/* release power-down and spin until the PLL locks */
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* lower target frequencies need a larger feedback/post divide */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is split: low 8 bits in reg 0x50, bit 9 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		/* NOTE(review): prediv written unshifted — assumes shift is 0 */
		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
581 
/*
 * DDR3 PHY output-driver settings: {register code, strength in ohm},
 * sorted in descending resistance so the lookup in set_ds_odt() (which
 * scans from the end) picks the smallest resistance >= the request.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
607 
608 static u16 d3_phy_odt_2_ohm[][2] = {
609 	{PHY_DDR3_RTT_DISABLE, 0},
610 	{PHY_DDR3_RTT_561ohm, 561},
611 	{PHY_DDR3_RTT_282ohm, 282},
612 	{PHY_DDR3_RTT_188ohm, 188},
613 	{PHY_DDR3_RTT_141ohm, 141},
614 	{PHY_DDR3_RTT_113ohm, 113},
615 	{PHY_DDR3_RTT_94ohm, 94},
616 	{PHY_DDR3_RTT_81ohm, 81},
617 	{PHY_DDR3_RTT_72ohm, 72},
618 	{PHY_DDR3_RTT_64ohm, 64},
619 	{PHY_DDR3_RTT_58ohm, 58},
620 	{PHY_DDR3_RTT_52ohm, 52},
621 	{PHY_DDR3_RTT_48ohm, 48},
622 	{PHY_DDR3_RTT_44ohm, 44},
623 	{PHY_DDR3_RTT_41ohm, 41},
624 	{PHY_DDR3_RTT_38ohm, 38},
625 	{PHY_DDR3_RTT_37ohm, 37},
626 	{PHY_DDR3_RTT_34ohm, 34},
627 	{PHY_DDR3_RTT_32ohm, 32},
628 	{PHY_DDR3_RTT_31ohm, 31},
629 	{PHY_DDR3_RTT_29ohm, 29},
630 	{PHY_DDR3_RTT_28ohm, 28},
631 	{PHY_DDR3_RTT_27ohm, 27},
632 	{PHY_DDR3_RTT_25ohm, 25}
633 };
634 
635 static u16 d4lp3_phy_drv_2_ohm[][2] = {
636 	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
637 	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
638 	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
639 	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
640 	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
641 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
642 	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
643 	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
644 	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
645 	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
646 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
647 	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
648 	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
649 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
650 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
651 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
652 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
653 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
654 	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
655 	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
656 	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
657 	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
658 	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
659 };
660 
661 static u16 d4lp3_phy_odt_2_ohm[][2] = {
662 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
663 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
664 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
665 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
666 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
667 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
668 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
669 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
670 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
671 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
672 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
673 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
674 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
675 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
676 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
677 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
678 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
679 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
680 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
681 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
682 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
683 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
684 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
685 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
686 };
687 
688 static u16 lp4_phy_drv_2_ohm[][2] = {
689 	{PHY_LPDDR4_RON_501ohm, 501},
690 	{PHY_LPDDR4_RON_253ohm, 253},
691 	{PHY_LPDDR4_RON_168ohm, 168},
692 	{PHY_LPDDR4_RON_126ohm, 126},
693 	{PHY_LPDDR4_RON_101ohm, 101},
694 	{PHY_LPDDR4_RON_84ohm, 84},
695 	{PHY_LPDDR4_RON_72ohm, 72},
696 	{PHY_LPDDR4_RON_63ohm, 63},
697 	{PHY_LPDDR4_RON_56ohm, 56},
698 	{PHY_LPDDR4_RON_50ohm, 50},
699 	{PHY_LPDDR4_RON_46ohm, 46},
700 	{PHY_LPDDR4_RON_42ohm, 42},
701 	{PHY_LPDDR4_RON_38ohm, 38},
702 	{PHY_LPDDR4_RON_36ohm, 36},
703 	{PHY_LPDDR4_RON_33ohm, 33},
704 	{PHY_LPDDR4_RON_31ohm, 31},
705 	{PHY_LPDDR4_RON_29ohm, 29},
706 	{PHY_LPDDR4_RON_28ohm, 28},
707 	{PHY_LPDDR4_RON_26ohm, 26},
708 	{PHY_LPDDR4_RON_25ohm, 25},
709 	{PHY_LPDDR4_RON_24ohm, 24},
710 	{PHY_LPDDR4_RON_23ohm, 23},
711 	{PHY_LPDDR4_RON_22ohm, 22}
712 };
713 
714 static u16 lp4_phy_odt_2_ohm[][2] = {
715 	{PHY_LPDDR4_RTT_DISABLE, 0},
716 	{PHY_LPDDR4_RTT_604ohm, 604},
717 	{PHY_LPDDR4_RTT_303ohm, 303},
718 	{PHY_LPDDR4_RTT_202ohm, 202},
719 	{PHY_LPDDR4_RTT_152ohm, 152},
720 	{PHY_LPDDR4_RTT_122ohm, 122},
721 	{PHY_LPDDR4_RTT_101ohm, 101},
722 	{PHY_LPDDR4_RTT_87ohm,	87},
723 	{PHY_LPDDR4_RTT_78ohm, 78},
724 	{PHY_LPDDR4_RTT_69ohm, 69},
725 	{PHY_LPDDR4_RTT_62ohm, 62},
726 	{PHY_LPDDR4_RTT_56ohm, 56},
727 	{PHY_LPDDR4_RTT_52ohm, 52},
728 	{PHY_LPDDR4_RTT_48ohm, 48},
729 	{PHY_LPDDR4_RTT_44ohm, 44},
730 	{PHY_LPDDR4_RTT_41ohm, 41},
731 	{PHY_LPDDR4_RTT_39ohm, 39},
732 	{PHY_LPDDR4_RTT_37ohm, 37},
733 	{PHY_LPDDR4_RTT_35ohm, 35},
734 	{PHY_LPDDR4_RTT_33ohm, 33},
735 	{PHY_LPDDR4_RTT_32ohm, 32},
736 	{PHY_LPDDR4_RTT_30ohm, 30},
737 	{PHY_LPDDR4_RTT_29ohm, 29},
738 	{PHY_LPDDR4_RTT_27ohm, 27}
739 };
740 
741 static u32 lp4_odt_calc(u32 odt_ohm)
742 {
743 	u32 odt;
744 
745 	if (odt_ohm == 0)
746 		odt = LPDDR4_DQODT_DIS;
747 	else if (odt_ohm <= 40)
748 		odt = LPDDR4_DQODT_40;
749 	else if (odt_ohm <= 48)
750 		odt = LPDDR4_DQODT_48;
751 	else if (odt_ohm <= 60)
752 		odt = LPDDR4_DQODT_60;
753 	else if (odt_ohm <= 80)
754 		odt = LPDDR4_DQODT_80;
755 	else if (odt_ohm <= 120)
756 		odt = LPDDR4_DQODT_120;
757 	else
758 		odt = LPDDR4_DQODT_240;
759 
760 	return odt;
761 }
762 
763 static void *get_ddr_drv_odt_info(u32 dramtype)
764 {
765 	struct sdram_head_info_index_v2 *index =
766 		(struct sdram_head_info_index_v2 *)common_info;
767 	void *ddr_info = 0;
768 
769 	if (dramtype == DDR4)
770 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
771 	else if (dramtype == DDR3)
772 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
773 	else if (dramtype == LPDDR3)
774 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
775 	else if (dramtype == LPDDR4)
776 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
777 	else
778 		printascii("unsupported dram type\n");
779 	return ddr_info;
780 }
781 
782 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
783 			 u32 freq_mhz, u32 dst_fsp)
784 {
785 	void __iomem *pctl_base = dram->pctl;
786 	u32 ca_vref, dq_vref;
787 
788 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
789 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
790 	else
791 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
792 
793 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
794 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
795 	else
796 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
797 
798 	if (ca_vref < 100)
799 		ca_vref = 100;
800 	if (ca_vref > 420)
801 		ca_vref = 420;
802 
803 	if (ca_vref <= 300)
804 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
805 	else
806 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
807 
808 	if (dq_vref < 100)
809 		dq_vref = 100;
810 	if (dq_vref > 420)
811 		dq_vref = 420;
812 
813 	if (dq_vref <= 300)
814 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
815 	else
816 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
817 
818 	sw_set_req(dram);
819 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
820 			DDR_PCTL2_INIT6,
821 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
822 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
823 
824 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
825 			DDR_PCTL2_INIT7,
826 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
827 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
828 	sw_set_ack(dram);
829 }
830 
831 static void set_ds_odt(struct dram_info *dram,
832 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
833 {
834 	void __iomem *phy_base = dram->phy;
835 	void __iomem *pctl_base = dram->pctl;
836 	u32 dramtype = sdram_params->base.dramtype;
837 	struct ddr2_3_4_lp2_3_info *ddr_info;
838 	struct lp4_info *lp4_info;
839 	u32 i, j, tmp;
840 	const u16 (*p_drv)[2];
841 	const u16 (*p_odt)[2];
842 	u32 drv_info, sr_info;
843 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
844 	u32 phy_odt_ohm, dram_odt_ohm;
845 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
846 	u32 phy_odt_up_en, phy_odt_dn_en;
847 	u32 sr_dq, sr_clk;
848 	u32 freq = sdram_params->base.ddr_freq;
849 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
850 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
851 	u32 phy_dq_drv = 0;
852 	u32 phy_odt_up = 0, phy_odt_dn = 0;
853 
854 	ddr_info = get_ddr_drv_odt_info(dramtype);
855 	lp4_info = (void *)ddr_info;
856 
857 	if (!ddr_info)
858 		return;
859 
860 	/* dram odt en freq control phy drv, dram odt and phy sr */
861 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
862 		drv_info = ddr_info->drv_when_odtoff;
863 		dram_odt_ohm = 0;
864 		sr_info = ddr_info->sr_when_odtoff;
865 		phy_lp4_drv_pd_en =
866 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
867 	} else {
868 		drv_info = ddr_info->drv_when_odten;
869 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
870 		sr_info = ddr_info->sr_when_odten;
871 		phy_lp4_drv_pd_en =
872 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
873 	}
874 	phy_dq_drv_ohm =
875 		DRV_INFO_PHY_DQ_DRV(drv_info);
876 	phy_clk_drv_ohm =
877 		DRV_INFO_PHY_CLK_DRV(drv_info);
878 	phy_ca_drv_ohm =
879 		DRV_INFO_PHY_CA_DRV(drv_info);
880 
881 	sr_dq = DQ_SR_INFO(sr_info);
882 	sr_clk = CLK_SR_INFO(sr_info);
883 
884 	/* phy odt en freq control dram drv and phy odt */
885 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
886 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
887 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
888 		phy_odt_ohm = 0;
889 		phy_odt_up_en = 0;
890 		phy_odt_dn_en = 0;
891 	} else {
892 		dram_drv_ohm =
893 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
894 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
895 		phy_odt_up_en =
896 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
897 		phy_odt_dn_en =
898 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
899 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
900 	}
901 
902 	if (dramtype == LPDDR4) {
903 		if (phy_odt_ohm) {
904 			phy_odt_up_en = 0;
905 			phy_odt_dn_en = 1;
906 		}
907 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
908 			dram_caodt_ohm = 0;
909 		else
910 			dram_caodt_ohm =
911 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
912 	}
913 
914 	if (dramtype == DDR3) {
915 		p_drv = d3_phy_drv_2_ohm;
916 		p_odt = d3_phy_odt_2_ohm;
917 	} else if (dramtype == LPDDR4) {
918 		p_drv = lp4_phy_drv_2_ohm;
919 		p_odt = lp4_phy_odt_2_ohm;
920 	} else {
921 		p_drv = d4lp3_phy_drv_2_ohm;
922 		p_odt = d4lp3_phy_odt_2_ohm;
923 	}
924 
925 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
926 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
927 			phy_dq_drv = **(p_drv + i);
928 			break;
929 		}
930 		if (i == 0)
931 			break;
932 	}
933 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
934 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
935 			phy_clk_drv = **(p_drv + i);
936 			break;
937 		}
938 		if (i == 0)
939 			break;
940 	}
941 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
942 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
943 			phy_ca_drv = **(p_drv + i);
944 			break;
945 		}
946 		if (i == 0)
947 			break;
948 	}
949 	if (!phy_odt_ohm)
950 		phy_odt = 0;
951 	else
952 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
953 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
954 				phy_odt = **(p_odt + i);
955 				break;
956 			}
957 			if (i == 0)
958 				break;
959 		}
960 
961 	if (dramtype != LPDDR4) {
962 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
963 			vref_inner = 0x80;
964 		else if (phy_odt_up_en)
965 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
966 				     (dram_drv_ohm + phy_odt_ohm);
967 		else
968 			vref_inner = phy_odt_ohm * 128 /
969 				(phy_odt_ohm + dram_drv_ohm);
970 
971 		if (dramtype != DDR3 && dram_odt_ohm)
972 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
973 				   (phy_dq_drv_ohm + dram_odt_ohm);
974 		else
975 			vref_out = 0x80;
976 	} else {
977 		/* for lp4 */
978 		if (phy_odt_ohm)
979 			vref_inner =
980 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
981 				 256) / 1000;
982 		else
983 			vref_inner =
984 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
985 				 256) / 1000;
986 
987 		vref_out = 0x80;
988 	}
989 
990 	/* default ZQCALIB bypass mode */
991 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
992 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
993 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
994 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
995 	if (dramtype == LPDDR4) {
996 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
997 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
998 	} else {
999 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1000 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1001 	}
1002 	/* clk / cmd slew rate */
1003 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1004 
1005 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1006 	if (phy_odt_up_en)
1007 		phy_odt_up = phy_odt;
1008 	if (phy_odt_dn_en)
1009 		phy_odt_dn = phy_odt;
1010 
1011 	for (i = 0; i < 4; i++) {
1012 		j = 0x110 + i * 0x10;
1013 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1014 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1015 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1016 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1017 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1018 
1019 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1020 				1 << 3, phy_lp4_drv_pd_en << 3);
1021 		/* dq slew rate */
1022 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1023 				0x1f, sr_dq);
1024 	}
1025 
1026 	/* reg_rx_vref_value_update */
1027 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1028 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1029 
1030 	/* RAM VREF */
1031 	writel(vref_out, PHY_REG(phy_base, 0x105));
1032 	if (dramtype == LPDDR3)
1033 		udelay(100);
1034 
1035 	if (dramtype == LPDDR4)
1036 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1037 
1038 	if (dramtype == DDR3 || dramtype == DDR4) {
1039 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1040 				DDR_PCTL2_INIT3);
1041 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1042 	} else {
1043 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1044 				DDR_PCTL2_INIT4);
1045 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1046 	}
1047 
1048 	if (dramtype == DDR3) {
1049 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1050 		if (dram_drv_ohm == 34)
1051 			mr1_mr3 |= DDR3_DS_34;
1052 
1053 		if (dram_odt_ohm == 0)
1054 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1055 		else if (dram_odt_ohm <= 40)
1056 			mr1_mr3 |= DDR3_RTT_NOM_40;
1057 		else if (dram_odt_ohm <= 60)
1058 			mr1_mr3 |= DDR3_RTT_NOM_60;
1059 		else
1060 			mr1_mr3 |= DDR3_RTT_NOM_120;
1061 
1062 	} else if (dramtype == DDR4) {
1063 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1064 		if (dram_drv_ohm == 48)
1065 			mr1_mr3 |= DDR4_DS_48;
1066 
1067 		if (dram_odt_ohm == 0)
1068 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1069 		else if (dram_odt_ohm <= 34)
1070 			mr1_mr3 |= DDR4_RTT_NOM_34;
1071 		else if (dram_odt_ohm <= 40)
1072 			mr1_mr3 |= DDR4_RTT_NOM_40;
1073 		else if (dram_odt_ohm <= 48)
1074 			mr1_mr3 |= DDR4_RTT_NOM_48;
1075 		else if (dram_odt_ohm <= 60)
1076 			mr1_mr3 |= DDR4_RTT_NOM_60;
1077 		else
1078 			mr1_mr3 |= DDR4_RTT_NOM_120;
1079 
1080 	} else if (dramtype == LPDDR3) {
1081 		if (dram_drv_ohm <= 34)
1082 			mr1_mr3 |= LPDDR3_DS_34;
1083 		else if (dram_drv_ohm <= 40)
1084 			mr1_mr3 |= LPDDR3_DS_40;
1085 		else if (dram_drv_ohm <= 48)
1086 			mr1_mr3 |= LPDDR3_DS_48;
1087 		else if (dram_drv_ohm <= 60)
1088 			mr1_mr3 |= LPDDR3_DS_60;
1089 		else if (dram_drv_ohm <= 80)
1090 			mr1_mr3 |= LPDDR3_DS_80;
1091 
1092 		if (dram_odt_ohm == 0)
1093 			lp3_odt_value = LPDDR3_ODT_DIS;
1094 		else if (dram_odt_ohm <= 60)
1095 			lp3_odt_value = LPDDR3_ODT_60;
1096 		else if (dram_odt_ohm <= 120)
1097 			lp3_odt_value = LPDDR3_ODT_120;
1098 		else
1099 			lp3_odt_value = LPDDR3_ODT_240;
1100 	} else {/* for lpddr4 */
1101 		/* MR3 for lp4 PU-CAL and PDDS */
1102 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1103 		mr1_mr3 |= lp4_pu_cal;
1104 
1105 		tmp = lp4_odt_calc(dram_drv_ohm);
1106 		if (!tmp)
1107 			tmp = LPDDR4_PDDS_240;
1108 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1109 
1110 		/* MR11 for lp4 ca odt, dq odt set */
1111 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1112 			     DDR_PCTL2_INIT6);
1113 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1114 
1115 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1116 
1117 		tmp = lp4_odt_calc(dram_odt_ohm);
1118 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1119 
1120 		tmp = lp4_odt_calc(dram_caodt_ohm);
1121 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1122 		sw_set_req(dram);
1123 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1124 				DDR_PCTL2_INIT6,
1125 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1126 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1127 		sw_set_ack(dram);
1128 
1129 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1130 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1131 			     DDR_PCTL2_INIT7);
1132 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1133 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1134 
1135 		tmp = lp4_odt_calc(phy_odt_ohm);
1136 		mr22 |= tmp;
1137 		mr22 = mr22 |
1138 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1139 			LPDDR4_ODTE_CK_SHIFT) |
1140 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1141 			LPDDR4_ODTE_CS_SHIFT) |
1142 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1143 			LPDDR4_ODTD_CA_SHIFT);
1144 
1145 		sw_set_req(dram);
1146 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1147 				DDR_PCTL2_INIT7,
1148 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1149 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1150 		sw_set_ack(dram);
1151 	}
1152 
1153 	if (dramtype == DDR4 || dramtype == DDR3) {
1154 		sw_set_req(dram);
1155 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1156 				DDR_PCTL2_INIT3,
1157 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1158 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1159 		sw_set_ack(dram);
1160 	} else {
1161 		sw_set_req(dram);
1162 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1163 				DDR_PCTL2_INIT4,
1164 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1165 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1166 		sw_set_ack(dram);
1167 	}
1168 }
1169 
1170 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1171 				   struct rv1126_sdram_params *sdram_params)
1172 {
1173 	void __iomem *phy_base = dram->phy;
1174 	u32 dramtype = sdram_params->base.dramtype;
1175 	struct sdram_head_info_index_v2 *index =
1176 		(struct sdram_head_info_index_v2 *)common_info;
1177 	struct dq_map_info *map_info;
1178 
1179 	map_info = (struct dq_map_info *)((void *)common_info +
1180 		index->dq_map_index.offset * 4);
1181 
1182 	if (dramtype <= LPDDR4)
1183 		writel((map_info->byte_map[dramtype / 4] >>
1184 			((dramtype % 4) * 8)) & 0xff,
1185 		       PHY_REG(phy_base, 0x4f));
1186 
1187 	return 0;
1188 }
1189 
/*
 * Base PHY configuration: byte remap, PLL, register init table, DQ byte
 * width enable and misc training preconditions.  Must run before any
 * data training.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the 0xFFFFFFFF-terminated PHY register init table */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/*
	 * Reg 0x4f holds the byte map (2 bits per lane); find which
	 * physical lanes carry logical byte 0 and byte 1 so the byte
	 * enable mask below matches the remap.
	 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* enable DQ bytes per bus width: bw==2 -> 32bit, 1 -> 16bit, else 8bit */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1235 
1236 static int update_refresh_reg(struct dram_info *dram)
1237 {
1238 	void __iomem *pctl_base = dram->pctl;
1239 	u32 ret;
1240 
1241 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1242 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1243 
1244 	return 0;
1245 }
1246 
1247 /*
1248  * rank = 1: cs0
1249  * rank = 2: cs1
1250  */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	/* board DQ0-7 routing map, 4 bits per DQ bit */
	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	/* raw MR read-back, low byte, latched into the DDR GRF status reg */
	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		/*
		 * Undo the board-level DQ swizzle: move each captured bit i
		 * to its logical position given by dqmap nibble i.
		 */
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		/* LPDDR4 result is delivered un-swizzled in status[1] */
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}
1286 
1287 /* before call this function autorefresh should be disabled */
1288 void send_a_refresh(struct dram_info *dram)
1289 {
1290 	void __iomem *pctl_base = dram->pctl;
1291 
1292 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1293 		continue;
1294 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1295 }
1296 
/*
 * Enter (en != 0) or exit (en == 0) software self-refresh and busy-wait
 * until the controller's STAT register confirms the transition.
 * NOTE(review): both waits are unbounded; a hang here means the
 * controller never reached the expected operating mode.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait for SW-entered (not automatic) self-refresh state */
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait until the controller has left self-refresh */
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}
1319 
/*
 * For each of the 4 DQ groups, read back the left/right loop inverse
 * delay values selected via the dq_sel table and store each result at
 * the per-signal deskew register given by the same table entry.
 * dq_sel[i] = { select code, left dest offset, right dest offset }.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
1343 
/*
 * Latch the RX DQ per-bit deskew values into the PHY by pulsing
 * bit[4] of reg 0x70 (bits [1] and [6] are cleared first).
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1353 
/*
 * Latch the TX DQ per-bit deskew values into the PHY: disable write
 * training (0x7a[1]), set 0x2[3], then pulse the update bit 0xc[6].
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1364 
/*
 * Latch the CA per-bit deskew values into the PHY: clear 0x25[2],
 * then pulse the update bit 0x22[6].
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1374 
1375 /*
1376  * dir: 0: de-skew = delta_*
1377  *	1: de-skew = reg val - delta_*
1378  * delta_dir: value for differential signal: clk/
1379  * delta_sig: value for single signal: ca/cmd
1380  */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	/* cs encoding: 0 -> cs0 only, 2 -> cs1 only, other -> both */
	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/*
	 * LPDDR4: temporarily disable DFI low-power (set 0x60[5]) while
	 * touching deskew; restored at the end if we changed it.
	 */
	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	/* deskew must be changed with the dram in self-refresh */
	enter_sr(dram, 1);

	/* single-ended CA/CMD deskew regs: 0x150..0x16f */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/*
	 * Differential clock deskew (0x150+0x17/0x18).  In relative mode
	 * reg 0x150+0x17 was already advanced by delta_sig in the loop
	 * above, so subtracting delta_sig here yields the original value
	 * plus delta_dif.
	 */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		/* LPDDR4 also mirrors the clock value to 0x4 and 0xa */
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));

}
1431 
/*
 * Return the smallest deskew value currently programmed for the given
 * signal class (SKEW_RX_SIGNAL / SKEW_TX_SIGNAL / SKEW_CA_SIGNAL)
 * across all enabled bytes of up to 'rank' ranks.  0x3f is the largest
 * possible 6-bit deskew value, used as the initial minimum.
 */
static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	/* TX entries sit after the 8 RX entries in dqs_dq_skew_adr[] */
	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		/* CA/CMD deskew regs: 0x150..0x16f */
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
	} else {
		/* only scan bytes enabled in PHY reg 0xf */
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}
1460 
1461 static u32 low_power_update(struct dram_info *dram, u32 en)
1462 {
1463 	void __iomem *pctl_base = dram->pctl;
1464 	u32 lp_stat = 0;
1465 
1466 	if (en) {
1467 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1468 	} else {
1469 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1470 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1471 	}
1472 
1473 	return lp_stat;
1474 }
1475 
1476 /*
1477  * signal:
1478  * dir: 0: de-skew = delta_*
1479  *	1: de-skew = reg val - delta_*
1480  * delta_dir: value for differential signal: dqs
1481  * delta_sig: value for single signal: dq/dm
1482  */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	/* only touch bytes enabled in PHY reg 0xf */
	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	/* RX entries occupy dqs_dq_skew_adr[0..7], TX entries [8..15] */
	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		/* per-byte: regs +0..+8 are dq/dm, +9/+0xa are dqs */
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	/* latch the new values into the PHY */
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}
1522 
/*
 * Read-gate training for one chip-select.  For non-LPDDR4 the PHY ODT
 * is temporarily forced to 294 ohm down / disabled up and restored
 * afterwards.  Returns -1 if the PHY flags a training error (0x91[5]),
 * otherwise a bitmask of enabled bytes that did NOT finish (0 = pass).
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save current per-byte ODT settings (byte0 regs, mirrored to all) */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* 0x91[5] = error flag; low nibble = per-byte done flags */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	/* restore the original ODT settings */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1575 
/*
 * Write-leveling training for one chip-select.  The MR1 value for the
 * current fsp is programmed into PHY regs 0x3/0x4 so the PHY can drive
 * the leveling entry itself.  Hangs forever on timeout (fatal in TPL).
 * Returns 0 on success.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch MR1 for the active frequency set point */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* done when the PHY status matches the enabled-byte mask */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1638 
/* 32-byte alternating 0xaa/0x55 test pattern used for data training */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1645 
/*
 * Read training (auto mode) for one chip-select.  For DDR3 with the
 * default inner vref (0x80) the vref is nudged down by 0xa during
 * training and restored afterwards.  Returns 0 on success, -1 on
 * timeout or a PHY-reported training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* DDR3 at default vref: train with vref lowered by 0xa */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original DDR3 vref after training */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1762 
/*
 * Write training (auto mode) for one chip-select.  For slow LPDDR3
 * (<= 400 MHz) CL/CWL are temporarily dropped to 8/4 and restored at
 * the end.  On LPDDR4 the trained write vref is saved into fsp_param
 * for later DFS use.  Hangs forever on timeout; returns -1 on a
 * PHY-reported training error, 0 on success.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-freq LPDDR3: use relaxed CL/CWL during training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	/* a refresh kicks off the write-training sequence */
	send_a_refresh(dram);

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore CL/CWL and MR2 for low-freq LPDDR3 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1875 
1876 static int data_training(struct dram_info *dram, u32 cs,
1877 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1878 			 u32 training_flag)
1879 {
1880 	u32 ret = 0;
1881 
1882 	if (training_flag == FULL_TRAINING)
1883 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1884 				WRITE_TRAINING | READ_TRAINING;
1885 
1886 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1887 		ret = data_training_wl(dram, cs,
1888 				       sdram_params->base.dramtype,
1889 				       sdram_params->ch.cap_info.rank);
1890 		if (ret != 0)
1891 			goto out;
1892 	}
1893 
1894 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1895 		ret = data_training_rg(dram, cs,
1896 				       sdram_params->base.dramtype);
1897 		if (ret != 0)
1898 			goto out;
1899 	}
1900 
1901 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1902 		ret = data_training_rd(dram, cs,
1903 				       sdram_params->base.dramtype,
1904 				       sdram_params->base.ddr_freq);
1905 		if (ret != 0)
1906 			goto out;
1907 	}
1908 
1909 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1910 		ret = data_training_wr(dram, cs,
1911 				       sdram_params->base.dramtype,
1912 				       sdram_params->base.ddr_freq, dst_fsp);
1913 		if (ret != 0)
1914 			goto out;
1915 	}
1916 
1917 out:
1918 	return ret;
1919 }
1920 
/*
 * Run write leveling on all ranks with the CA/clock deskew forced to a
 * known value (0x1f), then record the per-rank, per-byte leveling
 * results (minus that clock skew) into the wrlvl_result table.
 * Returns the OR of the write-leveling results (0 = success).
 */
static int get_wrlvl_val(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params)
{
	u32 i, j, clk_skew;
	void __iomem *phy_base = dram->phy;
	u32 lp_stat;
	int ret;

	/* low-power features must be off while training */
	lp_stat = low_power_update(dram, 0);

	clk_skew = 0x1f;
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
			 sdram_params->base.dramtype);

	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);

	/* j = rank, i = byte lane; subtract the forced clock skew */
	for (j = 0; j < 2; j++)
		for (i = 0; i < 4; i++)
			wrlvl_result[j][i] =
				readl(PHY_REG(phy_base,
					      wrlvl_result_offset[j][i])) -
				clk_skew;

	low_power_update(dram, lp_stat);

	return ret;
}
1950 
1951 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1952 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1953 				      void __iomem *phy_base, u8 cs_num)
1954 {
1955 	int i;
1956 
1957 	result->cs_num = cs_num;
1958 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1959 			  PHY_DQ_WIDTH_MASK;
1960 	for (i = 0; i < FSP_NUM; i++)
1961 		result->fsp_mhz[i] = 0;
1962 }
1963 
/*
 * Capture the per-DQ min/max values left in the PHY by read/write
 * training for one chip-select. Only byte lanes whose bit is set in
 * byte_en are sampled; rd_result and wr_result each receive 8 min and
 * 8 max bytes per enabled lane.
 */
static void save_rw_trn_min_max(void __iomem *phy_base,
				struct cs_rw_trn_result *rd_result,
				struct cs_rw_trn_result *wr_result,
				u8 byte_en)
{
	u16 phy_ofs;
	u8 dqs;
	u8 dq;

	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
		if ((byte_en & BIT(dqs)) == 0)
			continue;

		/* Channel A or B (low or high 16 bit) */
		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
		/* low or high 8 bit */
		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
		for (dq = 0; dq < 8; dq++) {
			/* read eye: min at +0x15, max at +0x27 */
			rd_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
			rd_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
			/* write eye: min at +0x3d, max at +0x4f */
			wr_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
			wr_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
		}
	}
}
1993 
/*
 * Record the per-DQ and per-DQS deskew registers for every trained
 * chip-select into one FSP result slot.
 *
 * @min_val: the common minimum that was subtracted from all deskew
 *           values (stored so the tool can reconstruct absolute values)
 * @rw:      SKEW_RX_SIGNAL to capture the RX block, otherwise TX
 */
static void save_rw_trn_deskew(void __iomem *phy_base,
			       struct fsp_rw_trn_result *result, u8 cs_num,
			       int min_val, bool rw)
{
	u16 phy_ofs;
	u8 cs;
	u8 dq;

	result->min_val = min_val;

	for (cs = 0; cs < cs_num; cs++) {
		/* per-cs register banks: cs0 at 0x170, cs1 at 0x1a0 */
		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
		/* RX deskew starts at +0x1, TX deskew at +0x17 */
		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
		/* lanes 0/1 at +0x0/+0xb, lanes 2/3 mirrored at +0x60 */
		for (dq = 0; dq < 8; dq++) {
			result->cs[cs].dqs[0].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + dq));
			result->cs[cs].dqs[1].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
			result->cs[cs].dqs[2].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
			result->cs[cs].dqs[3].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
		}

		/* DQS (strobe) deskew sits at +0x8 within each lane block */
		result->cs[cs].dqs[0].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x8));
		result->cs[cs].dqs[1].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
		result->cs[cs].dqs[2].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
		result->cs[cs].dqs[3].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
	}
}
2028 
2029 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2030 {
2031 	result->flag = DDR_DQ_EYE_FLAG;
2032 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2033 }
2034 #endif
2035 
/*
 * Full training pass for a high-frequency set point: re-center the clock
 * around the averaged write-leveling results, run gate/read/write
 * training per rank, then pull all RX/TX/CA deskew values down by their
 * common minimum and re-run gate training.
 *
 * Returns 0 on success, non-zero if any training step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	/* average the write-leveling results over active bytes and ranks */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       (1 << sdram_params->ch.cap_info.bw));

	/* shift clk so the average DQS skew lands at mid-scale (0x20) */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* load cs0 write-leveling results into the per-byte WL registers */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same sequence again with the cs1 write-leveling results */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* normalize RX deskew: subtract the common minimum from all lanes */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* normalize TX/CA deskew by the smaller of the two minimums */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* deskew moved the gate window; re-run gate training to re-lock */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2138 
2139 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2140 {
2141 	writel(ddrconfig, &dram->msch->deviceconf);
2142 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2143 }
2144 
2145 static void update_noc_timing(struct dram_info *dram,
2146 			      struct rv1126_sdram_params *sdram_params)
2147 {
2148 	void __iomem *pctl_base = dram->pctl;
2149 	u32 bw, bl;
2150 
2151 	bw = 8 << sdram_params->ch.cap_info.bw;
2152 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2153 
2154 	/* update the noc timing related to data bus width */
2155 	if ((bw / 8 * bl) <= 16)
2156 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2157 	else if ((bw / 8 * bl) == 32)
2158 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2159 	else if ((bw / 8 * bl) == 64)
2160 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2161 	else
2162 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2163 
2164 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2165 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2166 
2167 	if (sdram_params->base.dramtype == LPDDR4) {
2168 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2169 			(bw == 16) ? 0x1 : 0x2;
2170 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2171 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2172 	}
2173 
2174 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2175 	       &dram->msch->ddrtiminga0);
2176 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2177 	       &dram->msch->ddrtimingb0);
2178 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2179 	       &dram->msch->ddrtimingc0);
2180 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2181 	       &dram->msch->devtodev0);
2182 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2183 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2184 	       &dram->msch->ddr4timing);
2185 }
2186 
/*
 * Configure the DDR "split" feature used when the high 16 bits of the
 * bus back fewer rows than the low 16 bits: the capacity boundary (in
 * 16MB units) is programmed into the GRF so accesses above it use only
 * the low half of the bus. Leaves the split untouched (bypassed) when
 * the configuration is symmetric. Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* cs0 high half is smaller: boundary inside cs0 */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		/* cs1 high half is smaller: boundary at or inside cs1 */
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
				cap_info->cs1_high16bit_row));
	} else {
		/* symmetric high/low halves: no split needed */
		goto out;
	}
	/* split size is expressed in 16MB (1 << 24) units */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route all AXI traffic through the scheduler (disable bypass) */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2232 
2233 static void split_bypass(struct dram_info *dram)
2234 {
2235 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2236 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2237 		return;
2238 
2239 	/* bypass split */
2240 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2241 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2242 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2243 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2244 		     (0x0 << SPLIT_SIZE_OFFSET));
2245 }
2246 
/*
 * Final system-level DRAM configuration: program the scheduler's
 * address map, publish the detected geometry to the PMU GRF os_reg
 * words (read later by the kernel/loader), and set the per-cs device
 * size seen by the scheduler.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* position of the rank-select bit from ADDRMAP0 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		/* cs0 aperture is fixed by the rank bit when it sits high */
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize holds each rank's capacity in 64MB units */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2279 
2280 static void enable_low_power(struct dram_info *dram,
2281 			     struct rv1126_sdram_params *sdram_params)
2282 {
2283 	void __iomem *pctl_base = dram->pctl;
2284 	u32 grf_lp_con;
2285 
2286 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2287 
2288 	if (sdram_params->base.dramtype == DDR4)
2289 		grf_lp_con = (0x7 << 16) | (1 << 1);
2290 	else if (sdram_params->base.dramtype == DDR3)
2291 		grf_lp_con = (0x7 << 16) | (1 << 0);
2292 	else
2293 		grf_lp_con = (0x7 << 16) | (1 << 2);
2294 
2295 	/* en lpckdis_en */
2296 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2297 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2298 
2299 	/* enable sr, pd */
2300 	if (dram->pd_idle == 0)
2301 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2302 	else
2303 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2304 	if (dram->sr_idle == 0)
2305 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2306 	else
2307 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2308 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2309 }
2310 
/*
 * Build the atags handed to the next boot stage: serial console info
 * and the usable DRAM banks (accounting for row_3_4 parts and capacity
 * lost to the split feature).
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;
	u32 split, split_size;
	u64 reduce_cap = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* start a fresh atag list with the serial tag first */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	if (cap_info->row_3_4) {
		/* 3/4-row parts only expose 75% of the decoded capacity */
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
		/* split active: half of the area above the boundary is lost */
		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* rank-select bit position from ADDRMAP0 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	if (cs_cap[1] && cs_pst > 27) {
		/* cs1 starts beyond cs0's data: report two banks */
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
	} else {
		/* contiguous: report a single bank */
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is populated below but never passed to
	 * atags_set_tag() — confirm whether an ATAG_SOC_INFO submission
	 * is intentionally omitted here.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2372 
2373 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2374 {
2375 	u32 split;
2376 
2377 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2378 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2379 		split = 0;
2380 	else
2381 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2382 			SPLIT_SIZE_MASK;
2383 
2384 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2385 			     &sdram_params->base, split);
2386 }
2387 
/*
 * Core bring-up sequence: clock/reset, PHY and controller programming,
 * DFI start, LPDDR3/4 mode-register setup and initial gate training.
 *
 * @post_init: 0 for the first blind attempt (errors are silent and the
 *             caller may retry), non-zero after capacity detection
 *             (errors are reported and cs1 is also trained).
 *
 * Returns 0 on success, -1 if gate training failed.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* assert all resets, then release them stage by stage */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI for operation in the extended temperature range */
	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait until the controller leaves the init state */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* replay the INIT6/INIT7 mode registers to the DRAM */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* PHY reg 0x105 holds vref in 39uV-ish steps; scale for MR6 */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2494 
/*
 * Probe the attached DRAM's geometry: column/bank/row counts, bus
 * width, rank count and 3/4-row parts. LPDDR4 density is read from
 * MR8; other types are probed with aliasing tests via sdram_detect_*.
 * Results are stored in sdram_params->ch.cap_info.
 *
 * Returns 0 on success, -1 (as u64) if probing failed.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;
	u32 i, dq_map;
	u32 byte1 = 0, byte0 = 0;

	cap_info->bw = dram_type == DDR3 ? 0 : 1;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe col, bank, dbw */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: decode geometry from MR8 density field */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* disable auto low-power while training is used for probing */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank 1 present iff gate training succeeds on cs1 */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/* probe bus width: try all 4 lanes, then narrow to 2-lane */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0) {
			cap_info->bw = 2;
		} else {
			/* pick the two lanes mapped to bytes 0 and 1 */
			dq_map = readl(PHY_REG(phy_base, 0x4f));
			for (i = 0; i < 4; i++) {
				if (((dq_map >> (i * 2)) & 0x3) == 0)
					byte0 = i;
				if (((dq_map >> (i * 2)) & 0x3) == 1)
					byte1 = i;
			}
			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
					BIT(byte0) | BIT(byte1));
			if (data_training(dram, 0, sdram_params, 0,
					  READ_GATE_TRAINING) == 0)
				cap_info->bw = 1;
			else
				cap_info->bw = 0;
		}
		if (cap_info->bw > 0)
			cap_info->dbw = 1;
	}

	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* assume symmetric halves; refined later by sdram_detect_high_row */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2605 
2606 static int dram_detect_cs1_row(struct dram_info *dram,
2607 			       struct rv1126_sdram_params *sdram_params,
2608 			       unsigned char channel)
2609 {
2610 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2611 	void __iomem *pctl_base = dram->pctl;
2612 	u32 ret = 0;
2613 	void __iomem *test_addr;
2614 	u32 row, bktmp, coltmp, bw;
2615 	u64 cs0_cap;
2616 	u32 byte_mask;
2617 	u32 cs_pst;
2618 	u32 cs_add = 0;
2619 	u32 max_row;
2620 
2621 	if (cap_info->rank == 2) {
2622 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2623 			6 + 2;
2624 		if (cs_pst < 28)
2625 			cs_add = 1;
2626 
2627 		cs0_cap = 1 << cs_pst;
2628 
2629 		if (sdram_params->base.dramtype == DDR4) {
2630 			if (cap_info->dbw == 0)
2631 				bktmp = cap_info->bk + 2;
2632 			else
2633 				bktmp = cap_info->bk + 1;
2634 		} else {
2635 			bktmp = cap_info->bk;
2636 		}
2637 		bw = cap_info->bw;
2638 		coltmp = cap_info->col;
2639 
2640 		if (bw == 2)
2641 			byte_mask = 0xFFFF;
2642 		else
2643 			byte_mask = 0xFF;
2644 
2645 		max_row = (cs_pst == 31) ? 30 : 31;
2646 
2647 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2648 
2649 		row = (cap_info->cs0_row > max_row) ? max_row :
2650 			cap_info->cs0_row;
2651 
2652 		for (; row > 12; row--) {
2653 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2654 				    (u32)cs0_cap +
2655 				    (1ul << (row + bktmp + coltmp +
2656 					     cs_add + bw - 1ul)));
2657 
2658 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2659 			writel(PATTERN, test_addr);
2660 
2661 			if (((readl(test_addr) & byte_mask) ==
2662 			     (PATTERN & byte_mask)) &&
2663 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2664 			      byte_mask) == 0)) {
2665 				ret = row;
2666 				break;
2667 			}
2668 		}
2669 	}
2670 
2671 	return ret;
2672 }
2673 
2674 /* return: 0 = success, other = fail */
/* return: 0 = success, other = fail */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	/* first blind init; on DDR3 retry once with an alternate byte map */
	if (sdram_init_(dram, sdram_params, 0)) {
		if (sdram_params->base.dramtype == DDR3) {
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					(0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	/* sanity write/read at the DRAM base before probing further */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	/* detect geometry with the split disabled, then re-init with it */
	split_bypass(dram);
	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	/* cs1 row count needs a live controller, so probe it last */
	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* publish the detected cs1 rows via the PMU GRF os_regs */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
	split_setup(dram, sdram_params);
out:
	return ret;
}
2733 
2734 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2735 {
2736 	u32 i;
2737 	u32 offset = 0;
2738 	struct ddr2_3_4_lp2_3_info *ddr_info;
2739 
2740 	if (!freq_mhz) {
2741 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2742 		if (ddr_info)
2743 			freq_mhz =
2744 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2745 				DDR_FREQ_MASK;
2746 		else
2747 			freq_mhz = 0;
2748 	}
2749 
2750 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2751 		if (sdram_configs[i].base.ddr_freq == 0 ||
2752 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2753 			break;
2754 	}
2755 	offset = i == 0 ? 0 : i - 1;
2756 
2757 	return &sdram_configs[offset];
2758 }
2759 
/*
 * Controller registers whose frequency-dependent values must be copied
 * into the target FSP register bank by pre_set_rate().
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2783 
/*
 * PHY registers (CL/CWL/AL group) that pre_set_rate() mirrors into the
 * per-FSP PHY register window.
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2789 
/*
 * Prepare a frequency switch: copy the frequency-dependent controller
 * and PHY timing registers into the destination FSP bank, reprogram
 * drive/ODT, and (for LPDDR4) replay the mode registers to both the
 * DRAM and the PHY's MR shadow registers.
 *
 * @dst_fsp:     destination controller/PHY frequency set point
 * @dst_fsp_lp4: LPDDR4 FSP encoding used in the MR13 FSP-OP/WR field
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		/* pctl_regs is sorted; resume scanning at the last match */
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI for operation in the extended temperature range */
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13: keep the value but select the destination FSP bits */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		/* mirror each MR into the PHY shadow regs (0x17..0x1d) */
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2915 
2916 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
2917 			   struct rv1126_sdram_params *sdram_params)
2918 {
2919 	void __iomem *pctl_base = dram->pctl;
2920 	void __iomem *phy_base = dram->phy;
2921 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
2922 	u32 temp, temp1;
2923 	struct ddr2_3_4_lp2_3_info *ddr_info;
2924 
2925 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
2926 
2927 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
2928 
2929 	if (sdram_params->base.dramtype == LPDDR4) {
2930 		p_fsp_param->rd_odt_up_en = 0;
2931 		p_fsp_param->rd_odt_down_en = 1;
2932 	} else {
2933 		p_fsp_param->rd_odt_up_en =
2934 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
2935 		p_fsp_param->rd_odt_down_en =
2936 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
2937 	}
2938 
2939 	if (p_fsp_param->rd_odt_up_en)
2940 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
2941 	else if (p_fsp_param->rd_odt_down_en)
2942 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
2943 	else
2944 		p_fsp_param->rd_odt = 0;
2945 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
2946 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
2947 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
2948 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
2949 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
2950 
2951 	if (sdram_params->base.dramtype == DDR3) {
2952 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2953 			     DDR_PCTL2_INIT3);
2954 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2955 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
2956 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
2957 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2958 	} else if (sdram_params->base.dramtype == DDR4) {
2959 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2960 			     DDR_PCTL2_INIT3);
2961 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2962 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
2963 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
2964 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2965 	} else if (sdram_params->base.dramtype == LPDDR3) {
2966 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2967 			     DDR_PCTL2_INIT4);
2968 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2969 		p_fsp_param->ds_pdds = temp & 0xf;
2970 
2971 		p_fsp_param->dq_odt = lp3_odt_value;
2972 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2973 	} else if (sdram_params->base.dramtype == LPDDR4) {
2974 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2975 			     DDR_PCTL2_INIT4);
2976 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2977 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
2978 
2979 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2980 			     DDR_PCTL2_INIT6);
2981 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
2982 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
2983 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
2984 
2985 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
2986 			   readl(PHY_REG(phy_base, 0x3ce)));
2987 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
2988 			    readl(PHY_REG(phy_base, 0x3de)));
2989 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
2990 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
2991 			   readl(PHY_REG(phy_base, 0x3cf)));
2992 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
2993 			    readl(PHY_REG(phy_base, 0x3df)));
2994 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
2995 		p_fsp_param->vref_ca[0] |=
2996 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2997 		p_fsp_param->vref_ca[1] |=
2998 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2999 
3000 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3001 					      3) & 0x1;
3002 	}
3003 
3004 	p_fsp_param->noc_timings.ddrtiminga0 =
3005 		sdram_params->ch.noc_timings.ddrtiminga0;
3006 	p_fsp_param->noc_timings.ddrtimingb0 =
3007 		sdram_params->ch.noc_timings.ddrtimingb0;
3008 	p_fsp_param->noc_timings.ddrtimingc0 =
3009 		sdram_params->ch.noc_timings.ddrtimingc0;
3010 	p_fsp_param->noc_timings.devtodev0 =
3011 		sdram_params->ch.noc_timings.devtodev0;
3012 	p_fsp_param->noc_timings.ddrmode =
3013 		sdram_params->ch.noc_timings.ddrmode;
3014 	p_fsp_param->noc_timings.ddr4timing =
3015 		sdram_params->ch.noc_timings.ddr4timing;
3016 	p_fsp_param->noc_timings.agingx0 =
3017 		sdram_params->ch.noc_timings.agingx0;
3018 	p_fsp_param->noc_timings.aging0 =
3019 		sdram_params->ch.noc_timings.aging0;
3020 	p_fsp_param->noc_timings.aging1 =
3021 		sdram_params->ch.noc_timings.aging1;
3022 	p_fsp_param->noc_timings.aging2 =
3023 		sdram_params->ch.noc_timings.aging2;
3024 	p_fsp_param->noc_timings.aging3 =
3025 		sdram_params->ch.noc_timings.aging3;
3026 
3027 	p_fsp_param->flag = FSP_FLAG;
3028 }
3029 
#ifndef CONFIG_SPL_KERNEL_BOOT
/*
 * Copy the collected per-frequency (FSP) parameters into DRAM at the
 * fixed FSP_PARAM_STORE_ADDR, where later boot stages can pick them up.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
#endif
3037 
3038 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3039 			     struct sdram_cap_info *cap_info, u32 dram_type,
3040 			     u32 freq)
3041 {
3042 	u64 cs0_cap;
3043 	u32 die_cap;
3044 	u32 trfc_ns, trfc4_ns;
3045 	u32 trfc, txsnr;
3046 	u32 txs_abort_fast = 0;
3047 	u32 tmp;
3048 
3049 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3050 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3051 
3052 	switch (dram_type) {
3053 	case DDR3:
3054 		if (die_cap <= DIE_CAP_512MBIT)
3055 			trfc_ns = 90;
3056 		else if (die_cap <= DIE_CAP_1GBIT)
3057 			trfc_ns = 110;
3058 		else if (die_cap <= DIE_CAP_2GBIT)
3059 			trfc_ns = 160;
3060 		else if (die_cap <= DIE_CAP_4GBIT)
3061 			trfc_ns = 260;
3062 		else
3063 			trfc_ns = 350;
3064 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3065 		break;
3066 
3067 	case DDR4:
3068 		if (die_cap <= DIE_CAP_2GBIT) {
3069 			trfc_ns = 160;
3070 			trfc4_ns = 90;
3071 		} else if (die_cap <= DIE_CAP_4GBIT) {
3072 			trfc_ns = 260;
3073 			trfc4_ns = 110;
3074 		} else if (die_cap <= DIE_CAP_8GBIT) {
3075 			trfc_ns = 350;
3076 			trfc4_ns = 160;
3077 		} else {
3078 			trfc_ns = 550;
3079 			trfc4_ns = 260;
3080 		}
3081 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3082 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3083 		break;
3084 
3085 	case LPDDR3:
3086 		if (die_cap <= DIE_CAP_4GBIT)
3087 			trfc_ns = 130;
3088 		else
3089 			trfc_ns = 210;
3090 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3091 		break;
3092 
3093 	case LPDDR4:
3094 	case LPDDR4X:
3095 		if (die_cap <= DIE_CAP_4GBIT)
3096 			trfc_ns = 130;
3097 		else if (die_cap <= DIE_CAP_8GBIT)
3098 			trfc_ns = 180;
3099 		else if (die_cap <= DIE_CAP_16GBIT)
3100 			trfc_ns = 280;
3101 		else
3102 			trfc_ns = 380;
3103 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3104 		break;
3105 
3106 	default:
3107 		return;
3108 	}
3109 	trfc = (trfc_ns * freq + 999) / 1000;
3110 
3111 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3112 		switch (pctl_regs->pctl[i][0]) {
3113 		case DDR_PCTL2_RFSHTMG:
3114 			tmp = pctl_regs->pctl[i][1];
3115 			/* t_rfc_min */
3116 			tmp &= ~((u32)0x3ff);
3117 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3118 			pctl_regs->pctl[i][1] = tmp;
3119 			break;
3120 
3121 		case DDR_PCTL2_DRAMTMG8:
3122 			if (dram_type == DDR3 || dram_type == DDR4) {
3123 				tmp = pctl_regs->pctl[i][1];
3124 				/* t_xs_x32 */
3125 				tmp &= ~((u32)0x7f);
3126 				tmp |= ((txsnr + 63) / 64) & 0x7f;
3127 
3128 				if (dram_type == DDR4) {
3129 					/* t_xs_abort_x32 */
3130 					tmp &= ~((u32)(0x7f << 16));
3131 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3132 					/* t_xs_fast_x32 */
3133 					tmp &= ~((u32)(0x7f << 24));
3134 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3135 				}
3136 
3137 				pctl_regs->pctl[i][1] = tmp;
3138 			}
3139 			break;
3140 
3141 		case DDR_PCTL2_DRAMTMG14:
3142 			if (dram_type == LPDDR3 ||
3143 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3144 				tmp = pctl_regs->pctl[i][1];
3145 				/* t_xsr */
3146 				tmp &= ~((u32)0xfff);
3147 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3148 				pctl_regs->pctl[i][1] = tmp;
3149 			}
3150 			break;
3151 
3152 		default:
3153 			break;
3154 		}
3155 	}
3156 }
3157 
/*
 * Switch the DRAM to @freq MHz using controller frequency set point
 * @dst_fsp, then re-train and record the resulting parameters.
 *
 * The sequence is strictly ordered: quiesce the controller, program the
 * destination FSP registers, enter self-refresh, gate the DDR clocks,
 * re-lock the DPLL and PHY PLL at the new rate, ungate, switch the
 * controller to @dst_fsp, exit self-refresh, re-issue the mode
 * registers for the new rate, and finally run training.
 *
 * @dram:	  driver state (pctl/phy/cru/pmugrf register bases)
 * @sdram_params: detected configuration (dramtype, rank, bus width)
 * @freq:	  target frequency in MHz
 * @cur_freq:	  previous frequency in MHz (not referenced in this body)
 * @dst_fsp:	  destination frequency set point index
 * @dst_fsp_lp4:  LPDDR4 FSP select value, written into MR13 bit 7
 * @training_en:  not referenced in this body; high_freq_training() is
 *		  always run
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* suspend low-power features for the duration of the switch */
	lp_stat = low_power_update(dram, 0);
	/* template config for the target frequency; keep detected topology */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * Decide whether the destination setting runs with the DRAM DLL
	 * off (DDR3: MR1 bit0 set; DDR4: MR1 bit0 clear).
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* if the DLL is currently on, disable it via MR1 before switching */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* hold the PHY bufferen while clocks are being reconfigured */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	/* quasi-dynamic write: drop DFI init complete under sw_set_req/ack */
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both the old and new FSP */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate MSCH and controller clocks while the PLLs are retuned */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* hold the PHY in reset while DPLL and both PHY PLL banks relock */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* release bufferen and ungate the clocks again */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* switch the controller and PHY to the destination set point */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit5 — presumably a FIFO/pointer reset; TODO confirm */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* replay the mode registers stored for the destination FSP */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* with DLL on, pulse the DLL-reset bit via MR0 first */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: select the new FSP via bit 7 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3340 
/*
 * Walk the DRAM through every configured frequency set point.
 *
 * Frequencies f0..f3 come from the drive/ODT info blob.  In the normal
 * build, f1..f3 are trained first (FSPs 1..3) and f0 is applied last as
 * the final operating frequency; with CONFIG_SPL_KERNEL_BOOT only the
 * single switch to f0 is performed.
 *
 * NOTE(review): the ddr_set_rate() calls use the global &dram_info
 * rather than the @dram argument — identical in practice, since the
 * only caller (sdram_init) passes &dram_info.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	/* final operating frequency */
	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* clear stale FSP data both in DDR and in the local copy */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3397 
3398 int get_uart_config(void)
3399 {
3400 	struct sdram_head_info_index_v2 *index =
3401 		(struct sdram_head_info_index_v2 *)common_info;
3402 	struct global_info *gbl_info;
3403 
3404 	gbl_info = (struct global_info *)((void *)common_info +
3405 		index->global_index.offset * 4);
3406 
3407 	return gbl_info->uart_info;
3408 }
3409 
/*
 * Detect and initialize the DRAM, then bring it to its final frequency.
 *
 * Validates the v2 "common info" blob, runs detection via
 * sdram_init_detect(), cycles through the frequency set points and
 * finally stores the FSP parameters and ATAGS for later boot stages.
 *
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Sanity-check the blob header: version, per-section sizes
	 * (in 32-bit words) and non-zero section offsets.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];

	/*
	 * Apply the 2T-timing choice from the blob to bit 10 of the
	 * first pctl table entry (presumably MSTR.en_2t_timing_mode —
	 * TODO confirm against the register layout).
	 */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		/* report what was attempted before bailing out */
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	/* train/switch every frequency set point, ending at f0 */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	/* hand the detected configuration to the next stage via ATAGS */
	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3495 #endif /* CONFIG_TPL_BUILD */
3496