xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 6b2a0489fa7eaa0d1673ebcd920dcd4cec936d9f)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 /* #define DDR4_READ_GATE_PREAMBLE_MODE */
29 #ifndef DDR4_READ_GATE_PREAMBLE_MODE
30 /* DDR4 read gate normal mode conflicts with 1nCK preamble */
31 #define DDR4_READ_GATE_2NCK_PREAMBLE
32 #endif
33 
34 #define SKEW_RX_SIGNAL			(0)
35 #define SKEW_TX_SIGNAL			(1)
36 #define SKEW_CA_SIGNAL			(2)
37 
38 #define DESKEW_MDF_ABS_VAL		(0)
39 #define DESKEW_MDF_DIFF_VAL		(1)
40 
41 #ifdef CONFIG_TPL_BUILD
42 #ifndef CONFIG_TPL_TINY_FRAMEWORK
43 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
44 #endif
45 #endif
46 
47 #ifdef CONFIG_TPL_BUILD
48 
/*
 * Runtime state for the RV1126 DRAM driver: MMIO bases for the DDR
 * controller (uPCTL2) and PHY, plus pointers to the CRU, memory
 * scheduler (msch) and the various GRF register blocks touched
 * during initialization.
 */
struct dram_info {
	void __iomem *pctl;		/* uPCTL2 controller base (UPCTL2_BASE_ADDR) */
	void __iomem *phy;		/* DDR PHY base (DDR_PHY_BASE_ADDR) */
	struct rv1126_cru *cru;		/* clock/reset unit (PLL + softrst) */
	struct msch_regs *msch;		/* memory scheduler registers */
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;		/* base/size reported to the ram uclass */
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;	/* self-refresh idle timeout -- units per controller spec, TODO confirm */
	u32 pd_idle;	/* power-down idle timeout -- units per controller spec, TODO confirm */
};
61 
/* Fixed physical base addresses of the register blocks used below */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* BUS SGRF register offsets (SOC_CON13 carries the uPCTL2 reset requests) */
#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* Single global driver instance used throughout TPL DRAM init */
struct dram_info dram_info;
76 
/*
 * Candidate SDRAM parameter sets for the configured DRAM type, one entry
 * per supported frequency (328..1056 MHz), generated into .inc files.
 * The CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE values select: 3 = DDR3,
 * 0 = DDR4, 6 = LPDDR3, 7/8 = LPDDR4/LPDDR4X.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
118 
/*
 * Generated loader parameter blob; accessed as a
 * struct sdram_head_info_index_v2 whose index entries give word offsets
 * of the per-DRAM-type drive/ODT info (see get_ddr_drv_odt_info()).
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};
122 
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training result buffer exposed to the DDR test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* Per-frequency-setpoint parameters, one slot per FSP index */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* Cached LPDDR3 ODT setting -- presumably a mode-register value, TODO confirm */
static u8 lp3_odt_value;

/* Write-leveling results: [rank][byte lane] (signed delay values) */
static s8 wrlvl_result[2][4];
132 
133 /* DDR configuration 0-9 */
/*
 * DDR configuration 0-9 (DDR3/LPDDR3/LPDDR4 row/bank/col encodings).
 * Matching calculate_ddrconfig()'s packed compare value:
 *   bit[8]    = rank - 1
 *   bits[7:5] = row - 13
 *   bit[3]    = 1 when bank == 3 (8 banks)
 *   bits[2:0] = col + bw - 10
 * bit[4] is only set in table entries matched by the special dual-rank
 * path -- exact meaning not evident from this file, TODO confirm.
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
146 
147 /* DDR configuration 10-21 */
/*
 * DDR configuration 10-21 (DDR4 encodings). Matching the packed value
 * built in calculate_ddrconfig() for DDR4:
 *   bit[7]    = rank - 1
 *   bits[6:4] = row - 13
 *   bit[3]    = dual-rank with cs0_row == cs1_row (special-case path)
 *   bits[2:1] = bw
 *   bit[0]    = die_bw
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
162 
163 /* DDR configuration 22-28 */
/*
 * DDR configuration 22-28: second-pass table scanned by
 * calculate_ddrconfig() when no entry of ddr_cfg_2_rbc[] matches.
 * Same bit packing as ddr_cfg_2_rbc[]; matches map to ddrconf = i + 22.
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
173 
/*
 * {DDR4 ddrconfig, equivalent DDR3-style ddrconfig} pairs.
 * calculate_ddrconfig() translates a matched DDR4 index (10-21) to its
 * DDR3-style index, and set_ctl_address_map() translates back to pick
 * the addrmap[] row.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
188 
/*
 * Per-ddrconfig values for the uPCTL2 ADDRMAP0..ADDRMAP8 registers
 * (9 x u32, copied verbatim to DDR_PCTL2_ADDRMAP0 by
 * set_ctl_address_map(); the unlisted 9th element of each row is 0).
 * Indexed by the ddrconfig computed in calculate_ddrconfig().
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
252 
/*
 * DQ selection triples -- presumably {dq id, selector A, selector B}
 * used by the deskew/training code; consumers are outside this chunk,
 * TODO confirm column meanings against the PHY training routines.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
277 
/* Address-group bases per {CS0,CS1} x {A,B} group (macros from sdram_rv1126.h) */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
284 
/*
 * PHY register offsets holding write-leveling results, indexed
 * [rank][byte lane]; lanes live in the 0xa0 and 0xd0 register banks --
 * TODO confirm against the PHY register map.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
289 
/*
 * PHY skew-register base offsets per CS/DQS lane: the first eight are
 * RX (CS0 then CS1), the last eight TX, as named by the SKEW_UPDATE_*
 * comments below.
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
308 
/*
 * Drive the DDR controller and PHY reset lines.
 * The uPCTL2 core/AXI resets (ctl_srstn) and APB reset (ctl_psrstn) go
 * through BUS SGRF SOC_CON13; the PHY core/APB resets go through CRU
 * softrst_con[12]. Argument polarity is as encoded by the *_REQ()
 * macros -- TODO confirm 1 = assert against the TRM.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	/* controller: core, APB and AXI reset requests in one SGRF write */
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	/* PHY: core and APB reset requests via the CRU soft-reset register */
	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
320 
/*
 * Program the DPLL (cru->pll[1]) to @hz with optional spread spectrum.
 *
 * The PLL is switched to the 24 MHz crystal while being reprogrammed,
 * divider settings are chosen so that fbdiv stays in range for the
 * target frequency band, and the function busy-waits up to ~1 ms for
 * lock before switching the DPLL back in as clock source.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock-wait budget in 1 us steps */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* 1 = integer mode; cleared if ssmod is enabled */

	/* spread-spectrum settings live in the loader-params global info */
	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* pick post-dividers per frequency band; fbdiv is derived below */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz crystal: out = 24 * fbdiv / (refdiv * postdiv1 * postdiv2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is being reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* fractional mode required for spread spectrum */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* poll the lock bit; bail out with an error after ~1 ms */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}
	if (delay <= 0)
		printascii("ERROR: DPLL lock timeout!\n");

	/* switch the DPLL back in as the clock source */
	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
388 
/*
 * Set the DPLL for the requested DRAM data rate; the PHY clock runs at
 * half the DDR frequency, hence the / 2.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
395 
/*
 * Select the ddrconfig index (row of addrmap[]) matching the detected
 * DRAM geometry (rank/bw/col/row/bank from cap_info).
 *
 * DDR4 uses ddr4_cfg_2_rbc[] (indices 10-21) and the result is then
 * translated to a DDR3-style index via d4_rbc_2_d3_rbc[]; other types
 * scan ddr_cfg_2_rbc[] (0-9) and ddr_cfg_2_rbc_p2[] (22-28). Dual-rank
 * parts with equal cs0/cs1 row counts get a special first pass.
 *
 * Returns the ddrconfig index; on no match returns (u32)-1 after
 * printing an error (the > 28 check catches the -1 sentinel).
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* sentinel: no configuration matched */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank first: entries 17-20 carry bit3 set */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* general DDR4 scan: exact bw/die_bw, row/rank fit within entry */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8-bank: try entries 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/* first-pass table: configs 0-9 */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* second-pass table: configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort fallback for single-rank 8-bank, col+bw == 12 */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* translate a DDR4 index (10-21) to its DDR3-style equivalent */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
500 
501 static void sw_set_req(struct dram_info *dram)
502 {
503 	void __iomem *pctl_base = dram->pctl;
504 
505 	/* clear sw_done=0 */
506 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
507 }
508 
509 static void sw_set_ack(struct dram_info *dram)
510 {
511 	void __iomem *pctl_base = dram->pctl;
512 
513 	/* set sw_done=1 */
514 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
515 	while (1) {
516 		/* wait programming done */
517 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
518 				PCTL2_SW_DONE_ACK)
519 			break;
520 	}
521 }
522 
/*
 * Program the uPCTL2 ADDRMAP0..8 registers from addrmap[] for the
 * detected ddrconfig, then mask off unused row bits and apply the
 * DDR4 half-bus / LPDDR3 row_3_4 / single-rank tweaks.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	/* cap_info holds the DDR3-style index; map back to the DDR4 one */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	/* copy all 9 ADDRMAP words (36 bytes) in one go */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	/* LPDDR3 3/4-density parts flag the top row bit in ADDRMAP6 */
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	/* DDR4 on a half-width bus needs the PCCFG tweak */
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: disable the CS address bit in ADDRMAP0 */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
563 
/*
 * Configure or enable the DDR PHY PLL.
 *
 * Called twice: first with wait == 0 to program the dividers for @freq
 * (in Hz), then with wait == 1 to power the PLL up (clear PHY_PD_DISB
 * in reg 0x53) and busy-wait for the lock bit in reg 0x90. Hangs
 * forever after printing an error if lock is never reached.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;
	int delay = 1000;	/* lock-wait budget in 1 us steps */

	if (wait) {
		/* power the PLL up and wait for lock */
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK)) {
			udelay(1);
			if (delay-- <= 0) {
				printascii("ERROR: phy pll lock timeout!\n");
				while (1)
					;
			}
		}
	} else {
		/* select dividers per frequency band, then program them */
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is 9 bits: low 8 in reg 0x50, bit 9 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
609 
/*
 * DDR3 PHY driver strength: {register setting, ohms}, ordered from
 * weakest (highest ohm) to strongest; the lookup loops in set_ds_odt()
 * scan from the end upward and rely on this descending ohm order.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
635 
/*
 * DDR3 PHY ODT: {register setting, ohms}, descending after the
 * leading "disabled" entry; scanned bottom-up by set_ds_odt().
 */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};
662 
/*
 * DDR4/LPDDR3 PHY driver strength: {register setting, ohms}, ordered
 * from weakest (highest ohm) to strongest for the bottom-up lookup in
 * set_ds_odt().
 */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};
688 
689 static u16 d4lp3_phy_odt_2_ohm[][2] = {
690 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
691 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
692 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
693 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
694 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
695 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
696 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
697 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
698 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
699 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
700 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
701 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
702 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
703 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
704 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
705 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
706 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
707 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
708 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
709 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
710 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
711 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
712 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
713 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
714 };
715 
/*
 * LPDDR4/LPDDR4X PHY driver strength: {register setting, ohms},
 * ordered from weakest (highest ohm) to strongest for the bottom-up
 * lookup in set_ds_odt().
 */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};
741 
/*
 * LPDDR4/LPDDR4X PHY ODT: {register setting, ohms}, descending after
 * the leading "disabled" entry; scanned bottom-up by set_ds_odt().
 */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm,	87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};
768 
769 static u32 lp4_odt_calc(u32 odt_ohm)
770 {
771 	u32 odt;
772 
773 	if (odt_ohm == 0)
774 		odt = LPDDR4_DQODT_DIS;
775 	else if (odt_ohm <= 40)
776 		odt = LPDDR4_DQODT_40;
777 	else if (odt_ohm <= 48)
778 		odt = LPDDR4_DQODT_48;
779 	else if (odt_ohm <= 60)
780 		odt = LPDDR4_DQODT_60;
781 	else if (odt_ohm <= 80)
782 		odt = LPDDR4_DQODT_80;
783 	else if (odt_ohm <= 120)
784 		odt = LPDDR4_DQODT_120;
785 	else
786 		odt = LPDDR4_DQODT_240;
787 
788 	return odt;
789 }
790 
791 static void *get_ddr_drv_odt_info(u32 dramtype)
792 {
793 	struct sdram_head_info_index_v2 *index =
794 		(struct sdram_head_info_index_v2 *)common_info;
795 	void *ddr_info = 0;
796 
797 	if (dramtype == DDR4)
798 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
799 	else if (dramtype == DDR3)
800 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
801 	else if (dramtype == LPDDR3)
802 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
803 	else if (dramtype == LPDDR4)
804 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
805 	else if (dramtype == LPDDR4X)
806 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
807 	else
808 		printascii("unsupported dram type\n");
809 	return ddr_info;
810 }
811 
/*
 * Program the LPDDR4/LPDDR4X CA (MR12) and DQ (MR14) Vref values for
 * the destination frequency setpoint.
 *
 * Vref percentages come from lp4_info, chosen by whether DRAM-side ODT
 * is enabled at @freq_mhz, then encoded into the 7-bit MR format
 * (bit 6 selects the range, bits 5:0 the step) and written into the
 * controller INIT6/INIT7 shadow registers inside a sw_set_req/ack
 * quasi-dynamic window. The LPDDR4X branch scales and clamps with
 * different constants -- encoding constants per JEDEC MR12/MR14
 * tables, TODO confirm against the spec.
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	/* pick odt-on or odt-off Vref depending on the target frequency */
	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* clamp to the encodable window, then encode range+step */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		/* LPDDR4X: rescale first, then clamp/encode with its constants */
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* write MR12/MR14 into the INIT6/INIT7 fields for dst_fsp */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
882 
883 static void set_ds_odt(struct dram_info *dram,
884 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
885 {
886 	void __iomem *phy_base = dram->phy;
887 	void __iomem *pctl_base = dram->pctl;
888 	u32 dramtype = sdram_params->base.dramtype;
889 	struct ddr2_3_4_lp2_3_info *ddr_info;
890 	struct lp4_info *lp4_info;
891 	u32 i, j, tmp;
892 	const u16 (*p_drv)[2];
893 	const u16 (*p_odt)[2];
894 	u32 drv_info, sr_info;
895 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
896 	u32 phy_odt_ohm, dram_odt_ohm;
897 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
898 	u32 phy_odt_up_en, phy_odt_dn_en;
899 	u32 sr_dq, sr_clk;
900 	u32 freq = sdram_params->base.ddr_freq;
901 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
902 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
903 	u32 phy_dq_drv = 0;
904 	u32 phy_odt_up = 0, phy_odt_dn = 0;
905 
906 	ddr_info = get_ddr_drv_odt_info(dramtype);
907 	lp4_info = (void *)ddr_info;
908 
909 	if (!ddr_info)
910 		return;
911 
912 	/* dram odt en freq control phy drv, dram odt and phy sr */
913 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
914 		drv_info = ddr_info->drv_when_odtoff;
915 		dram_odt_ohm = 0;
916 		sr_info = ddr_info->sr_when_odtoff;
917 		phy_lp4_drv_pd_en =
918 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
919 	} else {
920 		drv_info = ddr_info->drv_when_odten;
921 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
922 		sr_info = ddr_info->sr_when_odten;
923 		phy_lp4_drv_pd_en =
924 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
925 	}
926 	phy_dq_drv_ohm =
927 		DRV_INFO_PHY_DQ_DRV(drv_info);
928 	phy_clk_drv_ohm =
929 		DRV_INFO_PHY_CLK_DRV(drv_info);
930 	phy_ca_drv_ohm =
931 		DRV_INFO_PHY_CA_DRV(drv_info);
932 
933 	sr_dq = DQ_SR_INFO(sr_info);
934 	sr_clk = CLK_SR_INFO(sr_info);
935 
936 	/* phy odt en freq control dram drv and phy odt */
937 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
938 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
939 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
940 		phy_odt_ohm = 0;
941 		phy_odt_up_en = 0;
942 		phy_odt_dn_en = 0;
943 	} else {
944 		dram_drv_ohm =
945 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
946 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
947 		phy_odt_up_en =
948 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
949 		phy_odt_dn_en =
950 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
951 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
952 	}
953 
954 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 		if (phy_odt_ohm) {
956 			phy_odt_up_en = 0;
957 			phy_odt_dn_en = 1;
958 		}
959 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
960 			dram_caodt_ohm = 0;
961 		else
962 			dram_caodt_ohm =
963 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
964 	}
965 
966 	if (dramtype == DDR3) {
967 		p_drv = d3_phy_drv_2_ohm;
968 		p_odt = d3_phy_odt_2_ohm;
969 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
970 		p_drv = lp4_phy_drv_2_ohm;
971 		p_odt = lp4_phy_odt_2_ohm;
972 	} else {
973 		p_drv = d4lp3_phy_drv_2_ohm;
974 		p_odt = d4lp3_phy_odt_2_ohm;
975 	}
976 
977 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
978 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
979 			phy_dq_drv = **(p_drv + i);
980 			break;
981 		}
982 		if (i == 0)
983 			break;
984 	}
985 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
986 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
987 			phy_clk_drv = **(p_drv + i);
988 			break;
989 		}
990 		if (i == 0)
991 			break;
992 	}
993 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
994 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
995 			phy_ca_drv = **(p_drv + i);
996 			break;
997 		}
998 		if (i == 0)
999 			break;
1000 	}
1001 	if (!phy_odt_ohm)
1002 		phy_odt = 0;
1003 	else
1004 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
1005 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
1006 				phy_odt = **(p_odt + i);
1007 				break;
1008 			}
1009 			if (i == 0)
1010 				break;
1011 		}
1012 
1013 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
1014 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1015 			vref_inner = 0x80;
1016 		else if (phy_odt_up_en)
1017 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1018 				     (dram_drv_ohm + phy_odt_ohm);
1019 		else
1020 			vref_inner = phy_odt_ohm * 128 /
1021 				(phy_odt_ohm + dram_drv_ohm);
1022 
1023 		if (dramtype != DDR3 && dram_odt_ohm)
1024 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1025 				   (phy_dq_drv_ohm + dram_odt_ohm);
1026 		else
1027 			vref_out = 0x80;
1028 	} else {
1029 		/* for lp4 and lp4x*/
1030 		if (phy_odt_ohm)
1031 			vref_inner =
1032 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1033 				 256) / 1000;
1034 		else
1035 			vref_inner =
1036 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1037 				 256) / 1000;
1038 
1039 		vref_out = 0x80;
1040 	}
1041 
1042 	/* default ZQCALIB bypass mode */
1043 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1044 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1045 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1046 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1047 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1048 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1049 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1050 	} else {
1051 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1052 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1053 	}
1054 	/* clk / cmd slew rate */
1055 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1056 
1057 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1058 	if (phy_odt_up_en)
1059 		phy_odt_up = phy_odt;
1060 	if (phy_odt_dn_en)
1061 		phy_odt_dn = phy_odt;
1062 
1063 	for (i = 0; i < 4; i++) {
1064 		j = 0x110 + i * 0x10;
1065 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1066 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1067 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1068 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1069 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1070 
1071 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1072 				1 << 3, phy_lp4_drv_pd_en << 3);
1073 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1074 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1075 		/* dq slew rate */
1076 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1077 				0x1f, sr_dq);
1078 	}
1079 
1080 	/* reg_rx_vref_value_update */
1081 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1082 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1083 
1084 	/* RAM VREF */
1085 	writel(vref_out, PHY_REG(phy_base, 0x105));
1086 	if (dramtype == LPDDR3)
1087 		udelay(100);
1088 
1089 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1090 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1091 
1092 	if (dramtype == DDR3 || dramtype == DDR4) {
1093 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1094 				DDR_PCTL2_INIT3);
1095 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1096 	} else {
1097 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1098 				DDR_PCTL2_INIT4);
1099 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1100 	}
1101 
1102 	if (dramtype == DDR3) {
1103 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1104 		if (dram_drv_ohm == 34)
1105 			mr1_mr3 |= DDR3_DS_34;
1106 
1107 		if (dram_odt_ohm == 0)
1108 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1109 		else if (dram_odt_ohm <= 40)
1110 			mr1_mr3 |= DDR3_RTT_NOM_40;
1111 		else if (dram_odt_ohm <= 60)
1112 			mr1_mr3 |= DDR3_RTT_NOM_60;
1113 		else
1114 			mr1_mr3 |= DDR3_RTT_NOM_120;
1115 
1116 	} else if (dramtype == DDR4) {
1117 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1118 		if (dram_drv_ohm == 48)
1119 			mr1_mr3 |= DDR4_DS_48;
1120 
1121 		if (dram_odt_ohm == 0)
1122 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1123 		else if (dram_odt_ohm <= 34)
1124 			mr1_mr3 |= DDR4_RTT_NOM_34;
1125 		else if (dram_odt_ohm <= 40)
1126 			mr1_mr3 |= DDR4_RTT_NOM_40;
1127 		else if (dram_odt_ohm <= 48)
1128 			mr1_mr3 |= DDR4_RTT_NOM_48;
1129 		else if (dram_odt_ohm <= 60)
1130 			mr1_mr3 |= DDR4_RTT_NOM_60;
1131 		else
1132 			mr1_mr3 |= DDR4_RTT_NOM_120;
1133 
1134 	} else if (dramtype == LPDDR3) {
1135 		if (dram_drv_ohm <= 34)
1136 			mr1_mr3 |= LPDDR3_DS_34;
1137 		else if (dram_drv_ohm <= 40)
1138 			mr1_mr3 |= LPDDR3_DS_40;
1139 		else if (dram_drv_ohm <= 48)
1140 			mr1_mr3 |= LPDDR3_DS_48;
1141 		else if (dram_drv_ohm <= 60)
1142 			mr1_mr3 |= LPDDR3_DS_60;
1143 		else if (dram_drv_ohm <= 80)
1144 			mr1_mr3 |= LPDDR3_DS_80;
1145 
1146 		if (dram_odt_ohm == 0)
1147 			lp3_odt_value = LPDDR3_ODT_DIS;
1148 		else if (dram_odt_ohm <= 60)
1149 			lp3_odt_value = LPDDR3_ODT_60;
1150 		else if (dram_odt_ohm <= 120)
1151 			lp3_odt_value = LPDDR3_ODT_120;
1152 		else
1153 			lp3_odt_value = LPDDR3_ODT_240;
1154 	} else {/* for lpddr4 and lpddr4x */
1155 		/* MR3 for lp4 PU-CAL and PDDS */
1156 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1157 		mr1_mr3 |= lp4_pu_cal;
1158 
1159 		tmp = lp4_odt_calc(dram_drv_ohm);
1160 		if (!tmp)
1161 			tmp = LPDDR4_PDDS_240;
1162 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1163 
1164 		/* MR11 for lp4 ca odt, dq odt set */
1165 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1166 			     DDR_PCTL2_INIT6);
1167 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1168 
1169 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1170 
1171 		tmp = lp4_odt_calc(dram_odt_ohm);
1172 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1173 
1174 		tmp = lp4_odt_calc(dram_caodt_ohm);
1175 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1176 		sw_set_req(dram);
1177 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1178 				DDR_PCTL2_INIT6,
1179 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1180 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1181 		sw_set_ack(dram);
1182 
1183 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1184 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1185 			     DDR_PCTL2_INIT7);
1186 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1187 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1188 
1189 		tmp = lp4_odt_calc(phy_odt_ohm);
1190 		mr22 |= tmp;
1191 		mr22 = mr22 |
1192 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1193 			LPDDR4_ODTE_CK_SHIFT) |
1194 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1195 			LPDDR4_ODTE_CS_SHIFT) |
1196 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1197 			LPDDR4_ODTD_CA_SHIFT);
1198 
1199 		sw_set_req(dram);
1200 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1201 				DDR_PCTL2_INIT7,
1202 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1203 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1204 		sw_set_ack(dram);
1205 	}
1206 
1207 	if (dramtype == DDR4 || dramtype == DDR3) {
1208 		sw_set_req(dram);
1209 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1210 				DDR_PCTL2_INIT3,
1211 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1212 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1213 		sw_set_ack(dram);
1214 	} else {
1215 		sw_set_req(dram);
1216 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1217 				DDR_PCTL2_INIT4,
1218 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1219 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1220 		sw_set_ack(dram);
1221 	}
1222 }
1223 
1224 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1225 				   struct rv1126_sdram_params *sdram_params)
1226 {
1227 	void __iomem *phy_base = dram->phy;
1228 	u32 dramtype = sdram_params->base.dramtype;
1229 	struct sdram_head_info_index_v2 *index =
1230 		(struct sdram_head_info_index_v2 *)common_info;
1231 	struct dq_map_info *map_info;
1232 
1233 	map_info = (struct dq_map_info *)((void *)common_info +
1234 		index->dq_map_index.offset * 4);
1235 
1236 	if (dramtype == LPDDR4X)
1237 		dramtype = LPDDR4;
1238 
1239 	if (dramtype <= LPDDR4)
1240 		writel((map_info->byte_map[dramtype / 4] >>
1241 			((dramtype % 4) * 8)) & 0xff,
1242 		       PHY_REG(phy_base, 0x4f));
1243 
1244 	return 0;
1245 }
1246 
1247 static void phy_cfg(struct dram_info *dram,
1248 		    struct rv1126_sdram_params *sdram_params)
1249 {
1250 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1251 	void __iomem *phy_base = dram->phy;
1252 	u32 i, dq_map, tmp;
1253 	u32 byte1 = 0, byte0 = 0;
1254 
1255 	sdram_cmd_dq_path_remap(dram, sdram_params);
1256 
1257 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
1258 	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
1259 		writel(sdram_params->phy_regs.phy[i][1],
1260 		       phy_base + sdram_params->phy_regs.phy[i][0]);
1261 	}
1262 
1263 	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
1264 	dq_map = readl(PHY_REG(phy_base, 0x4f));
1265 	for (i = 0; i < 4; i++) {
1266 		if (((dq_map >> (i * 2)) & 0x3) == 0) {
1267 			byte0 = i;
1268 			break;
1269 		}
1270 	}
1271 	for (i = 0; i < 4; i++) {
1272 		if (((dq_map >> (i * 2)) & 0x3) == 1) {
1273 			byte1 = i;
1274 			break;
1275 		}
1276 	}
1277 
1278 	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
1279 	if (cap_info->bw == 2)
1280 		tmp |= 0xf;
1281 	else if (cap_info->bw == 1)
1282 		tmp |= ((1 << byte0) | (1 << byte1));
1283 	else
1284 		tmp |= (1 << byte0);
1285 
1286 	writel(tmp, PHY_REG(phy_base, 0xf));
1287 
1288 	/* lpddr4 odt control by phy, enable cs0 odt */
1289 	if (sdram_params->base.dramtype == LPDDR4 ||
1290 	    sdram_params->base.dramtype == LPDDR4X)
1291 		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
1292 				(1 << 6) | (1 << 4));
1293 	/* for ca training ca vref choose range1 */
1294 	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
1295 	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
1296 	/* for wr training PHY_0x7c[5], choose range0 */
1297 	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
1298 }
1299 
1300 static int update_refresh_reg(struct dram_info *dram)
1301 {
1302 	void __iomem *pctl_base = dram->pctl;
1303 	u32 ret;
1304 
1305 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1306 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1307 
1308 	return 0;
1309 }
1310 
1311 /*
1312  * rank = 1: cs0
1313  * rank = 2: cs1
1314  */
1315 u32 read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1316 {
1317 	u32 ret;
1318 	u32 i, temp;
1319 	void __iomem *pctl_base = dram->pctl;
1320 	struct sdram_head_info_index_v2 *index =
1321 		(struct sdram_head_info_index_v2 *)common_info;
1322 	struct dq_map_info *map_info;
1323 
1324 	map_info = (struct dq_map_info *)((void *)common_info +
1325 		index->dq_map_index.offset * 4);
1326 
1327 	pctl_read_mr(pctl_base, rank, mr_num);
1328 
1329 	if (dramtype == LPDDR3) {
1330 		temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1331 		ret = 0;
1332 		for (i = 0; i < 8; i++)
1333 			ret |= ((temp >> i) & 0x1) << ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
1334 	} else {
1335 		ret = readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff;
1336 	}
1337 
1338 	return ret;
1339 }
1340 
1341 static void enter_sr(struct dram_info *dram, u32 en)
1342 {
1343 	void __iomem *pctl_base = dram->pctl;
1344 
1345 	if (en) {
1346 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1347 		while (1) {
1348 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1349 			      PCTL2_SELFREF_TYPE_MASK) ==
1350 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1351 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1352 			      PCTL2_OPERATING_MODE_MASK) ==
1353 			     PCTL2_OPERATING_MODE_SR))
1354 				break;
1355 		}
1356 	} else {
1357 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1358 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1359 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1360 			continue;
1361 	}
1362 }
1363 
1364 void record_dq_prebit(struct dram_info *dram)
1365 {
1366 	u32 group, i, tmp;
1367 	void __iomem *phy_base = dram->phy;
1368 
1369 	for (group = 0; group < 4; group++) {
1370 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1371 			/* l_loop_invdelaysel */
1372 			writel(dq_sel[i][0], PHY_REG(phy_base,
1373 						     grp_addr[group] + 0x2c));
1374 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1375 			writel(tmp, PHY_REG(phy_base,
1376 					    grp_addr[group] + dq_sel[i][1]));
1377 
1378 			/* r_loop_invdelaysel */
1379 			writel(dq_sel[i][0], PHY_REG(phy_base,
1380 						     grp_addr[group] + 0x2d));
1381 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1382 			writel(tmp, PHY_REG(phy_base,
1383 					    grp_addr[group] + dq_sel[i][2]));
1384 		}
1385 	}
1386 }
1387 
/* Latch the RX DQ pre-bit de-skew values into the PHY. */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* clear bits 1 and 6, pulse bit 4 of PHY 0x70 to trigger the update */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1397 
/* Latch the TX DQ pre-bit de-skew values into the PHY. */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* clear PHY 0x7a[1] (wr-train enable), set 0x2[3], pulse 0xc[6] */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1408 
/* Latch the CA pre-bit de-skew values into the PHY. */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* clear PHY 0x25[2], then pulse 0x22[6] to trigger the update */
	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1418 
1419 /*
1420  * dir: 0: de-skew = delta_*
1421  *	1: de-skew = reg val - delta_*
1422  * delta_dir: value for differential signal: clk/
1423  * delta_sig: value for single signal: ca/cmd
1424  */
1425 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1426 			     int delta_sig, u32 cs, u32 dramtype)
1427 {
1428 	void __iomem *phy_base = dram->phy;
1429 	u32 i, cs_en, tmp;
1430 	u32 dfi_lp_stat = 0;
1431 
1432 	if (cs == 0)
1433 		cs_en = 1;
1434 	else if (cs == 2)
1435 		cs_en = 2;
1436 	else
1437 		cs_en = 3;
1438 
1439 	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
1440 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1441 		dfi_lp_stat = 1;
1442 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1443 	}
1444 	enter_sr(dram, 1);
1445 
1446 	for (i = 0; i < 0x20; i++) {
1447 		if (dir == DESKEW_MDF_ABS_VAL)
1448 			tmp = delta_sig;
1449 		else
1450 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1451 			      delta_sig;
1452 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1453 	}
1454 
1455 	if (dir == DESKEW_MDF_ABS_VAL)
1456 		tmp = delta_dif;
1457 	else
1458 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1459 		       delta_sig + delta_dif;
1460 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1461 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1462 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1463 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1464 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1465 
1466 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1467 		update_ca_prebit(dram);
1468 	}
1469 	enter_sr(dram, 0);
1470 
1471 	if (dfi_lp_stat)
1472 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1473 
1474 }
1475 
1476 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1477 {
1478 	u32 i, j, offset = 0;
1479 	u32 min = 0x3f;
1480 	void __iomem *phy_base = dram->phy;
1481 	u32 byte_en;
1482 
1483 	if (signal == SKEW_TX_SIGNAL)
1484 		offset = 8;
1485 
1486 	if (signal == SKEW_CA_SIGNAL) {
1487 		for (i = 0; i < 0x20; i++)
1488 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1489 	} else {
1490 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1491 		for (j = offset; j < offset + rank * 4; j++) {
1492 			if (!((byte_en >> (j % 4)) & 1))
1493 				continue;
1494 			for (i = 0; i < 11; i++)
1495 				min = MIN(min,
1496 					  readl(PHY_REG(phy_base,
1497 							dqs_dq_skew_adr[j] +
1498 							i)));
1499 		}
1500 	}
1501 
1502 	return min;
1503 }
1504 
1505 static u32 low_power_update(struct dram_info *dram, u32 en)
1506 {
1507 	void __iomem *pctl_base = dram->pctl;
1508 	u32 lp_stat = 0;
1509 
1510 	if (en) {
1511 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1512 	} else {
1513 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1514 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1515 	}
1516 
1517 	return lp_stat;
1518 }
1519 
1520 /*
1521  * signal:
1522  * dir: 0: de-skew = delta_*
1523  *	1: de-skew = reg val - delta_*
1524  * delta_dir: value for differential signal: dqs
1525  * delta_sig: value for single signal: dq/dm
1526  */
1527 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1528 			     int delta_dif, int delta_sig, u32 rank)
1529 {
1530 	void __iomem *phy_base = dram->phy;
1531 	u32 i, j, tmp, offset;
1532 	u32 byte_en;
1533 
1534 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1535 
1536 	if (signal == SKEW_RX_SIGNAL)
1537 		offset = 0;
1538 	else
1539 		offset = 8;
1540 
1541 	for (j = offset; j < (offset + rank * 4); j++) {
1542 		if (!((byte_en >> (j % 4)) & 1))
1543 			continue;
1544 		for (i = 0; i < 0x9; i++) {
1545 			if (dir == DESKEW_MDF_ABS_VAL)
1546 				tmp = delta_sig;
1547 			else
1548 				tmp = delta_sig + readl(PHY_REG(phy_base,
1549 							dqs_dq_skew_adr[j] +
1550 							i));
1551 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1552 		}
1553 		if (dir == DESKEW_MDF_ABS_VAL)
1554 			tmp = delta_dif;
1555 		else
1556 			tmp = delta_dif + readl(PHY_REG(phy_base,
1557 						dqs_dq_skew_adr[j] + 9));
1558 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1559 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1560 	}
1561 	if (signal == SKEW_RX_SIGNAL)
1562 		update_dq_rx_prebit(dram);
1563 	else
1564 		update_dq_tx_prebit(dram);
1565 }
1566 
1567 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1568 {
1569 	void __iomem *phy_base = dram->phy;
1570 	u32 ret;
1571 	u32 dis_auto_zq = 0;
1572 	u32 odt_val_up, odt_val_dn;
1573 	u32 i, j;
1574 #if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1575 	void __iomem *pctl_base = dram->pctl;
1576 	u32 mr4_d4 = 0;
1577 #endif
1578 
1579 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1580 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1581 
1582 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1583 		for (i = 0; i < 4; i++) {
1584 			j = 0x110 + i * 0x10;
1585 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1586 			       PHY_REG(phy_base, j));
1587 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1588 			       PHY_REG(phy_base, j + 0x1));
1589 		}
1590 	}
1591 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1592 	/* use normal read mode for data training */
1593 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1594 
1595 	if (dramtype == DDR4) {
1596 #if defined(DDR4_READ_GATE_PREAMBLE_MODE)
1597 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1598 #elif defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1599 		mr4_d4 = readl(pctl_base + DDR_PCTL2_INIT6) >> PCTL2_DDR4_MR4_SHIFT & PCTL2_MR_MASK;
1600 		/* 2nCK Read Preamble */
1601 		pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4 | BIT(11), DDR4);
1602 #endif
1603 	}
1604 
1605 	/* choose training cs */
1606 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1607 	/* enable gate training */
1608 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1609 	udelay(50);
1610 	ret = readl(PHY_REG(phy_base, 0x91));
1611 	/* disable gate training */
1612 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1613 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1614 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1615 
1616 #if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1617 	if (dramtype == DDR4) {
1618 		pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4, DDR4);
1619 	}
1620 #endif
1621 
1622 	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1623 
1624 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1625 		for (i = 0; i < 4; i++) {
1626 			j = 0x110 + i * 0x10;
1627 			writel(odt_val_dn, PHY_REG(phy_base, j));
1628 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1629 		}
1630 	}
1631 	return ret;
1632 }
1633 
/*
 * Write leveling for one chip select.
 *
 * The MR1 (DDR3/DDR4) / MR value from INIT3 of the current fsp is
 * passed to the PHY via regs 0x3/0x4; on dual-rank DDR3/DDR4 the other
 * rank's output is disabled through MR1 bit 12 for the duration and
 * re-enabled afterwards.  On leveling timeout a message is printed and
 * the function hangs deliberately (boot cannot continue).
 * Returns 0 on success.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch the MR value for the current frequency set point */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* done when the per-lane status matches the enabled-lane mask */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1696 
/* data pattern used by the training steps (alternating 0xaa/0x55 bytes) */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1703 
/*
 * Read training for one chip select using the PHY's automatic
 * read-train engine.
 *
 * For DDR3 with the default 0x80 rx vref, the per-lane vref is lowered
 * by 0xa during training and restored afterwards.  The PHY refresh
 * timing (trefi/trfc) is mirrored from the controller's RFSHTMG so the
 * PHY can keep refreshing during training.  For DDR4 the per-cs DQ map
 * from the common info block is programmed first.
 * Returns 0 on success, -1 on invalid cs, timeout, or training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* DDR3 with default vref: lower rx vref by 0xa while training */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original rx vref lowered above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1820 
/*
 * Write training for one chip select using the PHY's automatic
 * write-train engine.
 *
 * For LPDDR3 at <= 400MHz the PHY CL/CWL fields are temporarily forced
 * to 8/4 with MR2 set to 0x6, and restored after training.  The PHY
 * refresh timing is mirrored from the controller.  On LPDDR4/LPDDR4X
 * the resulting write vref (averaged over both channels, plus the
 * range bit from PHY 0x7c[5]) is stored in fsp_param for later DFS use.
 * Hangs deliberately on timeout; returns -1 on a reported training
 * error, 0 on success.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: temporarily force CL=8/CWL=4 for training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram->pctl, 0x3);

	/* wait for the train-done flag */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the CL/CWL and MR2 values changed above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1933 
1934 static int data_training(struct dram_info *dram, u32 cs,
1935 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1936 			 u32 training_flag)
1937 {
1938 	u32 ret = 0;
1939 
1940 	if (training_flag == FULL_TRAINING)
1941 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1942 				WRITE_TRAINING | READ_TRAINING;
1943 
1944 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1945 		ret = data_training_wl(dram, cs,
1946 				       sdram_params->base.dramtype,
1947 				       sdram_params->ch.cap_info.rank);
1948 		if (ret != 0)
1949 			goto out;
1950 	}
1951 
1952 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1953 		ret = data_training_rg(dram, cs,
1954 				       sdram_params->base.dramtype);
1955 		if (ret != 0)
1956 			goto out;
1957 	}
1958 
1959 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1960 		ret = data_training_rd(dram, cs,
1961 				       sdram_params->base.dramtype,
1962 				       sdram_params->base.ddr_freq);
1963 		if (ret != 0)
1964 			goto out;
1965 	}
1966 
1967 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1968 		ret = data_training_wr(dram, cs,
1969 				       sdram_params->base.dramtype,
1970 				       sdram_params->base.ddr_freq, dst_fsp);
1971 		if (ret != 0)
1972 			goto out;
1973 	}
1974 
1975 out:
1976 	return ret;
1977 }
1978 
1979 static int get_wrlvl_val(struct dram_info *dram,
1980 			 struct rv1126_sdram_params *sdram_params)
1981 {
1982 	int i, j, clk_skew;
1983 	void __iomem *phy_base = dram->phy;
1984 	u32 lp_stat;
1985 	int ret;
1986 
1987 	lp_stat = low_power_update(dram, 0);
1988 
1989 	clk_skew = 0x1f;
1990 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1991 			 sdram_params->base.dramtype);
1992 
1993 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1994 	if (sdram_params->ch.cap_info.rank == 2)
1995 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1996 
1997 	for (j = 0; j < 2; j++)
1998 		for (i = 0; i < 4; i++)
1999 			wrlvl_result[j][i] =
2000 				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
2001 				clk_skew;
2002 
2003 	low_power_update(dram, lp_stat);
2004 
2005 	return ret;
2006 }
2007 
2008 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2009 static void init_rw_trn_result_struct(struct rw_trn_result *result,
2010 				      void __iomem *phy_base, u8 cs_num)
2011 {
2012 	int i;
2013 
2014 	result->cs_num = cs_num;
2015 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
2016 			  PHY_DQ_WIDTH_MASK;
2017 	for (i = 0; i < FSP_NUM; i++)
2018 		result->fsp_mhz[i] = 0;
2019 }
2020 
2021 static void save_rw_trn_min_max(void __iomem *phy_base,
2022 				struct cs_rw_trn_result *rd_result,
2023 				struct cs_rw_trn_result *wr_result,
2024 				u8 byte_en)
2025 {
2026 	u16 phy_ofs;
2027 	u8 dqs;
2028 	u8 dq;
2029 
2030 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2031 		if ((byte_en & BIT(dqs)) == 0)
2032 			continue;
2033 
2034 		/* Channel A or B (low or high 16 bit) */
2035 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2036 		/* low or high 8 bit */
2037 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2038 		for (dq = 0; dq < 8; dq++) {
2039 			rd_result->dqs[dqs].dq_min[dq] =
2040 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2041 			rd_result->dqs[dqs].dq_max[dq] =
2042 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2043 			wr_result->dqs[dqs].dq_min[dq] =
2044 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2045 			wr_result->dqs[dqs].dq_max[dq] =
2046 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2047 		}
2048 	}
2049 }
2050 
/*
 * Dump the per-bit and per-DQS deskew registers for every chip-select into
 * the ddr_tool result structure, together with the minimum value that was
 * subtracted during normalization.
 *
 * @rw selects the register bank: SKEW_RX_SIGNAL (0) reads the RX deskew
 * registers (base + 0x1), anything else the TX ones (base + 0x17).
 */
static void save_rw_trn_deskew(void __iomem *phy_base,
			       struct fsp_rw_trn_result *result, u8 cs_num,
			       int min_val, bool rw)
{
	u16 phy_ofs;
	u8 cs;
	u8 dq;

	result->min_val = min_val;

	for (cs = 0; cs < cs_num; cs++) {
		/* cs0 block starts at 0x170, cs1 at 0x1a0 */
		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
		/*
		 * dqs0/dqs1 registers sit at +0x0/+0xb from the base,
		 * dqs2/dqs3 in a second group 0x60 higher.
		 */
		for (dq = 0; dq < 8; dq++) {
			result->cs[cs].dqs[0].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + dq));
			result->cs[cs].dqs[1].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
			result->cs[cs].dqs[2].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
			result->cs[cs].dqs[3].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
		}

		/* the DQS line's own deskew lives at +0x8 of each group */
		result->cs[cs].dqs[0].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x8));
		result->cs[cs].dqs[1].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
		result->cs[cs].dqs[2].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
		result->cs[cs].dqs[3].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
	}
}
2085 
/*
 * Publish the collected training results to a fixed DRAM address; the flag
 * lets the ddr_tool locate and validate the structure later.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2091 #endif
2092 
/*
 * Full training at the target (high) frequency for one fsp:
 *  1. derive clk/CA deskew presets from the earlier write-leveling results,
 *  2. run read-gate/read/write training per rank,
 *  3. normalize the recorded per-bit deskew so the minimum becomes zero.
 * Returns 0 on success, non-zero if any training step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	/* average write-leveling result over all enabled bytes and ranks */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew /
		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));

	/* center the clock so the average dqs lands at 0x20 */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/*
		 * LPDDR4/X: shift clk/CA just enough to keep the most
		 * negative write-leveling result non-negative.
		 */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
				if ((byte_en & BIT(i)) != 0)
					min_val = MIN(wrlvl_result[j][i], min_val);
			}

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* preset cs0 write-dqs defaults (PHY 0x233/0x237/0x2b3/0x2b7) */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same preset and training for cs1 */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* re-base RX deskew so its minimum becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* same for TX, bounded by both TX and CA minimums */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* deskew changed: redo read-gate training so gates line up again */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2207 
2208 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2209 {
2210 	writel(ddrconfig, &dram->msch->deviceconf);
2211 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2212 }
2213 
/*
 * Program the NoC/memory-scheduler timing registers from the pre-computed
 * values, after patching the fields that depend on the detected bus width
 * and the controller's burst length.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* bw in bits; burst length read back from MSTR[19:16] (in 2s) */
	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	/* penalty scales with bytes per burst (bl * bw / 8) */
	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		/* masked-write size depends on bus width for LPDDR4/X */
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
2256 
/*
 * Configure the DDR "split" feature, used when the low 16 bits of the bus
 * back more capacity than the high bits (asymmetric row counts).  Applies
 * only to a single-rank setup below 4 GB; otherwise leaves the split
 * untouched.  Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);

	/* The ddr split only support 1 rank and less than 4GB capacity. */
	if ((cs_cap[1]) || (cs_cap[0] >= 0x100000000ULL))
		goto out;

	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* fully-accessible part of cs0 = cap / 2^(row diff) */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		/*
		 * NOTE(review): rank == 2 implies cs_cap[1] != 0, which was
		 * rejected above — this branch looks unreachable; confirm.
		 */
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
				cap_info->cs1_high16bit_row));
	} else {
		goto out;
	}
	/* split boundary in 16 MB units (cap >> 24) */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route all AXI traffic through the scheduler (no bypass) */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2307 
2308 static void split_bypass(struct dram_info *dram)
2309 {
2310 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2311 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2312 		return;
2313 
2314 	/* bypass split */
2315 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2316 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2317 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2318 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2319 		     (0x0 << SPLIT_SIZE_OFFSET));
2320 }
2321 
/*
 * Final system-level configuration: program ddrconfig into the scheduler,
 * publish the detected geometry to the pmugrf OS registers, and set the
 * per-cs device sizes in the memory scheduler.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/*
		 * cs1 starts at 2^cs_pst (cs bit position from ADDRMAP0);
		 * for cs_pst > 28 report cs0 as that full span.
		 * NOTE(review): rationale inferred from the address map —
		 * confirm against the scheduler's devicesize semantics.
		 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize fields are in 64 MB units: (cap >> 20) / 64 per cs */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2354 
2355 static void enable_low_power(struct dram_info *dram,
2356 			     struct rv1126_sdram_params *sdram_params)
2357 {
2358 	void __iomem *pctl_base = dram->pctl;
2359 	u32 grf_lp_con;
2360 
2361 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2362 
2363 	if (sdram_params->base.dramtype == DDR4)
2364 		grf_lp_con = (0x7 << 16) | (1 << 1);
2365 	else if (sdram_params->base.dramtype == DDR3)
2366 		grf_lp_con = (0x7 << 16) | (1 << 0);
2367 	else
2368 		grf_lp_con = (0x7 << 16) | (1 << 2);
2369 
2370 	/* en lpckdis_en */
2371 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2372 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2373 
2374 	/* enable sr, pd */
2375 	if (dram->pd_idle == 0)
2376 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2377 	else
2378 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2379 	if (dram->sr_idle == 0)
2380 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2381 	else
2382 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2383 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2384 }
2385 
2386 static void ddr_set_atags(struct dram_info *dram,
2387 			  struct rv1126_sdram_params *sdram_params)
2388 {
2389 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2390 	u32 dram_type = sdram_params->base.dramtype;
2391 	void __iomem *pctl_base = dram->pctl;
2392 	struct tag_serial t_serial;
2393 	struct tag_ddr_mem t_ddrmem;
2394 	struct tag_soc_info t_socinfo;
2395 	u64 cs_cap[2];
2396 	u32 cs_pst = 0;
2397 	u32 split, split_size;
2398 	u64 reduce_cap = 0;
2399 
2400 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2401 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2402 
2403 	memset(&t_serial, 0, sizeof(struct tag_serial));
2404 
2405 	t_serial.version = 0;
2406 	t_serial.enable = 1;
2407 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2408 	t_serial.baudrate = CONFIG_BAUDRATE;
2409 	t_serial.m_mode = SERIAL_M_MODE_M0;
2410 	t_serial.id = 2;
2411 
2412 	atags_destroy();
2413 	atags_set_tag(ATAG_SERIAL, &t_serial);
2414 
2415 	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
2416 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2417 	if (cap_info->row_3_4) {
2418 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2419 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2420 	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
2421 		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
2422 		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
2423 	}
2424 	t_ddrmem.version = 0;
2425 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2426 	if (cs_cap[1]) {
2427 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2428 			6 + 2;
2429 	}
2430 
2431 	if (cs_cap[1] && cs_pst > 27) {
2432 		t_ddrmem.count = 2;
2433 		t_ddrmem.bank[1] = 1 << cs_pst;
2434 		t_ddrmem.bank[2] = cs_cap[0];
2435 		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
2436 	} else {
2437 		t_ddrmem.count = 1;
2438 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
2439 	}
2440 
2441 	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);
2442 
2443 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2444 	t_socinfo.version = 0x1;
2445 	t_socinfo.name = 0x1126;
2446 	t_socinfo.flags = SOC_FLAGS_TDBT;
2447 	atags_set_tag(ATAG_SOC_INFO, &t_socinfo);
2448 }
2449 
2450 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2451 {
2452 	u32 split;
2453 
2454 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2455 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2456 		split = 0;
2457 	else
2458 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2459 			SPLIT_SIZE_MASK;
2460 
2461 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2462 			     &sdram_params->base, split);
2463 }
2464 
/*
 * DDR3/DDR4 half-width fallback: rg_result carries one bit per byte lane
 * that FAILED read-gate training.  Rebuild the DQ byte map so working
 * lanes are mapped first, shrink cap_info->bw to match, and regenerate
 * the controller register set if the map actually changed.
 *
 * Returns 1 if the byte map was changed (params regenerated), 0 if it was
 * already correct, -1 on error (wrong dramtype or abnormal lane count).
 */
static int modify_ddr34_bw_byte_map(u8 rg_result, struct rv1126_sdram_params *sdram_params)
{
	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info = (struct dq_map_info *)
				       ((void *)common_info + index->dq_map_index.offset * 4);
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 byte_map = 0;
	u32 byte = 0;
	u32 byte_map_shift;
	int i;

	/* byte_map[0] holds the DDR3 map in bits 31:24, DDR4 in 7:0 */
	if (dramtype == DDR3)
		byte_map_shift = 24;
	else if (dramtype == DDR4)
		byte_map_shift = 0;
	else
		return -1;

	/* assign the working lanes (bit clear) the lowest logical bytes */
	for (i = 0; i < 4; i++) {
		if ((rg_result & BIT(i)) == 0) {
			byte_map |= byte << (i * 2);
			byte++;
		}
	}
	/* only 8/16/32-bit widths are valid */
	if (byte != 1 && byte != 2 && byte != 4) {
		printascii("DTT result is abnormal: ");
		printdec(byte);
		printascii("byte\n");
		return -1;
	}
	cap_info->bw = byte / 2;
	/* the failed lanes get the remaining logical byte numbers */
	for (i = 0; i < 4; i++) {
		if ((rg_result & BIT(i)) != 0) {
			byte_map |= byte << (i * 2);
			byte++;
		}
	}

	if ((u8)byte_map != (u8)(map_info->byte_map[0] >> byte_map_shift)) {
		clrsetbits_le32(&map_info->byte_map[0],
				0xff << byte_map_shift, byte_map << byte_map_shift);
		pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, dramtype);
		return 1;
	}

	return 0;
}
2513 
/*
 * Bring up the DRAM controller and PHY at frequency set-point 0 and run
 * the initial read-gate training.
 *
 * @post_init: 0 on the first (capacity-probing) pass — cs0 training
 * failures are tolerated for DDR3 so the byte map can be fixed up;
 * non-zero on the final pass — any training failure is fatal and cs1 is
 * trained as well when a second rank exists.
 *
 * Returns 0 on success, -1 on failure.
 */
int sdram_init_(struct dram_info *dram, struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp, tmp;
	int delay = 1000;

	rkclk_configure_ddr(dram, sdram_params);

	/* staged de-assertion of the resets, configuring at each stage */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* extended temperature range: halve tREFI (RFSHTMG[27:16]) */
	u32 trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	/* release all resets and wait for dfi_init_done (STAT[2:0] != 0) */
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) {
		udelay(1);
		if (delay-- <= 0) {
			printascii("ERROR: Cannot wait dfi_init_done!\n");
			while (1)
				;
		}
	}

	/* type-specific mode-register programming after init */
	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	} else if (sdram_params->base.dramtype == DDR4) {
		/* MR6: toggle VrefDQ training enable (bit7), then final value */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7) >> PCTL2_DDR4_MR6_SHIFT & PCTL2_MR_MASK;
		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp, DDR4);
	}

	/* probing pass for DDR3: enable all byte lanes before training */
	if (sdram_params->base.dramtype == DDR3 && post_init == 0)
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
	tmp = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) & 0xf;

	if (tmp != 0) {
		if (post_init != 0) {
			printascii("DTT cs0 error\n");
			return -1;
		}
		/* partial DDR3 lane failure is recoverable via byte remap */
		if (sdram_params->base.dramtype != DDR3 || tmp == 0xf)
			return -1;
	}

	if (sdram_params->base.dramtype == DDR3 && post_init == 0) {
		if (modify_ddr34_bw_byte_map((u8)tmp, sdram_params) != 0)
			return -1;
	}

	/* sanity-check LPDDR4 by reading back MR14 (expected 0x4d) */
	if (sdram_params->base.dramtype == LPDDR4) {
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);

		if (mr_tmp != 0x4d)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* PHY reg 0x105 scaled by 39 gives the VrefDQ value in uV steps
		 * expected by pctl_write_vrefdq()
		 * NOTE(review): scaling constant inferred from usage — confirm
		 */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2655 
2656 static u64 dram_detect_cap(struct dram_info *dram,
2657 			   struct rv1126_sdram_params *sdram_params,
2658 			   unsigned char channel)
2659 {
2660 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2661 	void __iomem *pctl_base = dram->pctl;
2662 	void __iomem *phy_base = dram->phy;
2663 	u32 mr8;
2664 
2665 	u32 bktmp;
2666 	u32 coltmp;
2667 	u32 rowtmp;
2668 	u32 cs;
2669 	u32 dram_type = sdram_params->base.dramtype;
2670 	u32 pwrctl;
2671 	u32 i, dq_map;
2672 	u32 byte1 = 0, byte0 = 0;
2673 
2674 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2675 		if (dram_type != DDR4) {
2676 			if (dram_type == DDR3)
2677 				coltmp = 11;
2678 			else
2679 				coltmp = 12;
2680 			bktmp = 3;
2681 			if (dram_type == LPDDR2)
2682 				rowtmp = 15;
2683 			else
2684 				rowtmp = 16;
2685 
2686 			if (sdram_detect_col(cap_info, coltmp) != 0)
2687 				goto cap_err;
2688 
2689 			sdram_detect_bank(cap_info, pctl_base, coltmp, bktmp);
2690 			if (dram_type != LPDDR3)
2691 				sdram_detect_dbw(cap_info, dram_type);
2692 		} else {
2693 			coltmp = 10;
2694 			bktmp = 4;
2695 			rowtmp = 17;
2696 
2697 			cap_info->col = 10;
2698 			cap_info->bk = 2;
2699 			sdram_detect_bg(cap_info, pctl_base, coltmp);
2700 		}
2701 
2702 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2703 			goto cap_err;
2704 
2705 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2706 	} else {
2707 		cap_info->col = 10;
2708 		cap_info->bk = 3;
2709 		mr8 = read_mr(dram, 1, 8, dram_type);
2710 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2711 		mr8 = (mr8 >> 2) & 0xf;
2712 		if (mr8 >= 0 && mr8 <= 6) {
2713 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2714 		} else if (mr8 == 0xc) {
2715 			cap_info->cs0_row = 13;
2716 		} else {
2717 			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2718 			goto cap_err;
2719 		}
2720 		if (cap_info->dbw == 0)
2721 			cap_info->cs0_row++;
2722 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2723 		if (cap_info->cs0_row >= 17) {
2724 			printascii("Cap ERR: ");
2725 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2726 			goto cap_err;
2727 			// cap_info->cs0_row = 16;
2728 			// cap_info->row_3_4 = 0;
2729 		}
2730 	}
2731 
2732 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2733 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2734 
2735 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2736 		cs = 1;
2737 	else
2738 		cs = 0;
2739 	cap_info->rank = cs + 1;
2740 
2741 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2742 
2743 	if (dram_type != DDR3) {
2744 		if ((data_training_rg(dram, 0, dram_type) & 0xf) == 0) {
2745 			cap_info->bw = 2;
2746 		} else {
2747 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2748 			for (i = 0; i < 4; i++) {
2749 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2750 					byte0 = i;
2751 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2752 					byte1 = i;
2753 			}
2754 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2755 					BIT(byte0) | BIT(byte1));
2756 			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2757 				cap_info->bw = 1;
2758 			else
2759 				cap_info->bw = 0;
2760 		}
2761 	}
2762 
2763 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2764 
2765 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2766 	if (cs) {
2767 		cap_info->cs1_row = cap_info->cs0_row;
2768 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2769 	} else {
2770 		cap_info->cs1_row = 0;
2771 		cap_info->cs1_high16bit_row = 0;
2772 	}
2773 
2774 	if (dram_type == LPDDR3)
2775 		sdram_detect_dbw(cap_info, dram_type);
2776 
2777 	return 0;
2778 cap_err:
2779 	return -1;
2780 }
2781 
2782 static int dram_detect_cs1_row(struct dram_info *dram,
2783 			       struct rv1126_sdram_params *sdram_params,
2784 			       unsigned char channel)
2785 {
2786 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2787 	void __iomem *pctl_base = dram->pctl;
2788 	u32 ret = 0;
2789 	void __iomem *test_addr;
2790 	u32 row, bktmp, coltmp, bw;
2791 	u64 cs0_cap;
2792 	u32 byte_mask;
2793 	u32 cs_pst;
2794 	u32 cs_add = 0;
2795 	u32 max_row;
2796 
2797 	if (cap_info->rank == 2) {
2798 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2799 			6 + 2;
2800 		if (cs_pst < 28)
2801 			cs_add = 1;
2802 
2803 		cs0_cap = 1 << cs_pst;
2804 
2805 		if (sdram_params->base.dramtype == DDR4) {
2806 			if (cap_info->dbw == 0)
2807 				bktmp = cap_info->bk + 2;
2808 			else
2809 				bktmp = cap_info->bk + 1;
2810 		} else {
2811 			bktmp = cap_info->bk;
2812 		}
2813 		bw = cap_info->bw;
2814 		coltmp = cap_info->col;
2815 
2816 		if (bw == 2)
2817 			byte_mask = 0xFFFF;
2818 		else
2819 			byte_mask = 0xFF;
2820 
2821 		max_row = (cs_pst == 31) ? 30 : 31;
2822 
2823 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2824 
2825 		row = (cap_info->cs0_row > max_row) ? max_row :
2826 			cap_info->cs0_row;
2827 
2828 		for (; row > 12; row--) {
2829 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2830 				    (u32)cs0_cap +
2831 				    (1ul << (row + bktmp + coltmp +
2832 					     cs_add + bw - 1ul)));
2833 
2834 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2835 			writel(PATTERN, test_addr);
2836 
2837 			if (((readl(test_addr) & byte_mask) ==
2838 			     (PATTERN & byte_mask)) &&
2839 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2840 			      byte_mask) == 0)) {
2841 				ret = row;
2842 				break;
2843 			}
2844 		}
2845 	}
2846 
2847 	return ret;
2848 }
2849 
/*
 * Top-level init-and-detect sequence: bring up DRAM with the default
 * (maximum) geometry, probe the real capacity, then re-init with the
 * corrected parameters and record cs1/high-16-bit rows and split config.
 *
 * return: 0 = success, other = fail
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;

	/* first pass (post_init = 0); DDR3 gets one retry */
	if (sdram_init_(dram, sdram_params, 0)) {
		if (sdram_params->base.dramtype == DDR3) {
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	/* basic data-path sanity check for DDR3 */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	/* capacity probing must run with the split bypassed */
	split_bypass(dram);
	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* re-init with the detected geometry (post_init = 1) */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	/* refine cs1 row count and publish it in the OS registers */
	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
	split_setup(dram, sdram_params);
out:
	return ret;
}
2900 
2901 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2902 {
2903 	u32 i;
2904 	u32 offset = 0;
2905 	struct ddr2_3_4_lp2_3_info *ddr_info;
2906 
2907 	if (!freq_mhz) {
2908 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2909 		if (ddr_info)
2910 			freq_mhz =
2911 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2912 				DDR_FREQ_MASK;
2913 		else
2914 			freq_mhz = 0;
2915 	}
2916 
2917 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2918 		if (sdram_configs[i].base.ddr_freq == 0 ||
2919 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2920 			break;
2921 	}
2922 	offset = i == 0 ? 0 : i - 1;
2923 
2924 	return &sdram_configs[offset];
2925 }
2926 
/*
 * Controller (uMCTL2) register offsets whose per-frequency shadow copies
 * must be refreshed in pre_set_rate() before switching to a new
 * frequency set-point.
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2950 
/*
 * PHY register offsets (CL/CWL/AL timing group) re-written per frequency
 * set-point in pre_set_rate().
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2956 
/*
 * Prepare a frequency change: copy the new timing values into the
 * controller's dst_fsp shadow register set and the PHY's per-FSP timing
 * registers, reprogram drive strength / ODT, and (for LPDDR4/4X) write
 * the new mode-register values both to the DRAM and to the PHY's MR
 * mirror registers.
 *
 * @dst_fsp:	 target controller frequency set (UMCTL2_REGS_FREQ index)
 * @dst_fsp_lp4: target LPDDR4 FSP selector folded into MR13
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* DDRCTL timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		/*
		 * Resume the table scan at the previous hit ("find") —
		 * assumes pctl_regs is ordered like pctl_need_update_reg;
		 * TODO confirm against the config generator.
		 */
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* Halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	/*
	 * For LPDDR4/4X, mirror the INIT3/4/6/7 mode-register values into
	 * the DRAM (via pctl_write_mr) and into the PHY MR shadow regs
	 * 0x17..0x1d so both sides agree after the switch.
	 */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13: clear FSP bits [7:6], then select the target FSP */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 (no PHY mirror register written for MR12) */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
3082 
/*
 * Snapshot the drive-strength / ODT / Vref settings and NoC timings of
 * the just-programmed frequency set-point into fsp_param[dst_fsp], so
 * later stages can restore them without redoing training.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	/*
	 * NOTE(review): get_ddr_drv_odt_info() can return NULL (see its
	 * callers that check), but ddr_info is dereferenced unchecked in
	 * the non-LPDDR4 branch below — confirm it cannot be NULL here.
	 */
	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		/* LPDDR4/4X always uses pull-down-only read ODT */
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* Read ODT value source depends on which termination is enabled */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* Per-type extraction of DRAM-side drive strength and ODT from MRs */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * CA Vref: midpoint of the max/min of two PHY result-reg
		 * pairs, OR-ed with bit 6 of reg 0x1e (range-select bit —
		 * TODO confirm against PHY documentation).
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* Copy the NoC scheduler timings verbatim from the config */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* Mark this slot as populated */
	p_fsp_param->flag = FSP_FLAG;
}
3198 
3199 #ifndef CONFIG_SPL_KERNEL_BOOT
3200 static void copy_fsp_param_to_ddr(void)
3201 {
3202 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3203 	       sizeof(fsp_param));
3204 }
3205 #endif
3206 
3207 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3208 			     struct sdram_cap_info *cap_info, u32 dram_type,
3209 			     u32 freq)
3210 {
3211 	u64 cs0_cap;
3212 	u32 die_cap;
3213 	u32 trfc_ns, trfc4_ns;
3214 	u32 trfc, txsnr;
3215 	u32 txs_abort_fast = 0;
3216 	u32 tmp;
3217 
3218 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3219 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3220 
3221 	switch (dram_type) {
3222 	case DDR3:
3223 		if (die_cap <= DIE_CAP_512MBIT)
3224 			trfc_ns = 90;
3225 		else if (die_cap <= DIE_CAP_1GBIT)
3226 			trfc_ns = 110;
3227 		else if (die_cap <= DIE_CAP_2GBIT)
3228 			trfc_ns = 160;
3229 		else if (die_cap <= DIE_CAP_4GBIT)
3230 			trfc_ns = 260;
3231 		else
3232 			trfc_ns = 350;
3233 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3234 		break;
3235 
3236 	case DDR4:
3237 		if (die_cap <= DIE_CAP_2GBIT) {
3238 			trfc_ns = 160;
3239 			trfc4_ns = 90;
3240 		} else if (die_cap <= DIE_CAP_4GBIT) {
3241 			trfc_ns = 260;
3242 			trfc4_ns = 110;
3243 		} else if (die_cap <= DIE_CAP_8GBIT) {
3244 			trfc_ns = 350;
3245 			trfc4_ns = 160;
3246 		} else {
3247 			trfc_ns = 550;
3248 			trfc4_ns = 260;
3249 		}
3250 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3251 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3252 		break;
3253 
3254 	case LPDDR3:
3255 		if (die_cap <= DIE_CAP_4GBIT)
3256 			trfc_ns = 130;
3257 		else
3258 			trfc_ns = 210;
3259 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3260 		break;
3261 
3262 	case LPDDR4:
3263 	case LPDDR4X:
3264 		if (die_cap <= DIE_CAP_2GBIT)
3265 			trfc_ns = 130;
3266 		else if (die_cap <= DIE_CAP_4GBIT)
3267 			trfc_ns = 180;
3268 		else if (die_cap <= DIE_CAP_8GBIT)
3269 			trfc_ns = 280;
3270 		else
3271 			trfc_ns = 380;
3272 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3273 		break;
3274 
3275 	default:
3276 		return;
3277 	}
3278 	trfc = (trfc_ns * freq + 999) / 1000;
3279 
3280 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3281 		switch (pctl_regs->pctl[i][0]) {
3282 		case DDR_PCTL2_RFSHTMG:
3283 			tmp = pctl_regs->pctl[i][1];
3284 			/* t_rfc_min */
3285 			tmp &= ~((u32)0x3ff);
3286 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3287 			pctl_regs->pctl[i][1] = tmp;
3288 			break;
3289 
3290 		case DDR_PCTL2_DRAMTMG8:
3291 			if (dram_type == DDR3 || dram_type == DDR4) {
3292 				tmp = pctl_regs->pctl[i][1];
3293 				/* t_xs_x32 */
3294 				tmp &= ~((u32)0x7f);
3295 				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;
3296 
3297 				if (dram_type == DDR4) {
3298 					/* t_xs_abort_x32 */
3299 					tmp &= ~((u32)(0x7f << 16));
3300 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
3301 					/* t_xs_fast_x32 */
3302 					tmp &= ~((u32)(0x7f << 24));
3303 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
3304 				}
3305 
3306 				pctl_regs->pctl[i][1] = tmp;
3307 			}
3308 			break;
3309 
3310 		case DDR_PCTL2_DRAMTMG14:
3311 			if (dram_type == LPDDR3 ||
3312 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3313 				tmp = pctl_regs->pctl[i][1];
3314 				/* t_xsr */
3315 				tmp &= ~((u32)0xfff);
3316 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3317 				pctl_regs->pctl[i][1] = tmp;
3318 			}
3319 			break;
3320 
3321 		default:
3322 			break;
3323 		}
3324 	}
3325 }
3326 
/*
 * Switch the DDR subsystem to a new frequency set-point.
 *
 * Sequence: pick and patch the config for @freq, pre-program the dst_fsp
 * shadow registers, put the DRAM into self-refresh, reprogram the DPLL
 * and PHY PLLs, wait for the DFI init handshake, switch the controller
 * to dst_fsp, exit self-refresh, rewrite the mode registers for the new
 * speed, retrain, and snapshot the resulting parameters.
 *
 * NOTE(review): @cur_freq and @training_en are not referenced in the
 * body — training runs unconditionally via high_freq_training().
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	int delay = 1000;

	/* Disable low-power mode for the duration; restore state at the end */
	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	/* Carry over the detected topology into the per-frequency config */
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* Wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/* Determine DLL-off state for the target FSP from INIT3's MR bit 0 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* If the DLL is currently on, turn it off via MR1 before entering SR */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* Enable PHY bufferen and stop the DFI init handshake */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* Suppress ZQCL on self-refresh exit for both FSPs */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* Gate msch and upctl clocks while the PLLs are reprogrammed */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* Hold the PHY in reset across the PLL change */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* Release bufferen and ungate the clocks again */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* Wait (up to ~1 ms) for the DFI init handshake to complete */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) {
		udelay(1);
		if (delay-- <= 0) {
			printascii("ERROR: Cannot wait DFI_INIT_COMPLETE\n");
			while (1)
				;
		}
	}

	/* Switch the controller and PHY to the target FSP */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* Pulse PHY reg 0x71 bit 5 after self-refresh exit */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* Rewrite the mode registers for the new frequency, per DRAM type */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* DLL stays on at the target: issue a DLL reset via MR0 */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			/*
			 * Update DDR4 VrefDQ: two MR6 writes with the
			 * training-enable bit (bit 7) set, then one with it
			 * cleared to latch the value.
			 */
			pctl_write_mr(dram->pctl, 3, 6,
				      (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK, dramtype);
			pctl_write_mr(dram->pctl, 3, 6,
				      (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK, dramtype);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* MR13: switch the operating FSP (bit 7) */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3523 
/*
 * Cycle through the configured frequency set-points (f1..f3, then the
 * final f0) so that each FSP gets trained and its parameters saved.
 * In the CONFIG_SPL_KERNEL_BOOT build only the final f0 switch is done.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	/* f0 is the final operating frequency */
	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Clear both the DRAM-resident and local FSP parameter stores */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3580 
3581 int get_uart_config(void)
3582 {
3583 	struct sdram_head_info_index_v2 *index =
3584 		(struct sdram_head_info_index_v2 *)common_info;
3585 	struct global_info *gbl_info;
3586 
3587 	gbl_info = (struct global_info *)((void *)common_info +
3588 		index->global_index.offset * 4);
3589 
3590 	return gbl_info->uart_info;
3591 }
3592 
/*
 * TPL entry point for DRAM bring-up: validate the common_info blob,
 * apply global options (2T mode, LPDDR4X override), run init/detect,
 * cycle through the frequency set-points, and publish results (FSP
 * parameters in DRAM, atags, optional test-tool data).
 *
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* Fixed peripheral base addresses for this SoC */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Reject the blob unless the version and every per-type section
	 * size/offset match what this driver was built against.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0 ||
	    index->lp4x_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];
	/* Build-time override: force every config to LPDDR4X */
	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
		sdram_configs[j].base.dramtype = LPDDR4X;
	#endif
	/* Apply the 2T-mode flag to MSTR (pctl[0][1] bit 10) for DDR3/4 */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		/* Report which type/frequency combination failed */
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3683 #endif /* CONFIG_TPL_BUILD */
3684