xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision fc96aab00b4040aff8a9f290b40ffc8eb339c40d)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
43 struct dram_info {
44 	void __iomem *pctl;
45 	void __iomem *phy;
46 	struct rv1126_cru *cru;
47 	struct msch_regs *msch;
48 	struct rv1126_ddrgrf *ddrgrf;
49 	struct rv1126_grf *grf;
50 	struct ram_info info;
51 	struct rv1126_pmugrf *pmugrf;
52 	u32 sr_idle;
53 	u32 pd_idle;
54 };
55 
56 #define GRF_BASE_ADDR			0xfe000000
57 #define PMU_GRF_BASE_ADDR		0xfe020000
58 #define DDR_GRF_BASE_ADDR		0xfe030000
59 #define BUS_SGRF_BASE_ADDR		0xfe0a0000
60 #define SERVER_MSCH_BASE_ADDR		0xfe800000
61 #define CRU_BASE_ADDR			0xff490000
62 #define DDR_PHY_BASE_ADDR		0xff4a0000
63 #define UPCTL2_BASE_ADDR		0xffa50000
64 
65 #define SGRF_SOC_CON2			0x8
66 #define SGRF_SOC_CON12			0x30
67 #define SGRF_SOC_CON13			0x34
68 
69 struct dram_info dram_info;
70 
71 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
72 struct rv1126_sdram_params sdram_configs[] = {
73 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
74 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
75 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
76 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
77 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
78 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
79 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
80 };
81 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
82 struct rv1126_sdram_params sdram_configs[] = {
83 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
84 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
85 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
86 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
87 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
88 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
89 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
90 };
91 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
92 struct rv1126_sdram_params sdram_configs[] = {
93 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
94 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
95 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
96 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
97 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
98 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
99 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
100 };
101 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
102 struct rv1126_sdram_params sdram_configs[] = {
103 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
104 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
105 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
106 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
107 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
108 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
109 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
110 };
111 #endif
112 
113 u32 common_info[] = {
114 	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
115 };
116 
117 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
118 static struct rw_trn_result rw_trn_result;
119 #endif
120 
121 static struct rv1126_fsp_param fsp_param[MAX_IDX];
122 
123 static u8 lp3_odt_value;
124 
125 static s8 wrlvl_result[2][4];
126 
127 /* DDR configuration 0-9 */
128 u16 ddr_cfg_2_rbc[] = {
129 	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
130 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
131 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
132 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
133 	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
134 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
135 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
136 	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
137 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
138 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
139 };
140 
141 /* DDR configuration 10-21 */
142 u8 ddr4_cfg_2_rbc[] = {
143 	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
144 	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
145 	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
146 	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
147 	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
148 	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
149 	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
150 	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
151 	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
152 	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
153 	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
154 	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
155 };
156 
157 /* DDR configuration 22-28 */
158 u16 ddr_cfg_2_rbc_p2[] = {
159 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
160 	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
161 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
162 	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
163 	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
164 	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
165 	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
166 };
167 
168 u8 d4_rbc_2_d3_rbc[][2] = {
169 	{10, 0},
170 	{11, 2},
171 	{12, 23},
172 	{13, 1},
173 	{14, 28},
174 	{15, 24},
175 	{16, 27},
176 	{17, 7},
177 	{18, 6},
178 	{19, 25},
179 	{20, 26},
180 	{21, 3}
181 };
182 
183 u32 addrmap[29][9] = {
184 	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
185 		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
186 	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
187 		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
188 	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
189 		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
190 	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
191 		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
192 	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
193 		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
194 	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
195 		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
196 	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
197 		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
198 	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
199 		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
200 	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
201 		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
202 	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
203 		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */
204 
205 	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
206 		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
207 	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
208 		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
209 	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
210 		0x07070707, 0x00000f07, 0x0700}, /* 12 */
211 	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
212 		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
213 	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
214 		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
215 	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
216 		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
217 	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
218 		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
219 	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
220 		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
221 	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
222 		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
223 	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
224 		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */
225 
226 	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
227 		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
228 	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
229 		0x06060606, 0x00000f06, 0x0600}, /* 21 */
230 	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
231 		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */
232 
233 	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
234 		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
235 	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
236 		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
237 	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
238 		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
239 	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
240 		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
241 	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
242 		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
243 	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
244 		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
245 };
246 
247 static u8 dq_sel[22][3] = {
248 	{0x0, 0x17, 0x22},
249 	{0x1, 0x18, 0x23},
250 	{0x2, 0x19, 0x24},
251 	{0x3, 0x1a, 0x25},
252 	{0x4, 0x1b, 0x26},
253 	{0x5, 0x1c, 0x27},
254 	{0x6, 0x1d, 0x28},
255 	{0x7, 0x1e, 0x29},
256 	{0x8, 0x16, 0x21},
257 	{0x9, 0x1f, 0x2a},
258 	{0xa, 0x20, 0x2b},
259 	{0x10, 0x1, 0xc},
260 	{0x11, 0x2, 0xd},
261 	{0x12, 0x3, 0xe},
262 	{0x13, 0x4, 0xf},
263 	{0x14, 0x5, 0x10},
264 	{0x15, 0x6, 0x11},
265 	{0x16, 0x7, 0x12},
266 	{0x17, 0x8, 0x13},
267 	{0x18, 0x0, 0xb},
268 	{0x19, 0x9, 0x14},
269 	{0x1a, 0xa, 0x15}
270 };
271 
272 static u16 grp_addr[4] = {
273 	ADD_GROUP_CS0_A,
274 	ADD_GROUP_CS0_B,
275 	ADD_GROUP_CS1_A,
276 	ADD_GROUP_CS1_B
277 };
278 
279 static u8 wrlvl_result_offset[2][4] = {
280 	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
281 	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
282 };
283 
284 static u16 dqs_dq_skew_adr[16] = {
285 	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
286 	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
287 	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
288 	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
289 	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
290 	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
291 	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
292 	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
293 	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
294 	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
295 	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
296 	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
297 	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
298 	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
299 	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
300 	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
301 };
302 
303 static void rkclk_ddr_reset(struct dram_info *dram,
304 			    u32 ctl_srstn, u32 ctl_psrstn,
305 			    u32 phy_srstn, u32 phy_psrstn)
306 {
307 	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
308 	       UPCTL2_ASRSTN_REQ(ctl_srstn),
309 	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
310 
311 	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
312 	       &dram->cru->softrst_con[12]);
313 }
314 
315 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
316 {
317 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
318 	int delay = 1000;
319 	u32 mhz = hz / MHz;
320 	struct global_info *gbl_info;
321 	struct sdram_head_info_index_v2 *index =
322 		(struct sdram_head_info_index_v2 *)common_info;
323 	u32 ssmod_info;
324 	u32 dsmpd = 1;
325 
326 	gbl_info = (struct global_info *)((void *)common_info +
327 		    index->global_index.offset * 4);
328 	ssmod_info = gbl_info->info_2t;
329 	refdiv = 1;
330 	if (mhz <= 100) {
331 		postdiv1 = 6;
332 		postdiv2 = 4;
333 	} else if (mhz <= 150) {
334 		postdiv1 = 4;
335 		postdiv2 = 4;
336 	} else if (mhz <= 200) {
337 		postdiv1 = 6;
338 		postdiv2 = 2;
339 	} else if (mhz <= 300) {
340 		postdiv1 = 4;
341 		postdiv2 = 2;
342 	} else if (mhz <= 400) {
343 		postdiv1 = 6;
344 		postdiv2 = 1;
345 	} else {
346 		postdiv1 = 4;
347 		postdiv2 = 1;
348 	}
349 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
350 
351 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
352 
353 	writel(0x1f000000, &dram->cru->clksel_con[64]);
354 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
355 	/* enable ssmod */
356 	if (PLL_SSMOD_SPREAD(ssmod_info)) {
357 		dsmpd = 0;
358 		clrsetbits_le32(&dram->cru->pll[1].con2,
359 				0xffffff << 0, 0x0 << 0);
360 		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
361 		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
362 		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
363 		       SSMOD_RESET(0) |
364 		       SSMOD_DIS_SSCG(0) |
365 		       SSMOD_BP(0),
366 		       &dram->cru->pll[1].con3);
367 	}
368 	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
369 	       &dram->cru->pll[1].con1);
370 
371 	while (delay > 0) {
372 		udelay(1);
373 		if (LOCK(readl(&dram->cru->pll[1].con1)))
374 			break;
375 		delay--;
376 	}
377 
378 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
379 }
380 
381 static void rkclk_configure_ddr(struct dram_info *dram,
382 				struct rv1126_sdram_params *sdram_params)
383 {
384 	/* for inno ddr phy need freq / 2 */
385 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
386 }
387 
388 static unsigned int
389 	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
390 {
391 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
392 	u32 cs, bw, die_bw, col, row, bank;
393 	u32 cs1_row;
394 	u32 i, tmp;
395 	u32 ddrconf = -1;
396 	u32 row_3_4;
397 
398 	cs = cap_info->rank;
399 	bw = cap_info->bw;
400 	die_bw = cap_info->dbw;
401 	col = cap_info->col;
402 	row = cap_info->cs0_row;
403 	cs1_row = cap_info->cs1_row;
404 	bank = cap_info->bk;
405 	row_3_4 = cap_info->row_3_4;
406 
407 	if (sdram_params->base.dramtype == DDR4) {
408 		if (cs == 2 && row == cs1_row && !row_3_4) {
409 			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
410 			      die_bw;
411 			for (i = 17; i < 21; i++) {
412 				if (((tmp & 0xf) ==
413 				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
414 				    ((tmp & 0x70) <=
415 				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
416 					ddrconf = i;
417 					goto out;
418 				}
419 			}
420 		}
421 
422 		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
423 		for (i = 10; i < 21; i++) {
424 			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
425 			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
426 			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
427 				ddrconf = i;
428 				goto out;
429 			}
430 		}
431 	} else {
432 		if (cs == 2 && row == cs1_row && bank == 3) {
433 			for (i = 5; i < 8; i++) {
434 				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
435 							 0x7)) &&
436 				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
437 							  (0x7 << 5))) {
438 					ddrconf = i;
439 					goto out;
440 				}
441 			}
442 		}
443 
444 		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
445 		      ((bw + col - 10) << 0);
446 		if (bank == 3)
447 			tmp |= (1 << 3);
448 
449 		for (i = 0; i < 9; i++)
450 			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
451 			    ((tmp & (7 << 5)) <=
452 			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
453 			    ((tmp & (1 << 8)) <=
454 			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
455 				ddrconf = i;
456 				goto out;
457 			}
458 
459 		for (i = 0; i < 7; i++)
460 			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
461 			    ((tmp & (7 << 5)) <=
462 			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
463 			    ((tmp & (1 << 8)) <=
464 			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
465 				ddrconf = i + 22;
466 				goto out;
467 			}
468 
469 		if (cs == 1 && bank == 3 && row <= 17 &&
470 		    (col + bw) == 12)
471 			ddrconf = 23;
472 	}
473 
474 out:
475 	if (ddrconf > 28)
476 		printascii("calculate ddrconfig error\n");
477 
478 	if (sdram_params->base.dramtype == DDR4) {
479 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
480 			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
481 				if (ddrconf == 21 && row > 16)
482 					printascii("warn:ddrconf21 row > 16\n");
483 				else
484 					ddrconf = d4_rbc_2_d3_rbc[i][1];
485 				break;
486 			}
487 		}
488 	}
489 
490 	return ddrconf;
491 }
492 
493 static void sw_set_req(struct dram_info *dram)
494 {
495 	void __iomem *pctl_base = dram->pctl;
496 
497 	/* clear sw_done=0 */
498 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
499 }
500 
501 static void sw_set_ack(struct dram_info *dram)
502 {
503 	void __iomem *pctl_base = dram->pctl;
504 
505 	/* set sw_done=1 */
506 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
507 	while (1) {
508 		/* wait programming done */
509 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
510 				PCTL2_SW_DONE_ACK)
511 			break;
512 	}
513 }
514 
515 static void set_ctl_address_map(struct dram_info *dram,
516 				struct rv1126_sdram_params *sdram_params)
517 {
518 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
519 	void __iomem *pctl_base = dram->pctl;
520 	u32 ddrconf = cap_info->ddrconfig;
521 	u32 i, row;
522 
523 	row = cap_info->cs0_row;
524 	if (sdram_params->base.dramtype == DDR4) {
525 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
526 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
527 				ddrconf = d4_rbc_2_d3_rbc[i][0];
528 				break;
529 			}
530 		}
531 	}
532 
533 	if (ddrconf >= ARRAY_SIZE(addrmap)) {
534 		printascii("set ctl address map fail\n");
535 		return;
536 	}
537 
538 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
539 			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);
540 
541 	/* unused row set to 0xf */
542 	for (i = 17; i >= row; i--)
543 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
544 			((i - 12) * 8 / 32) * 4,
545 			0xf << ((i - 12) * 8 % 32));
546 
547 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
548 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
549 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
550 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
551 
552 	if (cap_info->rank == 1)
553 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
554 }
555 
556 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
557 {
558 	void __iomem *phy_base = dram->phy;
559 	u32 fbdiv, prediv, postdiv, postdiv_en;
560 
561 	if (wait) {
562 		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
563 		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
564 			continue;
565 	} else {
566 		freq /= MHz;
567 		prediv = 1;
568 		if (freq <= 200) {
569 			fbdiv = 16;
570 			postdiv = 2;
571 			postdiv_en = 1;
572 		} else if (freq <= 456) {
573 			fbdiv = 8;
574 			postdiv = 1;
575 			postdiv_en = 1;
576 		} else {
577 			fbdiv = 4;
578 			postdiv = 0;
579 			postdiv_en = 0;
580 		}
581 		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
582 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
583 				(fbdiv >> 8) & 1);
584 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
585 				postdiv_en << PHY_POSTDIV_EN_SHIFT);
586 
587 		clrsetbits_le32(PHY_REG(phy_base, 0x52),
588 				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
589 		clrsetbits_le32(PHY_REG(phy_base, 0x53),
590 				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
591 				postdiv << PHY_POSTDIV_SHIFT);
592 	}
593 }
594 
595 static const u16 d3_phy_drv_2_ohm[][2] = {
596 	{PHY_DDR3_RON_455ohm, 455},
597 	{PHY_DDR3_RON_230ohm, 230},
598 	{PHY_DDR3_RON_153ohm, 153},
599 	{PHY_DDR3_RON_115ohm, 115},
600 	{PHY_DDR3_RON_91ohm, 91},
601 	{PHY_DDR3_RON_76ohm, 76},
602 	{PHY_DDR3_RON_65ohm, 65},
603 	{PHY_DDR3_RON_57ohm, 57},
604 	{PHY_DDR3_RON_51ohm, 51},
605 	{PHY_DDR3_RON_46ohm, 46},
606 	{PHY_DDR3_RON_41ohm, 41},
607 	{PHY_DDR3_RON_38ohm, 38},
608 	{PHY_DDR3_RON_35ohm, 35},
609 	{PHY_DDR3_RON_32ohm, 32},
610 	{PHY_DDR3_RON_30ohm, 30},
611 	{PHY_DDR3_RON_28ohm, 28},
612 	{PHY_DDR3_RON_27ohm, 27},
613 	{PHY_DDR3_RON_25ohm, 25},
614 	{PHY_DDR3_RON_24ohm, 24},
615 	{PHY_DDR3_RON_23ohm, 23},
616 	{PHY_DDR3_RON_22ohm, 22},
617 	{PHY_DDR3_RON_21ohm, 21},
618 	{PHY_DDR3_RON_20ohm, 20}
619 };
620 
621 static u16 d3_phy_odt_2_ohm[][2] = {
622 	{PHY_DDR3_RTT_DISABLE, 0},
623 	{PHY_DDR3_RTT_561ohm, 561},
624 	{PHY_DDR3_RTT_282ohm, 282},
625 	{PHY_DDR3_RTT_188ohm, 188},
626 	{PHY_DDR3_RTT_141ohm, 141},
627 	{PHY_DDR3_RTT_113ohm, 113},
628 	{PHY_DDR3_RTT_94ohm, 94},
629 	{PHY_DDR3_RTT_81ohm, 81},
630 	{PHY_DDR3_RTT_72ohm, 72},
631 	{PHY_DDR3_RTT_64ohm, 64},
632 	{PHY_DDR3_RTT_58ohm, 58},
633 	{PHY_DDR3_RTT_52ohm, 52},
634 	{PHY_DDR3_RTT_48ohm, 48},
635 	{PHY_DDR3_RTT_44ohm, 44},
636 	{PHY_DDR3_RTT_41ohm, 41},
637 	{PHY_DDR3_RTT_38ohm, 38},
638 	{PHY_DDR3_RTT_37ohm, 37},
639 	{PHY_DDR3_RTT_34ohm, 34},
640 	{PHY_DDR3_RTT_32ohm, 32},
641 	{PHY_DDR3_RTT_31ohm, 31},
642 	{PHY_DDR3_RTT_29ohm, 29},
643 	{PHY_DDR3_RTT_28ohm, 28},
644 	{PHY_DDR3_RTT_27ohm, 27},
645 	{PHY_DDR3_RTT_25ohm, 25}
646 };
647 
648 static u16 d4lp3_phy_drv_2_ohm[][2] = {
649 	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
650 	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
651 	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
652 	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
653 	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
654 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
655 	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
656 	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
657 	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
658 	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
659 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
660 	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
661 	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
662 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
663 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
664 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
665 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
666 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
667 	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
668 	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
669 	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
670 	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
671 	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
672 };
673 
674 static u16 d4lp3_phy_odt_2_ohm[][2] = {
675 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
676 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
677 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
678 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
679 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
680 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
681 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
682 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
683 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
684 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
685 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
686 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
687 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
688 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
689 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
690 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
691 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
692 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
693 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
694 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
695 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
696 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
697 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
698 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
699 };
700 
701 static u16 lp4_phy_drv_2_ohm[][2] = {
702 	{PHY_LPDDR4_RON_501ohm, 501},
703 	{PHY_LPDDR4_RON_253ohm, 253},
704 	{PHY_LPDDR4_RON_168ohm, 168},
705 	{PHY_LPDDR4_RON_126ohm, 126},
706 	{PHY_LPDDR4_RON_101ohm, 101},
707 	{PHY_LPDDR4_RON_84ohm, 84},
708 	{PHY_LPDDR4_RON_72ohm, 72},
709 	{PHY_LPDDR4_RON_63ohm, 63},
710 	{PHY_LPDDR4_RON_56ohm, 56},
711 	{PHY_LPDDR4_RON_50ohm, 50},
712 	{PHY_LPDDR4_RON_46ohm, 46},
713 	{PHY_LPDDR4_RON_42ohm, 42},
714 	{PHY_LPDDR4_RON_38ohm, 38},
715 	{PHY_LPDDR4_RON_36ohm, 36},
716 	{PHY_LPDDR4_RON_33ohm, 33},
717 	{PHY_LPDDR4_RON_31ohm, 31},
718 	{PHY_LPDDR4_RON_29ohm, 29},
719 	{PHY_LPDDR4_RON_28ohm, 28},
720 	{PHY_LPDDR4_RON_26ohm, 26},
721 	{PHY_LPDDR4_RON_25ohm, 25},
722 	{PHY_LPDDR4_RON_24ohm, 24},
723 	{PHY_LPDDR4_RON_23ohm, 23},
724 	{PHY_LPDDR4_RON_22ohm, 22}
725 };
726 
727 static u16 lp4_phy_odt_2_ohm[][2] = {
728 	{PHY_LPDDR4_RTT_DISABLE, 0},
729 	{PHY_LPDDR4_RTT_604ohm, 604},
730 	{PHY_LPDDR4_RTT_303ohm, 303},
731 	{PHY_LPDDR4_RTT_202ohm, 202},
732 	{PHY_LPDDR4_RTT_152ohm, 152},
733 	{PHY_LPDDR4_RTT_122ohm, 122},
734 	{PHY_LPDDR4_RTT_101ohm, 101},
735 	{PHY_LPDDR4_RTT_87ohm,	87},
736 	{PHY_LPDDR4_RTT_78ohm, 78},
737 	{PHY_LPDDR4_RTT_69ohm, 69},
738 	{PHY_LPDDR4_RTT_62ohm, 62},
739 	{PHY_LPDDR4_RTT_56ohm, 56},
740 	{PHY_LPDDR4_RTT_52ohm, 52},
741 	{PHY_LPDDR4_RTT_48ohm, 48},
742 	{PHY_LPDDR4_RTT_44ohm, 44},
743 	{PHY_LPDDR4_RTT_41ohm, 41},
744 	{PHY_LPDDR4_RTT_39ohm, 39},
745 	{PHY_LPDDR4_RTT_37ohm, 37},
746 	{PHY_LPDDR4_RTT_35ohm, 35},
747 	{PHY_LPDDR4_RTT_33ohm, 33},
748 	{PHY_LPDDR4_RTT_32ohm, 32},
749 	{PHY_LPDDR4_RTT_30ohm, 30},
750 	{PHY_LPDDR4_RTT_29ohm, 29},
751 	{PHY_LPDDR4_RTT_27ohm, 27}
752 };
753 
754 static u32 lp4_odt_calc(u32 odt_ohm)
755 {
756 	u32 odt;
757 
758 	if (odt_ohm == 0)
759 		odt = LPDDR4_DQODT_DIS;
760 	else if (odt_ohm <= 40)
761 		odt = LPDDR4_DQODT_40;
762 	else if (odt_ohm <= 48)
763 		odt = LPDDR4_DQODT_48;
764 	else if (odt_ohm <= 60)
765 		odt = LPDDR4_DQODT_60;
766 	else if (odt_ohm <= 80)
767 		odt = LPDDR4_DQODT_80;
768 	else if (odt_ohm <= 120)
769 		odt = LPDDR4_DQODT_120;
770 	else
771 		odt = LPDDR4_DQODT_240;
772 
773 	return odt;
774 }
775 
776 static void *get_ddr_drv_odt_info(u32 dramtype)
777 {
778 	struct sdram_head_info_index_v2 *index =
779 		(struct sdram_head_info_index_v2 *)common_info;
780 	void *ddr_info = 0;
781 
782 	if (dramtype == DDR4)
783 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
784 	else if (dramtype == DDR3)
785 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
786 	else if (dramtype == LPDDR3)
787 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
788 	else if (dramtype == LPDDR4)
789 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
790 	else if (dramtype == LPDDR4X)
791 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
792 	else
793 		printascii("unsupported dram type\n");
794 	return ddr_info;
795 }
796 
797 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
798 			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
799 {
800 	void __iomem *pctl_base = dram->pctl;
801 	u32 ca_vref, dq_vref;
802 
803 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
804 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
805 	else
806 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
807 
808 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
809 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
810 	else
811 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
812 
813 	if (dramtype == LPDDR4) {
814 		if (ca_vref < 100)
815 			ca_vref = 100;
816 		if (ca_vref > 420)
817 			ca_vref = 420;
818 
819 		if (ca_vref <= 300)
820 			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
821 		else
822 			ca_vref = (1 << 6) | (ca_vref - 220) / 4;
823 
824 		if (dq_vref < 100)
825 			dq_vref = 100;
826 		if (dq_vref > 420)
827 			dq_vref = 420;
828 
829 		if (dq_vref <= 300)
830 			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
831 		else
832 			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
833 	} else {
834 		ca_vref = ca_vref * 11 / 6;
835 		if (ca_vref < 150)
836 			ca_vref = 150;
837 		if (ca_vref > 629)
838 			ca_vref = 629;
839 
840 		if (ca_vref <= 449)
841 			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
842 		else
843 			ca_vref = (1 << 6) | (ca_vref - 329) / 4;
844 
845 		if (dq_vref < 150)
846 			dq_vref = 150;
847 		if (dq_vref > 629)
848 			dq_vref = 629;
849 
850 		if (dq_vref <= 449)
851 			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
852 		else
853 			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
854 	}
855 	sw_set_req(dram);
856 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
857 			DDR_PCTL2_INIT6,
858 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
859 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
860 
861 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
862 			DDR_PCTL2_INIT7,
863 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
864 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
865 	sw_set_ack(dram);
866 }
867 
868 static void set_ds_odt(struct dram_info *dram,
869 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
870 {
871 	void __iomem *phy_base = dram->phy;
872 	void __iomem *pctl_base = dram->pctl;
873 	u32 dramtype = sdram_params->base.dramtype;
874 	struct ddr2_3_4_lp2_3_info *ddr_info;
875 	struct lp4_info *lp4_info;
876 	u32 i, j, tmp;
877 	const u16 (*p_drv)[2];
878 	const u16 (*p_odt)[2];
879 	u32 drv_info, sr_info;
880 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
881 	u32 phy_odt_ohm, dram_odt_ohm;
882 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
883 	u32 phy_odt_up_en, phy_odt_dn_en;
884 	u32 sr_dq, sr_clk;
885 	u32 freq = sdram_params->base.ddr_freq;
886 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
887 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
888 	u32 phy_dq_drv = 0;
889 	u32 phy_odt_up = 0, phy_odt_dn = 0;
890 
891 	ddr_info = get_ddr_drv_odt_info(dramtype);
892 	lp4_info = (void *)ddr_info;
893 
894 	if (!ddr_info)
895 		return;
896 
897 	/* dram odt en freq control phy drv, dram odt and phy sr */
898 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
899 		drv_info = ddr_info->drv_when_odtoff;
900 		dram_odt_ohm = 0;
901 		sr_info = ddr_info->sr_when_odtoff;
902 		phy_lp4_drv_pd_en =
903 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
904 	} else {
905 		drv_info = ddr_info->drv_when_odten;
906 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
907 		sr_info = ddr_info->sr_when_odten;
908 		phy_lp4_drv_pd_en =
909 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
910 	}
911 	phy_dq_drv_ohm =
912 		DRV_INFO_PHY_DQ_DRV(drv_info);
913 	phy_clk_drv_ohm =
914 		DRV_INFO_PHY_CLK_DRV(drv_info);
915 	phy_ca_drv_ohm =
916 		DRV_INFO_PHY_CA_DRV(drv_info);
917 
918 	sr_dq = DQ_SR_INFO(sr_info);
919 	sr_clk = CLK_SR_INFO(sr_info);
920 
921 	/* phy odt en freq control dram drv and phy odt */
922 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
923 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
924 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
925 		phy_odt_ohm = 0;
926 		phy_odt_up_en = 0;
927 		phy_odt_dn_en = 0;
928 	} else {
929 		dram_drv_ohm =
930 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
931 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
932 		phy_odt_up_en =
933 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
934 		phy_odt_dn_en =
935 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
936 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
937 	}
938 
939 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
940 		if (phy_odt_ohm) {
941 			phy_odt_up_en = 0;
942 			phy_odt_dn_en = 1;
943 		}
944 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
945 			dram_caodt_ohm = 0;
946 		else
947 			dram_caodt_ohm =
948 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
949 	}
950 
951 	if (dramtype == DDR3) {
952 		p_drv = d3_phy_drv_2_ohm;
953 		p_odt = d3_phy_odt_2_ohm;
954 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 		p_drv = lp4_phy_drv_2_ohm;
956 		p_odt = lp4_phy_odt_2_ohm;
957 	} else {
958 		p_drv = d4lp3_phy_drv_2_ohm;
959 		p_odt = d4lp3_phy_odt_2_ohm;
960 	}
961 
962 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
963 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
964 			phy_dq_drv = **(p_drv + i);
965 			break;
966 		}
967 		if (i == 0)
968 			break;
969 	}
970 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
971 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
972 			phy_clk_drv = **(p_drv + i);
973 			break;
974 		}
975 		if (i == 0)
976 			break;
977 	}
978 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
979 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
980 			phy_ca_drv = **(p_drv + i);
981 			break;
982 		}
983 		if (i == 0)
984 			break;
985 	}
986 	if (!phy_odt_ohm)
987 		phy_odt = 0;
988 	else
989 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
990 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
991 				phy_odt = **(p_odt + i);
992 				break;
993 			}
994 			if (i == 0)
995 				break;
996 		}
997 
998 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
999 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1000 			vref_inner = 0x80;
1001 		else if (phy_odt_up_en)
1002 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1003 				     (dram_drv_ohm + phy_odt_ohm);
1004 		else
1005 			vref_inner = phy_odt_ohm * 128 /
1006 				(phy_odt_ohm + dram_drv_ohm);
1007 
1008 		if (dramtype != DDR3 && dram_odt_ohm)
1009 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1010 				   (phy_dq_drv_ohm + dram_odt_ohm);
1011 		else
1012 			vref_out = 0x80;
1013 	} else {
1014 		/* for lp4 and lp4x*/
1015 		if (phy_odt_ohm)
1016 			vref_inner =
1017 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1018 				 256) / 1000;
1019 		else
1020 			vref_inner =
1021 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1022 				 256) / 1000;
1023 
1024 		vref_out = 0x80;
1025 	}
1026 
1027 	/* default ZQCALIB bypass mode */
1028 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1029 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1030 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1031 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1032 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1033 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1034 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1035 	} else {
1036 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1037 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1038 	}
1039 	/* clk / cmd slew rate */
1040 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1041 
1042 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1043 	if (phy_odt_up_en)
1044 		phy_odt_up = phy_odt;
1045 	if (phy_odt_dn_en)
1046 		phy_odt_dn = phy_odt;
1047 
1048 	for (i = 0; i < 4; i++) {
1049 		j = 0x110 + i * 0x10;
1050 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1051 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1052 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1053 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1054 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1055 
1056 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1057 				1 << 3, phy_lp4_drv_pd_en << 3);
1058 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1059 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1060 		/* dq slew rate */
1061 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1062 				0x1f, sr_dq);
1063 	}
1064 
1065 	/* reg_rx_vref_value_update */
1066 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1067 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1068 
1069 	/* RAM VREF */
1070 	writel(vref_out, PHY_REG(phy_base, 0x105));
1071 	if (dramtype == LPDDR3)
1072 		udelay(100);
1073 
1074 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1075 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1076 
1077 	if (dramtype == DDR3 || dramtype == DDR4) {
1078 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 				DDR_PCTL2_INIT3);
1080 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1081 	} else {
1082 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1083 				DDR_PCTL2_INIT4);
1084 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1085 	}
1086 
1087 	if (dramtype == DDR3) {
1088 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1089 		if (dram_drv_ohm == 34)
1090 			mr1_mr3 |= DDR3_DS_34;
1091 
1092 		if (dram_odt_ohm == 0)
1093 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1094 		else if (dram_odt_ohm <= 40)
1095 			mr1_mr3 |= DDR3_RTT_NOM_40;
1096 		else if (dram_odt_ohm <= 60)
1097 			mr1_mr3 |= DDR3_RTT_NOM_60;
1098 		else
1099 			mr1_mr3 |= DDR3_RTT_NOM_120;
1100 
1101 	} else if (dramtype == DDR4) {
1102 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1103 		if (dram_drv_ohm == 48)
1104 			mr1_mr3 |= DDR4_DS_48;
1105 
1106 		if (dram_odt_ohm == 0)
1107 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1108 		else if (dram_odt_ohm <= 34)
1109 			mr1_mr3 |= DDR4_RTT_NOM_34;
1110 		else if (dram_odt_ohm <= 40)
1111 			mr1_mr3 |= DDR4_RTT_NOM_40;
1112 		else if (dram_odt_ohm <= 48)
1113 			mr1_mr3 |= DDR4_RTT_NOM_48;
1114 		else if (dram_odt_ohm <= 60)
1115 			mr1_mr3 |= DDR4_RTT_NOM_60;
1116 		else
1117 			mr1_mr3 |= DDR4_RTT_NOM_120;
1118 
1119 	} else if (dramtype == LPDDR3) {
1120 		if (dram_drv_ohm <= 34)
1121 			mr1_mr3 |= LPDDR3_DS_34;
1122 		else if (dram_drv_ohm <= 40)
1123 			mr1_mr3 |= LPDDR3_DS_40;
1124 		else if (dram_drv_ohm <= 48)
1125 			mr1_mr3 |= LPDDR3_DS_48;
1126 		else if (dram_drv_ohm <= 60)
1127 			mr1_mr3 |= LPDDR3_DS_60;
1128 		else if (dram_drv_ohm <= 80)
1129 			mr1_mr3 |= LPDDR3_DS_80;
1130 
1131 		if (dram_odt_ohm == 0)
1132 			lp3_odt_value = LPDDR3_ODT_DIS;
1133 		else if (dram_odt_ohm <= 60)
1134 			lp3_odt_value = LPDDR3_ODT_60;
1135 		else if (dram_odt_ohm <= 120)
1136 			lp3_odt_value = LPDDR3_ODT_120;
1137 		else
1138 			lp3_odt_value = LPDDR3_ODT_240;
1139 	} else {/* for lpddr4 and lpddr4x */
1140 		/* MR3 for lp4 PU-CAL and PDDS */
1141 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1142 		mr1_mr3 |= lp4_pu_cal;
1143 
1144 		tmp = lp4_odt_calc(dram_drv_ohm);
1145 		if (!tmp)
1146 			tmp = LPDDR4_PDDS_240;
1147 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1148 
1149 		/* MR11 for lp4 ca odt, dq odt set */
1150 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1151 			     DDR_PCTL2_INIT6);
1152 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1153 
1154 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1155 
1156 		tmp = lp4_odt_calc(dram_odt_ohm);
1157 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1158 
1159 		tmp = lp4_odt_calc(dram_caodt_ohm);
1160 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1161 		sw_set_req(dram);
1162 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1163 				DDR_PCTL2_INIT6,
1164 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1165 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1166 		sw_set_ack(dram);
1167 
1168 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1169 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1170 			     DDR_PCTL2_INIT7);
1171 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1172 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1173 
1174 		tmp = lp4_odt_calc(phy_odt_ohm);
1175 		mr22 |= tmp;
1176 		mr22 = mr22 |
1177 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1178 			LPDDR4_ODTE_CK_SHIFT) |
1179 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1180 			LPDDR4_ODTE_CS_SHIFT) |
1181 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1182 			LPDDR4_ODTD_CA_SHIFT);
1183 
1184 		sw_set_req(dram);
1185 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1186 				DDR_PCTL2_INIT7,
1187 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1188 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1189 		sw_set_ack(dram);
1190 	}
1191 
1192 	if (dramtype == DDR4 || dramtype == DDR3) {
1193 		sw_set_req(dram);
1194 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1195 				DDR_PCTL2_INIT3,
1196 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1197 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1198 		sw_set_ack(dram);
1199 	} else {
1200 		sw_set_req(dram);
1201 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1202 				DDR_PCTL2_INIT4,
1203 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1204 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1205 		sw_set_ack(dram);
1206 	}
1207 }
1208 
1209 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1210 				   struct rv1126_sdram_params *sdram_params)
1211 {
1212 	void __iomem *phy_base = dram->phy;
1213 	u32 dramtype = sdram_params->base.dramtype;
1214 	struct sdram_head_info_index_v2 *index =
1215 		(struct sdram_head_info_index_v2 *)common_info;
1216 	struct dq_map_info *map_info;
1217 
1218 	map_info = (struct dq_map_info *)((void *)common_info +
1219 		index->dq_map_index.offset * 4);
1220 
1221 	if (dramtype == LPDDR4X)
1222 		dramtype = LPDDR4;
1223 
1224 	if (dramtype <= LPDDR4)
1225 		writel((map_info->byte_map[dramtype / 4] >>
1226 			((dramtype % 4) * 8)) & 0xff,
1227 		       PHY_REG(phy_base, 0x4f));
1228 
1229 	return 0;
1230 }
1231 
1232 static void phy_cfg(struct dram_info *dram,
1233 		    struct rv1126_sdram_params *sdram_params)
1234 {
1235 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1236 	void __iomem *phy_base = dram->phy;
1237 	u32 i, dq_map, tmp;
1238 	u32 byte1 = 0, byte0 = 0;
1239 
1240 	sdram_cmd_dq_path_remap(dram, sdram_params);
1241 
1242 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
1243 	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
1244 		writel(sdram_params->phy_regs.phy[i][1],
1245 		       phy_base + sdram_params->phy_regs.phy[i][0]);
1246 	}
1247 
1248 	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
1249 	dq_map = readl(PHY_REG(phy_base, 0x4f));
1250 	for (i = 0; i < 4; i++) {
1251 		if (((dq_map >> (i * 2)) & 0x3) == 0)
1252 			byte0 = i;
1253 		if (((dq_map >> (i * 2)) & 0x3) == 1)
1254 			byte1 = i;
1255 	}
1256 
1257 	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
1258 	if (cap_info->bw == 2)
1259 		tmp |= 0xf;
1260 	else if (cap_info->bw == 1)
1261 		tmp |= ((1 << byte0) | (1 << byte1));
1262 	else
1263 		tmp |= (1 << byte0);
1264 
1265 	writel(tmp, PHY_REG(phy_base, 0xf));
1266 
1267 	/* lpddr4 odt control by phy, enable cs0 odt */
1268 	if (sdram_params->base.dramtype == LPDDR4 ||
1269 	    sdram_params->base.dramtype == LPDDR4X)
1270 		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
1271 				(1 << 6) | (1 << 4));
1272 	/* for ca training ca vref choose range1 */
1273 	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
1274 	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
1275 	/* for wr training PHY_0x7c[5], choose range0 */
1276 	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
1277 }
1278 
1279 static int update_refresh_reg(struct dram_info *dram)
1280 {
1281 	void __iomem *pctl_base = dram->pctl;
1282 	u32 ret;
1283 
1284 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1285 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1286 
1287 	return 0;
1288 }
1289 
1290 /*
1291  * rank = 1: cs0
1292  * rank = 2: cs1
1293  */
1294 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1295 {
1296 	u32 ret;
1297 	u32 i, temp;
1298 	u32 dqmap;
1299 
1300 	void __iomem *pctl_base = dram->pctl;
1301 	struct sdram_head_info_index_v2 *index =
1302 		(struct sdram_head_info_index_v2 *)common_info;
1303 	struct dq_map_info *map_info;
1304 
1305 	map_info = (struct dq_map_info *)((void *)common_info +
1306 		index->dq_map_index.offset * 4);
1307 
1308 	if (dramtype == LPDDR2)
1309 		dqmap = map_info->lp2_dq0_7_map;
1310 	else
1311 		dqmap = map_info->lp3_dq0_7_map;
1312 
1313 	pctl_read_mr(pctl_base, rank, mr_num);
1314 
1315 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1316 
1317 	if (dramtype != LPDDR4) {
1318 		temp = 0;
1319 		for (i = 0; i < 8; i++) {
1320 			temp = temp | (((ret >> i) & 0x1) <<
1321 				       ((dqmap >> (i * 4)) & 0xf));
1322 		}
1323 	} else {
1324 		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1325 	}
1326 
1327 	return temp;
1328 }
1329 
1330 /* before call this function autorefresh should be disabled */
1331 void send_a_refresh(struct dram_info *dram)
1332 {
1333 	void __iomem *pctl_base = dram->pctl;
1334 
1335 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1336 		continue;
1337 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1338 }
1339 
1340 static void enter_sr(struct dram_info *dram, u32 en)
1341 {
1342 	void __iomem *pctl_base = dram->pctl;
1343 
1344 	if (en) {
1345 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1346 		while (1) {
1347 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1348 			      PCTL2_SELFREF_TYPE_MASK) ==
1349 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1350 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1351 			      PCTL2_OPERATING_MODE_MASK) ==
1352 			     PCTL2_OPERATING_MODE_SR))
1353 				break;
1354 		}
1355 	} else {
1356 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1357 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1358 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1359 			continue;
1360 	}
1361 }
1362 
1363 void record_dq_prebit(struct dram_info *dram)
1364 {
1365 	u32 group, i, tmp;
1366 	void __iomem *phy_base = dram->phy;
1367 
1368 	for (group = 0; group < 4; group++) {
1369 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1370 			/* l_loop_invdelaysel */
1371 			writel(dq_sel[i][0], PHY_REG(phy_base,
1372 						     grp_addr[group] + 0x2c));
1373 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1374 			writel(tmp, PHY_REG(phy_base,
1375 					    grp_addr[group] + dq_sel[i][1]));
1376 
1377 			/* r_loop_invdelaysel */
1378 			writel(dq_sel[i][0], PHY_REG(phy_base,
1379 						     grp_addr[group] + 0x2d));
1380 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1381 			writel(tmp, PHY_REG(phy_base,
1382 					    grp_addr[group] + dq_sel[i][2]));
1383 		}
1384 	}
1385 }
1386 
1387 static void update_dq_rx_prebit(struct dram_info *dram)
1388 {
1389 	void __iomem *phy_base = dram->phy;
1390 
1391 	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
1392 			BIT(4));
1393 	udelay(1);
1394 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
1395 }
1396 
1397 static void update_dq_tx_prebit(struct dram_info *dram)
1398 {
1399 	void __iomem *phy_base = dram->phy;
1400 
1401 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1402 	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
1403 	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1404 	udelay(1);
1405 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1406 }
1407 
1408 static void update_ca_prebit(struct dram_info *dram)
1409 {
1410 	void __iomem *phy_base = dram->phy;
1411 
1412 	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
1413 	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1414 	udelay(1);
1415 	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1416 }
1417 
1418 /*
1419  * dir: 0: de-skew = delta_*
1420  *	1: de-skew = reg val - delta_*
1421  * delta_dir: value for differential signal: clk/
1422  * delta_sig: value for single signal: ca/cmd
1423  */
1424 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1425 			     int delta_sig, u32 cs, u32 dramtype)
1426 {
1427 	void __iomem *phy_base = dram->phy;
1428 	u32 i, cs_en, tmp;
1429 	u32 dfi_lp_stat = 0;
1430 
1431 	if (cs == 0)
1432 		cs_en = 1;
1433 	else if (cs == 2)
1434 		cs_en = 2;
1435 	else
1436 		cs_en = 3;
1437 
1438 	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
1439 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1440 		dfi_lp_stat = 1;
1441 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1442 	}
1443 	enter_sr(dram, 1);
1444 
1445 	for (i = 0; i < 0x20; i++) {
1446 		if (dir == DESKEW_MDF_ABS_VAL)
1447 			tmp = delta_sig;
1448 		else
1449 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1450 			      delta_sig;
1451 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1452 	}
1453 
1454 	if (dir == DESKEW_MDF_ABS_VAL)
1455 		tmp = delta_dif;
1456 	else
1457 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1458 		       delta_sig + delta_dif;
1459 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1460 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1461 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1462 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1463 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1464 
1465 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1466 		update_ca_prebit(dram);
1467 	}
1468 	enter_sr(dram, 0);
1469 
1470 	if (dfi_lp_stat)
1471 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1472 
1473 }
1474 
1475 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1476 {
1477 	u32 i, j, offset = 0;
1478 	u32 min = 0x3f;
1479 	void __iomem *phy_base = dram->phy;
1480 	u32 byte_en;
1481 
1482 	if (signal == SKEW_TX_SIGNAL)
1483 		offset = 8;
1484 
1485 	if (signal == SKEW_CA_SIGNAL) {
1486 		for (i = 0; i < 0x20; i++)
1487 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1488 	} else {
1489 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1490 		for (j = offset; j < offset + rank * 4; j++) {
1491 			if (!((byte_en >> (j % 4)) & 1))
1492 				continue;
1493 			for (i = 0; i < 11; i++)
1494 				min = MIN(min,
1495 					  readl(PHY_REG(phy_base,
1496 							dqs_dq_skew_adr[j] +
1497 							i)));
1498 		}
1499 	}
1500 
1501 	return min;
1502 }
1503 
1504 static u32 low_power_update(struct dram_info *dram, u32 en)
1505 {
1506 	void __iomem *pctl_base = dram->pctl;
1507 	u32 lp_stat = 0;
1508 
1509 	if (en) {
1510 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1511 	} else {
1512 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1513 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1514 	}
1515 
1516 	return lp_stat;
1517 }
1518 
1519 /*
1520  * signal:
1521  * dir: 0: de-skew = delta_*
1522  *	1: de-skew = reg val - delta_*
1523  * delta_dir: value for differential signal: dqs
1524  * delta_sig: value for single signal: dq/dm
1525  */
1526 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1527 			     int delta_dif, int delta_sig, u32 rank)
1528 {
1529 	void __iomem *phy_base = dram->phy;
1530 	u32 i, j, tmp, offset;
1531 	u32 byte_en;
1532 
1533 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1534 
1535 	if (signal == SKEW_RX_SIGNAL)
1536 		offset = 0;
1537 	else
1538 		offset = 8;
1539 
1540 	for (j = offset; j < (offset + rank * 4); j++) {
1541 		if (!((byte_en >> (j % 4)) & 1))
1542 			continue;
1543 		for (i = 0; i < 0x9; i++) {
1544 			if (dir == DESKEW_MDF_ABS_VAL)
1545 				tmp = delta_sig;
1546 			else
1547 				tmp = delta_sig + readl(PHY_REG(phy_base,
1548 							dqs_dq_skew_adr[j] +
1549 							i));
1550 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1551 		}
1552 		if (dir == DESKEW_MDF_ABS_VAL)
1553 			tmp = delta_dif;
1554 		else
1555 			tmp = delta_dif + readl(PHY_REG(phy_base,
1556 						dqs_dq_skew_adr[j] + 9));
1557 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1558 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1559 	}
1560 	if (signal == SKEW_RX_SIGNAL)
1561 		update_dq_rx_prebit(dram);
1562 	else
1563 		update_dq_tx_prebit(dram);
1564 }
1565 
1566 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1567 {
1568 	void __iomem *phy_base = dram->phy;
1569 	u32 ret;
1570 	u32 dis_auto_zq = 0;
1571 	u32 odt_val_up, odt_val_dn;
1572 	u32 i, j;
1573 
1574 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1575 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1576 
1577 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1578 		for (i = 0; i < 4; i++) {
1579 			j = 0x110 + i * 0x10;
1580 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1581 			       PHY_REG(phy_base, j));
1582 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1583 			       PHY_REG(phy_base, j + 0x1));
1584 		}
1585 	}
1586 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1587 	/* use normal read mode for data training */
1588 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1589 
1590 	if (dramtype == DDR4)
1591 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1592 
1593 	/* choose training cs */
1594 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1595 	/* enable gate training */
1596 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1597 	udelay(50);
1598 	ret = readl(PHY_REG(phy_base, 0x91));
1599 	/* disable gate training */
1600 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1601 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1602 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1603 
1604 	if (ret & 0x20)
1605 		ret = -1;
1606 	else
1607 		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1608 
1609 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1610 		for (i = 0; i < 4; i++) {
1611 			j = 0x110 + i * 0x10;
1612 			writel(odt_val_dn, PHY_REG(phy_base, j));
1613 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1614 		}
1615 	}
1616 	return ret;
1617 }
1618 
1619 static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
1620 			    u32 rank)
1621 {
1622 	void __iomem *pctl_base = dram->pctl;
1623 	void __iomem *phy_base = dram->phy;
1624 	u32 dis_auto_zq = 0;
1625 	u32 tmp;
1626 	u32 cur_fsp;
1627 	u32 timeout_us = 1000;
1628 
1629 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1630 
1631 	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1632 
1633 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1634 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
1635 	      0xffff;
1636 	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));
1637 
1638 	/* disable another cs's output */
1639 	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1640 		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
1641 			      dramtype);
1642 	if (dramtype == DDR3 || dramtype == DDR4)
1643 		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1644 	else
1645 		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1646 
1647 	/* choose cs */
1648 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1649 			((0x2 >> cs) << 6) | (0 << 2));
1650 	/* enable write leveling */
1651 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1652 			((0x2 >> cs) << 6) | (1 << 2));
1653 
1654 	while (1) {
1655 		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
1656 		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
1657 			break;
1658 
1659 		udelay(1);
1660 		if (timeout_us-- == 0) {
1661 			printascii("error: write leveling timeout\n");
1662 			while (1)
1663 				;
1664 		}
1665 	}
1666 
1667 	/* disable write leveling */
1668 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1669 			((0x2 >> cs) << 6) | (0 << 2));
1670 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);
1671 
1672 	/* enable another cs's output */
1673 	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1674 		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
1675 			      dramtype);
1676 
1677 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1678 
1679 	return 0;
1680 }
1681 
1682 char pattern[32] = {
1683 	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
1684 	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
1685 	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
1686 	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
1687 };
1688 
1689 static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
1690 			    u32 mhz)
1691 {
1692 	void __iomem *pctl_base = dram->pctl;
1693 	void __iomem *phy_base = dram->phy;
1694 	u32 trefi_1x, trfc_1x;
1695 	u32 dis_auto_zq = 0;
1696 	u32 timeout_us = 1000;
1697 	u32 dqs_default;
1698 	u32 cur_fsp;
1699 	u32 vref_inner;
1700 	u32 i;
1701 	struct sdram_head_info_index_v2 *index =
1702 		(struct sdram_head_info_index_v2 *)common_info;
1703 	struct dq_map_info *map_info;
1704 
1705 	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
1706 	if (dramtype == DDR3 && vref_inner == 0x80) {
1707 		for (i = 0; i < 4; i++)
1708 			writel(vref_inner - 0xa,
1709 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1710 
1711 		/* reg_rx_vref_value_update */
1712 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1713 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1714 	}
1715 
1716 	map_info = (struct dq_map_info *)((void *)common_info +
1717 		index->dq_map_index.offset * 4);
1718 	/* only 1cs a time, 0:cs0 1 cs1 */
1719 	if (cs > 1)
1720 		return -1;
1721 
1722 	dqs_default = 0xf;
1723 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1724 
1725 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1726 	/* config refresh timing */
1727 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1728 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1729 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1730 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1731 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1732 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1733 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1734 	/* reg_phy_trfc */
1735 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1736 	/* reg_max_refi_cnt */
1737 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1738 
1739 	/* choose training cs */
1740 	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);
1741 
1742 	/* set dq map for ddr4 */
1743 	if (dramtype == DDR4) {
1744 		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
1745 		for (i = 0; i < 4; i++) {
1746 			writel((map_info->ddr4_dq_map[cs * 2] >>
1747 				((i % 4) * 8)) & 0xff,
1748 				PHY_REG(phy_base, 0x238 + i));
1749 			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
1750 				((i % 4) * 8)) & 0xff,
1751 				PHY_REG(phy_base, 0x2b8 + i));
1752 		}
1753 	}
1754 
1755 	/* cha_l reg_l_rd_train_dqs_default[5:0] */
1756 	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
1757 	/* cha_h reg_h_rd_train_dqs_default[5:0] */
1758 	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
1759 	/* chb_l reg_l_rd_train_dqs_default[5:0] */
1760 	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
1761 	/* chb_h reg_h_rd_train_dqs_default[5:0] */
1762 	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
1763 
1764 	/* Choose the read train auto mode */
1765 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1766 	/* Enable the auto train of the read train */
1767 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1768 
1769 	/* Wait the train done. */
1770 	while (1) {
1771 		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1772 			break;
1773 
1774 		udelay(1);
1775 		if (timeout_us-- == 0) {
1776 			printascii("error: read training timeout\n");
1777 			return -1;
1778 		}
1779 	}
1780 
1781 	/* Check the read train state */
1782 	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1783 	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1784 		printascii("error: read training error\n");
1785 		return -1;
1786 	}
1787 
1788 	/* Exit the Read Training by setting */
1789 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1790 
1791 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1792 
1793 	if (dramtype == DDR3 && vref_inner == 0x80) {
1794 		for (i = 0; i < 4; i++)
1795 			writel(vref_inner,
1796 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1797 
1798 		/* reg_rx_vref_value_update */
1799 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1800 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1801 	}
1802 
1803 	return 0;
1804 }
1805 
1806 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1807 			    u32 mhz, u32 dst_fsp)
1808 {
1809 	void __iomem *pctl_base = dram->pctl;
1810 	void __iomem *phy_base = dram->phy;
1811 	u32 trefi_1x, trfc_1x;
1812 	u32 dis_auto_zq = 0;
1813 	u32 timeout_us = 1000;
1814 	u32 cur_fsp;
1815 	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1816 
1817 	if (dramtype == LPDDR3 && mhz <= 400) {
1818 		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1819 		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1820 		cl = readl(PHY_REG(phy_base, offset));
1821 		cwl = readl(PHY_REG(phy_base, offset + 2));
1822 
1823 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1824 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1825 		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1826 	}
1827 
1828 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1829 
1830 	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1831 	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1832 	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1833 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1834 	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1835 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1836 	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1837 	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1838 	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1839 	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1840 
1841 	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1842 	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1843 
1844 	/* config refresh timing */
1845 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1846 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1847 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1848 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1849 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1850 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1851 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1852 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1853 	/* reg_phy_trfc */
1854 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1855 	/* reg_max_refi_cnt */
1856 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1857 
1858 	/* choose training cs */
1859 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1860 
1861 	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
1862 	/* 0: Use the write-leveling value. */
1863 	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
1864 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1865 
1866 	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1867 	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1868 
1869 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1870 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1871 
1872 	send_a_refresh(dram);
1873 
1874 	while (1) {
1875 		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1876 			break;
1877 
1878 		udelay(1);
1879 		if (timeout_us-- == 0) {
1880 			printascii("error: write training timeout\n");
1881 			while (1)
1882 				;
1883 		}
1884 	}
1885 
1886 	/* Check the write train state */
1887 	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1888 		printascii("error: write training error\n");
1889 		return -1;
1890 	}
1891 
1892 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1893 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1894 
1895 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1896 
1897 	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
1898 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1899 		fsp_param[dst_fsp].vref_dq[cs] =
1900 			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1901 			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1902 		/* add range info */
1903 		fsp_param[dst_fsp].vref_dq[cs] |=
1904 			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1905 	}
1906 
1907 	if (dramtype == LPDDR3 && mhz <= 400) {
1908 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1909 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1910 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1911 			       DDR_PCTL2_INIT3);
1912 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1913 			      dramtype);
1914 	}
1915 
1916 	return 0;
1917 }
1918 
1919 static int data_training(struct dram_info *dram, u32 cs,
1920 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1921 			 u32 training_flag)
1922 {
1923 	u32 ret = 0;
1924 
1925 	if (training_flag == FULL_TRAINING)
1926 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1927 				WRITE_TRAINING | READ_TRAINING;
1928 
1929 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1930 		ret = data_training_wl(dram, cs,
1931 				       sdram_params->base.dramtype,
1932 				       sdram_params->ch.cap_info.rank);
1933 		if (ret != 0)
1934 			goto out;
1935 	}
1936 
1937 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1938 		ret = data_training_rg(dram, cs,
1939 				       sdram_params->base.dramtype);
1940 		if (ret != 0)
1941 			goto out;
1942 	}
1943 
1944 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1945 		ret = data_training_rd(dram, cs,
1946 				       sdram_params->base.dramtype,
1947 				       sdram_params->base.ddr_freq);
1948 		if (ret != 0)
1949 			goto out;
1950 	}
1951 
1952 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1953 		ret = data_training_wr(dram, cs,
1954 				       sdram_params->base.dramtype,
1955 				       sdram_params->base.ddr_freq, dst_fsp);
1956 		if (ret != 0)
1957 			goto out;
1958 	}
1959 
1960 out:
1961 	return ret;
1962 }
1963 
1964 static int get_wrlvl_val(struct dram_info *dram,
1965 			 struct rv1126_sdram_params *sdram_params)
1966 {
1967 	u32 i, j, clk_skew;
1968 	void __iomem *phy_base = dram->phy;
1969 	u32 lp_stat;
1970 	int ret;
1971 
1972 	lp_stat = low_power_update(dram, 0);
1973 
1974 	clk_skew = 0x1f;
1975 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1976 			 sdram_params->base.dramtype);
1977 
1978 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1979 	if (sdram_params->ch.cap_info.rank == 2)
1980 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1981 
1982 	for (j = 0; j < 2; j++)
1983 		for (i = 0; i < 4; i++)
1984 			wrlvl_result[j][i] =
1985 				readl(PHY_REG(phy_base,
1986 					      wrlvl_result_offset[j][i])) -
1987 				clk_skew;
1988 
1989 	low_power_update(dram, lp_stat);
1990 
1991 	return ret;
1992 }
1993 
1994 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1995 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1996 				      void __iomem *phy_base, u8 cs_num)
1997 {
1998 	int i;
1999 
2000 	result->cs_num = cs_num;
2001 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
2002 			  PHY_DQ_WIDTH_MASK;
2003 	for (i = 0; i < FSP_NUM; i++)
2004 		result->fsp_mhz[i] = 0;
2005 }
2006 
2007 static void save_rw_trn_min_max(void __iomem *phy_base,
2008 				struct cs_rw_trn_result *rd_result,
2009 				struct cs_rw_trn_result *wr_result,
2010 				u8 byte_en)
2011 {
2012 	u16 phy_ofs;
2013 	u8 dqs;
2014 	u8 dq;
2015 
2016 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2017 		if ((byte_en & BIT(dqs)) == 0)
2018 			continue;
2019 
2020 		/* Channel A or B (low or high 16 bit) */
2021 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2022 		/* low or high 8 bit */
2023 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2024 		for (dq = 0; dq < 8; dq++) {
2025 			rd_result->dqs[dqs].dq_min[dq] =
2026 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2027 			rd_result->dqs[dqs].dq_max[dq] =
2028 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2029 			wr_result->dqs[dqs].dq_min[dq] =
2030 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2031 			wr_result->dqs[dqs].dq_max[dq] =
2032 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2033 		}
2034 	}
2035 }
2036 
2037 static void save_rw_trn_deskew(void __iomem *phy_base,
2038 			       struct fsp_rw_trn_result *result, u8 cs_num,
2039 			       int min_val, bool rw)
2040 {
2041 	u16 phy_ofs;
2042 	u8 cs;
2043 	u8 dq;
2044 
2045 	result->min_val = min_val;
2046 
2047 	for (cs = 0; cs < cs_num; cs++) {
2048 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2049 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2050 		for (dq = 0; dq < 8; dq++) {
2051 			result->cs[cs].dqs[0].dq_deskew[dq] =
2052 				readb(PHY_REG(phy_base, phy_ofs + dq));
2053 			result->cs[cs].dqs[1].dq_deskew[dq] =
2054 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2055 			result->cs[cs].dqs[2].dq_deskew[dq] =
2056 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2057 			result->cs[cs].dqs[3].dq_deskew[dq] =
2058 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2059 		}
2060 
2061 		result->cs[cs].dqs[0].dqs_deskew =
2062 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2063 		result->cs[cs].dqs[1].dqs_deskew =
2064 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2065 		result->cs[cs].dqs[2].dqs_deskew =
2066 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2067 		result->cs[cs].dqs[3].dqs_deskew =
2068 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2069 	}
2070 }
2071 
2072 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2073 {
2074 	result->flag = DDR_DQ_EYE_FLAG;
2075 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2076 }
2077 #endif
2078 
2079 static int high_freq_training(struct dram_info *dram,
2080 			      struct rv1126_sdram_params *sdram_params,
2081 			      u32 fsp)
2082 {
2083 	u32 i, j;
2084 	void __iomem *phy_base = dram->phy;
2085 	u32 dramtype = sdram_params->base.dramtype;
2086 	int min_val;
2087 	int dqs_skew, clk_skew, ca_skew;
2088 	u8 byte_en;
2089 	int ret;
2090 
2091 	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2092 	dqs_skew = 0;
2093 	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2094 		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2095 			if ((byte_en & BIT(i)) != 0)
2096 				dqs_skew += wrlvl_result[j][i];
2097 		}
2098 	}
2099 	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
2100 			       (1 << sdram_params->ch.cap_info.bw));
2101 
2102 	clk_skew = 0x20 - dqs_skew;
2103 	dqs_skew = 0x20;
2104 
2105 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2106 		min_val = 0xff;
2107 		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2108 			for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
2109 				min_val = MIN(wrlvl_result[j][i], min_val);
2110 
2111 		if (min_val < 0) {
2112 			clk_skew = -min_val;
2113 			ca_skew = -min_val;
2114 		} else {
2115 			clk_skew = 0;
2116 			ca_skew = 0;
2117 		}
2118 	} else if (dramtype == LPDDR3) {
2119 		ca_skew = clk_skew - 4;
2120 	} else {
2121 		ca_skew = clk_skew;
2122 	}
2123 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2124 			 dramtype);
2125 
2126 	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2127 	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2128 	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2129 	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2130 	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2131 			    READ_TRAINING | WRITE_TRAINING);
2132 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2133 	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2134 	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2135 			    &rw_trn_result.wr_fsp[fsp].cs[0],
2136 			    rw_trn_result.byte_en);
2137 #endif
2138 	if (sdram_params->ch.cap_info.rank == 2) {
2139 		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2140 		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2141 		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2142 		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2143 		ret |= data_training(dram, 1, sdram_params, fsp,
2144 				     READ_GATE_TRAINING | READ_TRAINING |
2145 				     WRITE_TRAINING);
2146 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2147 		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2148 				    &rw_trn_result.wr_fsp[fsp].cs[1],
2149 				    rw_trn_result.byte_en);
2150 #endif
2151 	}
2152 	if (ret)
2153 		goto out;
2154 
2155 	record_dq_prebit(dram);
2156 
2157 	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2158 				sdram_params->ch.cap_info.rank) * -1;
2159 	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2160 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2161 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2162 	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2163 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2164 			   SKEW_RX_SIGNAL);
2165 #endif
2166 
2167 	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2168 				    sdram_params->ch.cap_info.rank),
2169 		      get_min_value(dram, SKEW_CA_SIGNAL,
2170 				    sdram_params->ch.cap_info.rank)) * -1;
2171 
2172 	/* clk = 0, rx all skew -7, tx - min_value */
2173 	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2174 			 dramtype);
2175 
2176 	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2177 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2178 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2179 	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2180 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2181 			   SKEW_TX_SIGNAL);
2182 #endif
2183 
2184 	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2185 	if (sdram_params->ch.cap_info.rank == 2)
2186 		ret |= data_training(dram, 1, sdram_params, 0,
2187 				     READ_GATE_TRAINING);
2188 out:
2189 	return ret;
2190 }
2191 
2192 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2193 {
2194 	writel(ddrconfig, &dram->msch->deviceconf);
2195 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2196 }
2197 
2198 static void update_noc_timing(struct dram_info *dram,
2199 			      struct rv1126_sdram_params *sdram_params)
2200 {
2201 	void __iomem *pctl_base = dram->pctl;
2202 	u32 bw, bl;
2203 
2204 	bw = 8 << sdram_params->ch.cap_info.bw;
2205 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2206 
2207 	/* update the noc timing related to data bus width */
2208 	if ((bw / 8 * bl) <= 16)
2209 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2210 	else if ((bw / 8 * bl) == 32)
2211 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2212 	else if ((bw / 8 * bl) == 64)
2213 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2214 	else
2215 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2216 
2217 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2218 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2219 
2220 	if (sdram_params->base.dramtype == LPDDR4 ||
2221 	    sdram_params->base.dramtype == LPDDR4X) {
2222 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2223 			(bw == 16) ? 0x1 : 0x2;
2224 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2225 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2226 	}
2227 
2228 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2229 	       &dram->msch->ddrtiminga0);
2230 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2231 	       &dram->msch->ddrtimingb0);
2232 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2233 	       &dram->msch->ddrtimingc0);
2234 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2235 	       &dram->msch->devtodev0);
2236 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2237 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2238 	       &dram->msch->ddr4timing);
2239 }
2240 
2241 static int split_setup(struct dram_info *dram,
2242 		       struct rv1126_sdram_params *sdram_params)
2243 {
2244 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2245 	u32 dramtype = sdram_params->base.dramtype;
2246 	u32 split_size, split_mode;
2247 	u64 cs_cap[2], cap;
2248 
2249 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2250 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2251 	/* only support the larger cap is in low 16bit */
2252 	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2253 		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2254 		cap_info->cs0_high16bit_row));
2255 	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2256 		   (cap_info->rank == 2)) {
2257 		if (!cap_info->cs1_high16bit_row)
2258 			cap = cs_cap[0];
2259 		else
2260 			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2261 				cap_info->cs1_high16bit_row));
2262 	} else {
2263 		goto out;
2264 	}
2265 	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2266 	if (cap_info->bw == 2)
2267 		split_mode = SPLIT_MODE_32_L16_VALID;
2268 	else
2269 		split_mode = SPLIT_MODE_16_L8_VALID;
2270 
2271 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2272 		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2273 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2274 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2275 		     (split_mode << SPLIT_MODE_OFFSET) |
2276 		     (0x0 << SPLIT_BYPASS_OFFSET) |
2277 		     (split_size << SPLIT_SIZE_OFFSET));
2278 
2279 	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2280 		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2281 		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2282 
2283 out:
2284 	return 0;
2285 }
2286 
2287 static void split_bypass(struct dram_info *dram)
2288 {
2289 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2290 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2291 		return;
2292 
2293 	/* bypass split */
2294 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2295 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2296 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2297 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2298 		     (0x0 << SPLIT_SIZE_OFFSET));
2299 }
2300 
2301 static void dram_all_config(struct dram_info *dram,
2302 			    struct rv1126_sdram_params *sdram_params)
2303 {
2304 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2305 	u32 dram_type = sdram_params->base.dramtype;
2306 	void __iomem *pctl_base = dram->pctl;
2307 	u32 sys_reg2 = 0;
2308 	u32 sys_reg3 = 0;
2309 	u64 cs_cap[2];
2310 	u32 cs_pst;
2311 
2312 	set_ddrconfig(dram, cap_info->ddrconfig);
2313 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2314 			 &sys_reg3, 0);
2315 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2316 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2317 
2318 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2319 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2320 
2321 	if (cap_info->rank == 2) {
2322 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2323 			6 + 2;
2324 		if (cs_pst > 28)
2325 			cs_cap[0] = 1llu << cs_pst;
2326 	}
2327 
2328 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2329 			(((cs_cap[0] >> 20) / 64) & 0xff),
2330 			&dram->msch->devicesize);
2331 	update_noc_timing(dram, sdram_params);
2332 }
2333 
2334 static void enable_low_power(struct dram_info *dram,
2335 			     struct rv1126_sdram_params *sdram_params)
2336 {
2337 	void __iomem *pctl_base = dram->pctl;
2338 	u32 grf_lp_con;
2339 
2340 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2341 
2342 	if (sdram_params->base.dramtype == DDR4)
2343 		grf_lp_con = (0x7 << 16) | (1 << 1);
2344 	else if (sdram_params->base.dramtype == DDR3)
2345 		grf_lp_con = (0x7 << 16) | (1 << 0);
2346 	else
2347 		grf_lp_con = (0x7 << 16) | (1 << 2);
2348 
2349 	/* en lpckdis_en */
2350 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2351 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2352 
2353 	/* enable sr, pd */
2354 	if (dram->pd_idle == 0)
2355 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2356 	else
2357 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2358 	if (dram->sr_idle == 0)
2359 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2360 	else
2361 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2362 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2363 }
2364 
2365 static void ddr_set_atags(struct dram_info *dram,
2366 			  struct rv1126_sdram_params *sdram_params)
2367 {
2368 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2369 	u32 dram_type = sdram_params->base.dramtype;
2370 	void __iomem *pctl_base = dram->pctl;
2371 	struct tag_serial t_serial;
2372 	struct tag_ddr_mem t_ddrmem;
2373 	struct tag_soc_info t_socinfo;
2374 	u64 cs_cap[2];
2375 	u32 cs_pst = 0;
2376 	u32 split, split_size;
2377 	u64 reduce_cap = 0;
2378 
2379 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2380 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2381 
2382 	memset(&t_serial, 0, sizeof(struct tag_serial));
2383 
2384 	t_serial.version = 0;
2385 	t_serial.enable = 1;
2386 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2387 	t_serial.baudrate = CONFIG_BAUDRATE;
2388 	t_serial.m_mode = SERIAL_M_MODE_M0;
2389 	t_serial.id = 2;
2390 
2391 	atags_destroy();
2392 	atags_set_tag(ATAG_SERIAL,  &t_serial);
2393 
2394 	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
2395 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2396 	if (cap_info->row_3_4) {
2397 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2398 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2399 	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
2400 		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
2401 		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
2402 	}
2403 	t_ddrmem.version = 0;
2404 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2405 	if (cs_cap[1]) {
2406 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2407 			6 + 2;
2408 	}
2409 
2410 	if (cs_cap[1] && cs_pst > 27) {
2411 		t_ddrmem.count = 2;
2412 		t_ddrmem.bank[1] = 1 << cs_pst;
2413 		t_ddrmem.bank[2] = cs_cap[0];
2414 		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
2415 	} else {
2416 		t_ddrmem.count = 1;
2417 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
2418 	}
2419 
2420 	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);
2421 
2422 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2423 	t_socinfo.version = 0;
2424 	t_socinfo.name = 0x1126;
2425 }
2426 
2427 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2428 {
2429 	u32 split;
2430 
2431 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2432 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2433 		split = 0;
2434 	else
2435 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2436 			SPLIT_SIZE_MASK;
2437 
2438 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2439 			     &sdram_params->base, split);
2440 }
2441 
2442 static int sdram_init_(struct dram_info *dram,
2443 		       struct rv1126_sdram_params *sdram_params, u32 post_init)
2444 {
2445 	void __iomem *pctl_base = dram->pctl;
2446 	void __iomem *phy_base = dram->phy;
2447 	u32 ddr4_vref;
2448 	u32 mr_tmp;
2449 
2450 	rkclk_configure_ddr(dram, sdram_params);
2451 
2452 	rkclk_ddr_reset(dram, 1, 1, 1, 1);
2453 	udelay(10);
2454 
2455 	rkclk_ddr_reset(dram, 1, 1, 1, 0);
2456 	phy_cfg(dram, sdram_params);
2457 
2458 	rkclk_ddr_reset(dram, 1, 1, 0, 0);
2459 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2460 
2461 	rkclk_ddr_reset(dram, 1, 0, 0, 0);
2462 	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2463 		 dram->sr_idle, dram->pd_idle);
2464 
2465 	if (sdram_params->ch.cap_info.bw == 2) {
2466 		/* 32bit interface use pageclose */
2467 		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2468 		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
2469 		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2470 	} else {
2471 		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2472 	}
2473 
2474 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2475 	u32 tmp, trefi;
2476 
2477 	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2478 	trefi = (tmp >> 16) & 0xfff;
2479 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2480 	       pctl_base + DDR_PCTL2_RFSHTMG);
2481 #endif
2482 
2483 	/* set frequency_mode */
2484 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2485 	/* set target_frequency to Frequency 0 */
2486 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2487 
2488 	set_ds_odt(dram, sdram_params, 0);
2489 	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2490 	set_ctl_address_map(dram, sdram_params);
2491 
2492 	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2493 
2494 	rkclk_ddr_reset(dram, 0, 0, 0, 0);
2495 
2496 	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
2497 		continue;
2498 
2499 	if (sdram_params->base.dramtype == LPDDR3) {
2500 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2501 	} else if (sdram_params->base.dramtype == LPDDR4 ||
2502 		   sdram_params->base.dramtype == LPDDR4X) {
2503 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2504 		/* MR11 */
2505 		pctl_write_mr(dram->pctl, 3, 11,
2506 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2507 			      LPDDR4);
2508 		/* MR12 */
2509 		pctl_write_mr(dram->pctl, 3, 12,
2510 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2511 			      LPDDR4);
2512 
2513 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2514 		/* MR22 */
2515 		pctl_write_mr(dram->pctl, 3, 22,
2516 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2517 			      LPDDR4);
2518 	}
2519 
2520 	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
2521 		if (post_init != 0)
2522 			printascii("DTT cs0 error\n");
2523 		return -1;
2524 	}
2525 
2526 	if (sdram_params->base.dramtype == LPDDR4) {
2527 		mr_tmp = read_mr(dram, 1, 14, LPDDR4);
2528 
2529 		if (mr_tmp != 0x4d)
2530 			return -1;
2531 	}
2532 
2533 	if (sdram_params->base.dramtype == LPDDR4 ||
2534 	    sdram_params->base.dramtype == LPDDR4X) {
2535 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2536 		/* MR14 */
2537 		pctl_write_mr(dram->pctl, 3, 14,
2538 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2539 			      LPDDR4);
2540 	}
2541 	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2542 		if (data_training(dram, 1, sdram_params, 0,
2543 				  READ_GATE_TRAINING) != 0) {
2544 			printascii("DTT cs1 error\n");
2545 			return -1;
2546 		}
2547 	}
2548 
2549 	if (sdram_params->base.dramtype == DDR4) {
2550 		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2551 		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2552 				  sdram_params->base.dramtype);
2553 	}
2554 
2555 	dram_all_config(dram, sdram_params);
2556 	enable_low_power(dram, sdram_params);
2557 
2558 	return 0;
2559 }
2560 
2561 static u64 dram_detect_cap(struct dram_info *dram,
2562 			   struct rv1126_sdram_params *sdram_params,
2563 			   unsigned char channel)
2564 {
2565 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2566 	void __iomem *pctl_base = dram->pctl;
2567 	void __iomem *phy_base = dram->phy;
2568 	u32 mr8;
2569 
2570 	u32 bktmp;
2571 	u32 coltmp;
2572 	u32 rowtmp;
2573 	u32 cs;
2574 	u32 dram_type = sdram_params->base.dramtype;
2575 	u32 pwrctl;
2576 	u32 i, dq_map;
2577 	u32 byte1 = 0, byte0 = 0;
2578 
2579 	cap_info->bw = dram_type == DDR3 ? 0 : 1;
2580 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2581 		if (dram_type != DDR4) {
2582 			coltmp = 12;
2583 			bktmp = 3;
2584 			if (dram_type == LPDDR2)
2585 				rowtmp = 15;
2586 			else
2587 				rowtmp = 16;
2588 
2589 			if (sdram_detect_col(cap_info, coltmp) != 0)
2590 				goto cap_err;
2591 
2592 			sdram_detect_bank(cap_info, coltmp, bktmp);
2593 			sdram_detect_dbw(cap_info, dram_type);
2594 		} else {
2595 			coltmp = 10;
2596 			bktmp = 4;
2597 			rowtmp = 17;
2598 
2599 			cap_info->col = 10;
2600 			cap_info->bk = 2;
2601 			sdram_detect_bg(cap_info, coltmp);
2602 		}
2603 
2604 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2605 			goto cap_err;
2606 
2607 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2608 	} else {
2609 		cap_info->col = 10;
2610 		cap_info->bk = 3;
2611 		mr8 = read_mr(dram, 1, 8, dram_type);
2612 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2613 		mr8 = (mr8 >> 2) & 0xf;
2614 		if (mr8 >= 0 && mr8 <= 6) {
2615 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2616 		} else if (mr8 == 0xc) {
2617 			cap_info->cs0_row = 13;
2618 		} else {
2619 			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2620 			goto cap_err;
2621 		}
2622 		if (cap_info->dbw == 0)
2623 			cap_info->cs0_row++;
2624 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2625 		if (cap_info->cs0_row >= 17) {
2626 			printascii("Cap ERR: ");
2627 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2628 			goto cap_err;
2629 			// cap_info->cs0_row = 16;
2630 			// cap_info->row_3_4 = 0;
2631 		}
2632 		cap_info->bw = 2;
2633 	}
2634 
2635 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2636 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2637 
2638 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2639 		cs = 1;
2640 	else
2641 		cs = 0;
2642 	cap_info->rank = cs + 1;
2643 
2644 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2645 		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2646 
2647 		if (data_training(dram, 0, sdram_params, 0,
2648 				  READ_GATE_TRAINING) == 0) {
2649 			cap_info->bw = 2;
2650 		} else {
2651 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2652 			for (i = 0; i < 4; i++) {
2653 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2654 					byte0 = i;
2655 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2656 					byte1 = i;
2657 			}
2658 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2659 					BIT(byte0) | BIT(byte1));
2660 			if (data_training(dram, 0, sdram_params, 0,
2661 					  READ_GATE_TRAINING) == 0)
2662 				cap_info->bw = 1;
2663 			else
2664 				cap_info->bw = 0;
2665 		}
2666 		if (cap_info->bw > 0)
2667 			cap_info->dbw = 1;
2668 	}
2669 
2670 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2671 
2672 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2673 	if (cs) {
2674 		cap_info->cs1_row = cap_info->cs0_row;
2675 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2676 	} else {
2677 		cap_info->cs1_row = 0;
2678 		cap_info->cs1_high16bit_row = 0;
2679 	}
2680 
2681 	return 0;
2682 cap_err:
2683 	return -1;
2684 }
2685 
2686 static int dram_detect_cs1_row(struct dram_info *dram,
2687 			       struct rv1126_sdram_params *sdram_params,
2688 			       unsigned char channel)
2689 {
2690 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2691 	void __iomem *pctl_base = dram->pctl;
2692 	u32 ret = 0;
2693 	void __iomem *test_addr;
2694 	u32 row, bktmp, coltmp, bw;
2695 	u64 cs0_cap;
2696 	u32 byte_mask;
2697 	u32 cs_pst;
2698 	u32 cs_add = 0;
2699 	u32 max_row;
2700 
2701 	if (cap_info->rank == 2) {
2702 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2703 			6 + 2;
2704 		if (cs_pst < 28)
2705 			cs_add = 1;
2706 
2707 		cs0_cap = 1 << cs_pst;
2708 
2709 		if (sdram_params->base.dramtype == DDR4) {
2710 			if (cap_info->dbw == 0)
2711 				bktmp = cap_info->bk + 2;
2712 			else
2713 				bktmp = cap_info->bk + 1;
2714 		} else {
2715 			bktmp = cap_info->bk;
2716 		}
2717 		bw = cap_info->bw;
2718 		coltmp = cap_info->col;
2719 
2720 		if (bw == 2)
2721 			byte_mask = 0xFFFF;
2722 		else
2723 			byte_mask = 0xFF;
2724 
2725 		max_row = (cs_pst == 31) ? 30 : 31;
2726 
2727 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2728 
2729 		row = (cap_info->cs0_row > max_row) ? max_row :
2730 			cap_info->cs0_row;
2731 
2732 		for (; row > 12; row--) {
2733 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2734 				    (u32)cs0_cap +
2735 				    (1ul << (row + bktmp + coltmp +
2736 					     cs_add + bw - 1ul)));
2737 
2738 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2739 			writel(PATTERN, test_addr);
2740 
2741 			if (((readl(test_addr) & byte_mask) ==
2742 			     (PATTERN & byte_mask)) &&
2743 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2744 			      byte_mask) == 0)) {
2745 				ret = row;
2746 				break;
2747 			}
2748 		}
2749 	}
2750 
2751 	return ret;
2752 }
2753 
2754 /* return: 0 = success, other = fail */
2755 static int sdram_init_detect(struct dram_info *dram,
2756 			     struct rv1126_sdram_params *sdram_params)
2757 {
2758 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2759 	u32 ret;
2760 	u32 sys_reg = 0;
2761 	u32 sys_reg3 = 0;
2762 	struct sdram_head_info_index_v2 *index =
2763 		(struct sdram_head_info_index_v2 *)common_info;
2764 	struct dq_map_info *map_info;
2765 
2766 	map_info = (struct dq_map_info *)((void *)common_info +
2767 		index->dq_map_index.offset * 4);
2768 
2769 	if (sdram_init_(dram, sdram_params, 0)) {
2770 		if (sdram_params->base.dramtype == DDR3) {
2771 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2772 					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2773 					(0x0 << 0)) << 24);
2774 			if (sdram_init_(dram, sdram_params, 0))
2775 				return -1;
2776 		} else {
2777 			return -1;
2778 		}
2779 	}
2780 
2781 	if (sdram_params->base.dramtype == DDR3) {
2782 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2783 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2784 			return -1;
2785 	}
2786 
2787 	split_bypass(dram);
2788 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2789 		return -1;
2790 
2791 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2792 				   sdram_params->base.dramtype);
2793 	ret = sdram_init_(dram, sdram_params, 1);
2794 	if (ret != 0)
2795 		goto out;
2796 
2797 	cap_info->cs1_row =
2798 		dram_detect_cs1_row(dram, sdram_params, 0);
2799 	if (cap_info->cs1_row) {
2800 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2801 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2802 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2803 				    sys_reg, sys_reg3, 0);
2804 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2805 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2806 	}
2807 
2808 	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2809 	split_setup(dram, sdram_params);
2810 out:
2811 	return ret;
2812 }
2813 
2814 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2815 {
2816 	u32 i;
2817 	u32 offset = 0;
2818 	struct ddr2_3_4_lp2_3_info *ddr_info;
2819 
2820 	if (!freq_mhz) {
2821 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2822 		if (ddr_info)
2823 			freq_mhz =
2824 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2825 				DDR_FREQ_MASK;
2826 		else
2827 			freq_mhz = 0;
2828 	}
2829 
2830 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2831 		if (sdram_configs[i].base.ddr_freq == 0 ||
2832 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2833 			break;
2834 	}
2835 	offset = i == 0 ? 0 : i - 1;
2836 
2837 	return &sdram_configs[offset];
2838 }
2839 
2840 static const u16 pctl_need_update_reg[] = {
2841 	DDR_PCTL2_RFSHTMG,
2842 	DDR_PCTL2_INIT3,
2843 	DDR_PCTL2_INIT4,
2844 	DDR_PCTL2_INIT6,
2845 	DDR_PCTL2_INIT7,
2846 	DDR_PCTL2_DRAMTMG0,
2847 	DDR_PCTL2_DRAMTMG1,
2848 	DDR_PCTL2_DRAMTMG2,
2849 	DDR_PCTL2_DRAMTMG3,
2850 	DDR_PCTL2_DRAMTMG4,
2851 	DDR_PCTL2_DRAMTMG5,
2852 	DDR_PCTL2_DRAMTMG6,
2853 	DDR_PCTL2_DRAMTMG7,
2854 	DDR_PCTL2_DRAMTMG8,
2855 	DDR_PCTL2_DRAMTMG9,
2856 	DDR_PCTL2_DRAMTMG12,
2857 	DDR_PCTL2_DRAMTMG13,
2858 	DDR_PCTL2_DRAMTMG14,
2859 	DDR_PCTL2_ZQCTL0,
2860 	DDR_PCTL2_DFITMG0,
2861 	DDR_PCTL2_ODTCFG
2862 };
2863 
2864 static const u16 phy_need_update_reg[] = {
2865 	0x14,
2866 	0x18,
2867 	0x1c
2868 };
2869 
2870 static void pre_set_rate(struct dram_info *dram,
2871 			 struct rv1126_sdram_params *sdram_params,
2872 			 u32 dst_fsp, u32 dst_fsp_lp4)
2873 {
2874 	u32 i, j, find;
2875 	void __iomem *pctl_base = dram->pctl;
2876 	void __iomem *phy_base = dram->phy;
2877 	u32 phy_offset;
2878 	u32 mr_tmp;
2879 	u32 dramtype = sdram_params->base.dramtype;
2880 
2881 	sw_set_req(dram);
2882 	/* pctl timing update */
2883 	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2884 		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2885 		     j++) {
2886 			if (sdram_params->pctl_regs.pctl[j][0] ==
2887 			    pctl_need_update_reg[i]) {
2888 				writel(sdram_params->pctl_regs.pctl[j][1],
2889 				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2890 				       pctl_need_update_reg[i]);
2891 				find = j;
2892 				break;
2893 			}
2894 		}
2895 	}
2896 
2897 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2898 	u32 tmp, trefi;
2899 
2900 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2901 	trefi = (tmp >> 16) & 0xfff;
2902 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2903 	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2904 #endif
2905 
2906 	sw_set_ack(dram);
2907 
2908 	/* phy timing update */
2909 	if (dst_fsp == 0)
2910 		phy_offset = 0;
2911 	else
2912 		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2913 	/* cl cwl al update */
2914 	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2915 		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2916 		     j++) {
2917 			if (sdram_params->phy_regs.phy[j][0] ==
2918 			    phy_need_update_reg[i]) {
2919 				writel(sdram_params->phy_regs.phy[j][1],
2920 				       phy_base + phy_offset +
2921 				       phy_need_update_reg[i]);
2922 				find = j;
2923 				break;
2924 			}
2925 		}
2926 	}
2927 
2928 	set_ds_odt(dram, sdram_params, dst_fsp);
2929 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2930 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2931 			       DDR_PCTL2_INIT4);
2932 		/* MR13 */
2933 		pctl_write_mr(dram->pctl, 3, 13,
2934 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2935 			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2936 			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2937 		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2938 				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2939 				      ((0x2 << 6) >> dst_fsp_lp4),
2940 				       PHY_REG(phy_base, 0x1b));
2941 		/* MR3 */
2942 		pctl_write_mr(dram->pctl, 3, 3,
2943 			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2944 			      PCTL2_MR_MASK,
2945 			      dramtype);
2946 		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2947 		       PHY_REG(phy_base, 0x19));
2948 
2949 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2950 			       DDR_PCTL2_INIT3);
2951 		/* MR1 */
2952 		pctl_write_mr(dram->pctl, 3, 1,
2953 			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
2954 			      PCTL2_MR_MASK,
2955 			      dramtype);
2956 		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
2957 		       PHY_REG(phy_base, 0x17));
2958 		/* MR2 */
2959 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
2960 			      dramtype);
2961 		writel(mr_tmp & PCTL2_MR_MASK,
2962 		       PHY_REG(phy_base, 0x18));
2963 
2964 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2965 			       DDR_PCTL2_INIT6);
2966 		/* MR11 */
2967 		pctl_write_mr(dram->pctl, 3, 11,
2968 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2969 			      dramtype);
2970 		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2971 		       PHY_REG(phy_base, 0x1a));
2972 		/* MR12 */
2973 		pctl_write_mr(dram->pctl, 3, 12,
2974 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2975 			      dramtype);
2976 
2977 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2978 			       DDR_PCTL2_INIT7);
2979 		/* MR22 */
2980 		pctl_write_mr(dram->pctl, 3, 22,
2981 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2982 			      dramtype);
2983 		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2984 		       PHY_REG(phy_base, 0x1d));
2985 		/* MR14 */
2986 		pctl_write_mr(dram->pctl, 3, 14,
2987 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2988 			      dramtype);
2989 		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2990 		       PHY_REG(phy_base, 0x1c));
2991 	}
2992 
2993 	update_noc_timing(dram, sdram_params);
2994 }
2995 
2996 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
2997 			   struct rv1126_sdram_params *sdram_params)
2998 {
2999 	void __iomem *pctl_base = dram->pctl;
3000 	void __iomem *phy_base = dram->phy;
3001 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
3002 	u32 temp, temp1;
3003 	struct ddr2_3_4_lp2_3_info *ddr_info;
3004 
3005 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
3006 
3007 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
3008 
3009 	if (sdram_params->base.dramtype == LPDDR4 ||
3010 	    sdram_params->base.dramtype == LPDDR4X) {
3011 		p_fsp_param->rd_odt_up_en = 0;
3012 		p_fsp_param->rd_odt_down_en = 1;
3013 	} else {
3014 		p_fsp_param->rd_odt_up_en =
3015 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
3016 		p_fsp_param->rd_odt_down_en =
3017 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
3018 	}
3019 
3020 	if (p_fsp_param->rd_odt_up_en)
3021 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
3022 	else if (p_fsp_param->rd_odt_down_en)
3023 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
3024 	else
3025 		p_fsp_param->rd_odt = 0;
3026 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
3027 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
3028 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
3029 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
3030 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
3031 
3032 	if (sdram_params->base.dramtype == DDR3) {
3033 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3034 			     DDR_PCTL2_INIT3);
3035 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3036 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
3037 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
3038 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3039 	} else if (sdram_params->base.dramtype == DDR4) {
3040 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3041 			     DDR_PCTL2_INIT3);
3042 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3043 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
3044 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
3045 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3046 	} else if (sdram_params->base.dramtype == LPDDR3) {
3047 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3048 			     DDR_PCTL2_INIT4);
3049 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3050 		p_fsp_param->ds_pdds = temp & 0xf;
3051 
3052 		p_fsp_param->dq_odt = lp3_odt_value;
3053 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3054 	} else if (sdram_params->base.dramtype == LPDDR4 ||
3055 		   sdram_params->base.dramtype == LPDDR4X) {
3056 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3057 			     DDR_PCTL2_INIT4);
3058 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3059 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
3060 
3061 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3062 			     DDR_PCTL2_INIT6);
3063 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
3064 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
3065 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
3066 
3067 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
3068 			   readl(PHY_REG(phy_base, 0x3ce)));
3069 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3070 			    readl(PHY_REG(phy_base, 0x3de)));
3071 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3072 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3073 			   readl(PHY_REG(phy_base, 0x3cf)));
3074 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3075 			    readl(PHY_REG(phy_base, 0x3df)));
3076 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3077 		p_fsp_param->vref_ca[0] |=
3078 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3079 		p_fsp_param->vref_ca[1] |=
3080 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3081 
3082 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3083 					      3) & 0x1;
3084 	}
3085 
3086 	p_fsp_param->noc_timings.ddrtiminga0 =
3087 		sdram_params->ch.noc_timings.ddrtiminga0;
3088 	p_fsp_param->noc_timings.ddrtimingb0 =
3089 		sdram_params->ch.noc_timings.ddrtimingb0;
3090 	p_fsp_param->noc_timings.ddrtimingc0 =
3091 		sdram_params->ch.noc_timings.ddrtimingc0;
3092 	p_fsp_param->noc_timings.devtodev0 =
3093 		sdram_params->ch.noc_timings.devtodev0;
3094 	p_fsp_param->noc_timings.ddrmode =
3095 		sdram_params->ch.noc_timings.ddrmode;
3096 	p_fsp_param->noc_timings.ddr4timing =
3097 		sdram_params->ch.noc_timings.ddr4timing;
3098 	p_fsp_param->noc_timings.agingx0 =
3099 		sdram_params->ch.noc_timings.agingx0;
3100 	p_fsp_param->noc_timings.aging0 =
3101 		sdram_params->ch.noc_timings.aging0;
3102 	p_fsp_param->noc_timings.aging1 =
3103 		sdram_params->ch.noc_timings.aging1;
3104 	p_fsp_param->noc_timings.aging2 =
3105 		sdram_params->ch.noc_timings.aging2;
3106 	p_fsp_param->noc_timings.aging3 =
3107 		sdram_params->ch.noc_timings.aging3;
3108 
3109 	p_fsp_param->flag = FSP_FLAG;
3110 }
3111 
3112 #ifndef CONFIG_SPL_KERNEL_BOOT
3113 static void copy_fsp_param_to_ddr(void)
3114 {
3115 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3116 	       sizeof(fsp_param));
3117 }
3118 #endif
3119 
3120 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3121 			     struct sdram_cap_info *cap_info, u32 dram_type,
3122 			     u32 freq)
3123 {
3124 	u64 cs0_cap;
3125 	u32 die_cap;
3126 	u32 trfc_ns, trfc4_ns;
3127 	u32 trfc, txsnr;
3128 	u32 txs_abort_fast = 0;
3129 	u32 tmp;
3130 
3131 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3132 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3133 
3134 	switch (dram_type) {
3135 	case DDR3:
3136 		if (die_cap <= DIE_CAP_512MBIT)
3137 			trfc_ns = 90;
3138 		else if (die_cap <= DIE_CAP_1GBIT)
3139 			trfc_ns = 110;
3140 		else if (die_cap <= DIE_CAP_2GBIT)
3141 			trfc_ns = 160;
3142 		else if (die_cap <= DIE_CAP_4GBIT)
3143 			trfc_ns = 260;
3144 		else
3145 			trfc_ns = 350;
3146 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3147 		break;
3148 
3149 	case DDR4:
3150 		if (die_cap <= DIE_CAP_2GBIT) {
3151 			trfc_ns = 160;
3152 			trfc4_ns = 90;
3153 		} else if (die_cap <= DIE_CAP_4GBIT) {
3154 			trfc_ns = 260;
3155 			trfc4_ns = 110;
3156 		} else if (die_cap <= DIE_CAP_8GBIT) {
3157 			trfc_ns = 350;
3158 			trfc4_ns = 160;
3159 		} else {
3160 			trfc_ns = 550;
3161 			trfc4_ns = 260;
3162 		}
3163 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3164 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3165 		break;
3166 
3167 	case LPDDR3:
3168 		if (die_cap <= DIE_CAP_4GBIT)
3169 			trfc_ns = 130;
3170 		else
3171 			trfc_ns = 210;
3172 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3173 		break;
3174 
3175 	case LPDDR4:
3176 	case LPDDR4X:
3177 		if (die_cap <= DIE_CAP_4GBIT)
3178 			trfc_ns = 130;
3179 		else if (die_cap <= DIE_CAP_8GBIT)
3180 			trfc_ns = 180;
3181 		else if (die_cap <= DIE_CAP_16GBIT)
3182 			trfc_ns = 280;
3183 		else
3184 			trfc_ns = 380;
3185 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3186 		break;
3187 
3188 	default:
3189 		return;
3190 	}
3191 	trfc = (trfc_ns * freq + 999) / 1000;
3192 
3193 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3194 		switch (pctl_regs->pctl[i][0]) {
3195 		case DDR_PCTL2_RFSHTMG:
3196 			tmp = pctl_regs->pctl[i][1];
3197 			/* t_rfc_min */
3198 			tmp &= ~((u32)0x3ff);
3199 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3200 			pctl_regs->pctl[i][1] = tmp;
3201 			break;
3202 
3203 		case DDR_PCTL2_DRAMTMG8:
3204 			if (dram_type == DDR3 || dram_type == DDR4) {
3205 				tmp = pctl_regs->pctl[i][1];
3206 				/* t_xs_x32 */
3207 				tmp &= ~((u32)0x7f);
3208 				tmp |= ((txsnr + 63) / 64) & 0x7f;
3209 
3210 				if (dram_type == DDR4) {
3211 					/* t_xs_abort_x32 */
3212 					tmp &= ~((u32)(0x7f << 16));
3213 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3214 					/* t_xs_fast_x32 */
3215 					tmp &= ~((u32)(0x7f << 24));
3216 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3217 				}
3218 
3219 				pctl_regs->pctl[i][1] = tmp;
3220 			}
3221 			break;
3222 
3223 		case DDR_PCTL2_DRAMTMG14:
3224 			if (dram_type == LPDDR3 ||
3225 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3226 				tmp = pctl_regs->pctl[i][1];
3227 				/* t_xsr */
3228 				tmp &= ~((u32)0xfff);
3229 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3230 				pctl_regs->pctl[i][1] = tmp;
3231 			}
3232 			break;
3233 
3234 		default:
3235 			break;
3236 		}
3237 	}
3238 }
3239 
3240 void ddr_set_rate(struct dram_info *dram,
3241 		  struct rv1126_sdram_params *sdram_params,
3242 		  u32 freq, u32 cur_freq, u32 dst_fsp,
3243 		  u32 dst_fsp_lp4, u32 training_en)
3244 {
3245 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3246 	u32 mr_tmp;
3247 	u32 lp_stat;
3248 	u32 dramtype = sdram_params->base.dramtype;
3249 	struct rv1126_sdram_params *sdram_params_new;
3250 	void __iomem *pctl_base = dram->pctl;
3251 	void __iomem *phy_base = dram->phy;
3252 
3253 	lp_stat = low_power_update(dram, 0);
3254 	sdram_params_new = get_default_sdram_config(freq);
3255 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3256 	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3257 
3258 	pctl_modify_trfc(&sdram_params_new->pctl_regs,
3259 			 &sdram_params->ch.cap_info, dramtype, freq);
3260 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3261 
3262 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
3263 			 PCTL2_OPERATING_MODE_MASK) ==
3264 			 PCTL2_OPERATING_MODE_SR)
3265 		continue;
3266 
3267 	dest_dll_off = 0;
3268 	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3269 			  DDR_PCTL2_INIT3);
3270 	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3271 	    (dramtype == DDR4 && !(dst_init3 & 1)))
3272 		dest_dll_off = 1;
3273 
3274 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3275 	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3276 			  DDR_PCTL2_INIT3);
3277 	cur_init3 &= PCTL2_MR_MASK;
3278 	cur_dll_off = 1;
3279 	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3280 	    (dramtype == DDR4 && (cur_init3 & 1)))
3281 		cur_dll_off = 0;
3282 
3283 	if (!cur_dll_off) {
3284 		if (dramtype == DDR3)
3285 			cur_init3 |= 1;
3286 		else
3287 			cur_init3 &= ~1;
3288 		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3289 	}
3290 
3291 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3292 		     PCTL2_DIS_AUTO_REFRESH);
3293 	update_refresh_reg(dram);
3294 
3295 	enter_sr(dram, 1);
3296 
3297 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3298 	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3299 	       &dram->pmugrf->soc_con[0]);
3300 	sw_set_req(dram);
3301 	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3302 		     PCTL2_DFI_INIT_COMPLETE_EN);
3303 	sw_set_ack(dram);
3304 
3305 	sw_set_req(dram);
3306 	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3307 		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3308 	else
3309 		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3310 
3311 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3312 		     PCTL2_DIS_SRX_ZQCL);
3313 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3314 		     PCTL2_DIS_SRX_ZQCL);
3315 	sw_set_ack(dram);
3316 
3317 	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3318 	       &dram->cru->clkgate_con[21]);
3319 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3320 					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3321 					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3322 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3323 
3324 	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3325 	rkclk_set_dpll(dram, freq * MHz / 2);
3326 	phy_pll_set(dram, freq * MHz, 0);
3327 	phy_pll_set(dram, freq * MHz, 1);
3328 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3329 
3330 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3331 			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3332 			&dram->pmugrf->soc_con[0]);
3333 	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3334 	       &dram->cru->clkgate_con[21]);
3335 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3336 					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3337 					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3338 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3339 	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3340 	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
3341 		continue;
3342 
3343 	sw_set_req(dram);
3344 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3345 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3346 	sw_set_ack(dram);
3347 	update_refresh_reg(dram);
3348 	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3349 
3350 	enter_sr(dram, 0);
3351 
3352 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3353 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3354 
3355 	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3356 	if (dramtype == LPDDR3) {
3357 		pctl_write_mr(dram->pctl, 3, 1,
3358 			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3359 			      PCTL2_MR_MASK,
3360 			      dramtype);
3361 		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3362 			      dramtype);
3363 		pctl_write_mr(dram->pctl, 3, 3,
3364 			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3365 			      PCTL2_MR_MASK,
3366 			      dramtype);
3367 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3368 	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3369 		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3370 			      dramtype);
3371 		if (!dest_dll_off) {
3372 			pctl_write_mr(dram->pctl, 3, 0,
3373 				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3374 				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
3375 				      dramtype);
3376 			udelay(2);
3377 		}
3378 		pctl_write_mr(dram->pctl, 3, 0,
3379 			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
3380 			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
3381 			      dramtype);
3382 		pctl_write_mr(dram->pctl, 3, 2,
3383 			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3384 			       PCTL2_MR_MASK), dramtype);
3385 		if (dramtype == DDR4) {
3386 			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3387 				      dramtype);
3388 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3389 				       DDR_PCTL2_INIT6);
3390 			pctl_write_mr(dram->pctl, 3, 4,
3391 				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3392 				       PCTL2_MR_MASK,
3393 				      dramtype);
3394 			pctl_write_mr(dram->pctl, 3, 5,
3395 				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
3396 				      PCTL2_MR_MASK,
3397 				      dramtype);
3398 
3399 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3400 				       DDR_PCTL2_INIT7);
3401 			pctl_write_mr(dram->pctl, 3, 6,
3402 				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
3403 				      PCTL2_MR_MASK,
3404 				      dramtype);
3405 		}
3406 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
3407 		pctl_write_mr(dram->pctl, 3, 13,
3408 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3409 			       PCTL2_MR_MASK) & (~(BIT(7)))) |
3410 			      dst_fsp_lp4 << 7, dramtype);
3411 	}
3412 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3413 		     PCTL2_DIS_AUTO_REFRESH);
3414 	update_refresh_reg(dram);
3415 
3416 	/* training */
3417 	high_freq_training(dram, sdram_params_new, dst_fsp);
3418 	low_power_update(dram, lp_stat);
3419 
3420 	save_fsp_param(dram, dst_fsp, sdram_params_new);
3421 }
3422 
3423 static void ddr_set_rate_for_fsp(struct dram_info *dram,
3424 				 struct rv1126_sdram_params *sdram_params)
3425 {
3426 	struct ddr2_3_4_lp2_3_info *ddr_info;
3427 	u32 f0;
3428 	u32 dramtype = sdram_params->base.dramtype;
3429 #ifndef CONFIG_SPL_KERNEL_BOOT
3430 	u32 f1, f2, f3;
3431 #endif
3432 
3433 	ddr_info = get_ddr_drv_odt_info(dramtype);
3434 	if (!ddr_info)
3435 		return;
3436 
3437 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3438 	     DDR_FREQ_MASK;
3439 
3440 #ifndef CONFIG_SPL_KERNEL_BOOT
3441 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3442 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
3443 
3444 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3445 	     DDR_FREQ_MASK;
3446 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3447 	     DDR_FREQ_MASK;
3448 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3449 	     DDR_FREQ_MASK;
3450 #endif
3451 
3452 	if (get_wrlvl_val(dram, sdram_params))
3453 		printascii("get wrlvl value fail\n");
3454 
3455 #ifndef CONFIG_SPL_KERNEL_BOOT
3456 	printascii("change to: ");
3457 	printdec(f1);
3458 	printascii("MHz\n");
3459 	ddr_set_rate(&dram_info, sdram_params, f1,
3460 		     sdram_params->base.ddr_freq, 1, 1, 1);
3461 	printascii("change to: ");
3462 	printdec(f2);
3463 	printascii("MHz\n");
3464 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3465 	printascii("change to: ");
3466 	printdec(f3);
3467 	printascii("MHz\n");
3468 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3469 #endif
3470 	printascii("change to: ");
3471 	printdec(f0);
3472 	printascii("MHz(final freq)\n");
3473 #ifndef CONFIG_SPL_KERNEL_BOOT
3474 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3475 #else
3476 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
3477 #endif
3478 }
3479 
3480 int get_uart_config(void)
3481 {
3482 	struct sdram_head_info_index_v2 *index =
3483 		(struct sdram_head_info_index_v2 *)common_info;
3484 	struct global_info *gbl_info;
3485 
3486 	gbl_info = (struct global_info *)((void *)common_info +
3487 		index->global_index.offset * 4);
3488 
3489 	return gbl_info->uart_info;
3490 }
3491 
3492 /* return: 0 = success, other = fail */
3493 int sdram_init(void)
3494 {
3495 	struct rv1126_sdram_params *sdram_params;
3496 	int ret = 0;
3497 	struct sdram_head_info_index_v2 *index =
3498 		(struct sdram_head_info_index_v2 *)common_info;
3499 	struct global_info *gbl_info;
3500 
3501 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3502 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3503 	dram_info.grf = (void *)GRF_BASE_ADDR;
3504 	dram_info.cru = (void *)CRU_BASE_ADDR;
3505 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3506 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3507 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3508 
3509 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3510 	printascii("extended temp support\n");
3511 #endif
3512 	if (index->version_info != 2 ||
3513 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3514 	    (index->ddr3_index.size !=
3515 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3516 	    (index->ddr4_index.size !=
3517 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3518 	    (index->lp3_index.size !=
3519 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3520 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3521 	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3522 	    index->global_index.offset == 0 ||
3523 	    index->ddr3_index.offset == 0 ||
3524 	    index->ddr4_index.offset == 0 ||
3525 	    index->lp3_index.offset == 0 ||
3526 	    index->lp4_index.offset == 0 ||
3527 	    index->lp4x_index.offset == 0) {
3528 		printascii("common info error\n");
3529 		goto error;
3530 	}
3531 
3532 	gbl_info = (struct global_info *)((void *)common_info +
3533 		index->global_index.offset * 4);
3534 
3535 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3536 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3537 
3538 	sdram_params = &sdram_configs[0];
3539 	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
3540 	for (j = 0; j < ARRAY_SIZE(sdram_configs); j++)
3541 		sdram_configs[j].base.dramtype = LPDDR4X;
3542 	#endif
3543 	if (sdram_params->base.dramtype == DDR3 ||
3544 	    sdram_params->base.dramtype == DDR4) {
3545 		if (DDR_2T_INFO(gbl_info->info_2t))
3546 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3547 		else
3548 			sdram_params->pctl_regs.pctl[0][1] &=
3549 				~(0x1 << 10);
3550 	}
3551 	ret = sdram_init_detect(&dram_info, sdram_params);
3552 	if (ret) {
3553 		sdram_print_dram_type(sdram_params->base.dramtype);
3554 		printascii(", ");
3555 		printdec(sdram_params->base.ddr_freq);
3556 		printascii("MHz\n");
3557 		goto error;
3558 	}
3559 	print_ddr_info(sdram_params);
3560 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3561 	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3562 				  (u8)sdram_params->ch.cap_info.rank);
3563 #endif
3564 
3565 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3566 #ifndef CONFIG_SPL_KERNEL_BOOT
3567 	copy_fsp_param_to_ddr();
3568 #endif
3569 
3570 	ddr_set_atags(&dram_info, sdram_params);
3571 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3572 	save_rw_trn_result_to_ddr(&rw_trn_result);
3573 #endif
3574 
3575 	printascii("out\n");
3576 
3577 	return ret;
3578 error:
3579 	printascii("error\n");
3580 	return (-1);
3581 }
3582 #endif /* CONFIG_TPL_BUILD */
3583