xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 2994e4dcba6c60354fc49e9ac9bb4ebb95a6d4b2)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
43 struct dram_info {
44 	void __iomem *pctl;
45 	void __iomem *phy;
46 	struct rv1126_cru *cru;
47 	struct msch_regs *msch;
48 	struct rv1126_ddrgrf *ddrgrf;
49 	struct rv1126_grf *grf;
50 	struct ram_info info;
51 	struct rv1126_pmugrf *pmugrf;
52 	u32 sr_idle;
53 	u32 pd_idle;
54 };
55 
56 #define GRF_BASE_ADDR			0xfe000000
57 #define PMU_GRF_BASE_ADDR		0xfe020000
58 #define DDR_GRF_BASE_ADDR		0xfe030000
59 #define BUS_SGRF_BASE_ADDR		0xfe0a0000
60 #define SERVER_MSCH_BASE_ADDR		0xfe800000
61 #define CRU_BASE_ADDR			0xff490000
62 #define DDR_PHY_BASE_ADDR		0xff4a0000
63 #define UPCTL2_BASE_ADDR		0xffa50000
64 
65 #define SGRF_SOC_CON2			0x8
66 #define SGRF_SOC_CON12			0x30
67 #define SGRF_SOC_CON13			0x34
68 
69 struct dram_info dram_info;
70 
71 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
72 struct rv1126_sdram_params sdram_configs[] = {
73 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
74 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
75 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
76 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
77 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
78 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
79 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
80 };
81 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
82 struct rv1126_sdram_params sdram_configs[] = {
83 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
84 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
85 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
86 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
87 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
88 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
89 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
90 };
91 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
92 struct rv1126_sdram_params sdram_configs[] = {
93 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
94 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
95 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
96 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
97 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
98 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
99 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
100 };
101 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
102 struct rv1126_sdram_params sdram_configs[] = {
103 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
104 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
105 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
106 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
107 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
108 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
109 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
110 };
111 #endif
112 
113 u32 common_info[] = {
114 	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
115 };
116 
117 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
118 static struct rw_trn_result rw_trn_result;
119 #endif
120 
121 static struct rv1126_fsp_param fsp_param[MAX_IDX];
122 
123 static u8 lp3_odt_value;
124 
125 static s8 wrlvl_result[2][4];
126 
127 /* DDR configuration 0-9 */
128 u16 ddr_cfg_2_rbc[] = {
129 	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
130 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
131 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
132 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
133 	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
134 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
135 	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
136 	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
137 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
138 	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
139 };
140 
141 /* DDR configuration 10-21 */
142 u8 ddr4_cfg_2_rbc[] = {
143 	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
144 	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
145 	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
146 	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
147 	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
148 	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
149 	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
150 	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
151 	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
152 	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
153 	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
154 	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
155 };
156 
157 /* DDR configuration 22-28 */
158 u16 ddr_cfg_2_rbc_p2[] = {
159 	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
160 	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
161 	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
162 	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
163 	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
164 	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
165 	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
166 };
167 
168 u8 d4_rbc_2_d3_rbc[][2] = {
169 	{10, 0},
170 	{11, 2},
171 	{12, 23},
172 	{13, 1},
173 	{14, 28},
174 	{15, 24},
175 	{16, 27},
176 	{17, 7},
177 	{18, 6},
178 	{19, 25},
179 	{20, 26},
180 	{21, 3}
181 };
182 
183 u32 addrmap[29][9] = {
184 	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
185 		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
186 	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
187 		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
188 	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
189 		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
190 	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
191 		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
192 	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
193 		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
194 	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
195 		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
196 	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
197 		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
198 	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
199 		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
200 	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
201 		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
202 	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
203 		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */
204 
205 	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
206 		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
207 	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
208 		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
209 	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
210 		0x07070707, 0x00000f07, 0x0700}, /* 12 */
211 	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
212 		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
213 	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
214 		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
215 	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
216 		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
217 	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
218 		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
219 	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
220 		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
221 	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
222 		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
223 	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
224 		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */
225 
226 	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
227 		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
228 	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
229 		0x06060606, 0x00000f06, 0x0600}, /* 21 */
230 	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
231 		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */
232 
233 	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
234 		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
235 	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
236 		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
237 	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
238 		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
239 	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
240 		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
241 	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
242 		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
243 	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
244 		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
245 };
246 
247 static u8 dq_sel[22][3] = {
248 	{0x0, 0x17, 0x22},
249 	{0x1, 0x18, 0x23},
250 	{0x2, 0x19, 0x24},
251 	{0x3, 0x1a, 0x25},
252 	{0x4, 0x1b, 0x26},
253 	{0x5, 0x1c, 0x27},
254 	{0x6, 0x1d, 0x28},
255 	{0x7, 0x1e, 0x29},
256 	{0x8, 0x16, 0x21},
257 	{0x9, 0x1f, 0x2a},
258 	{0xa, 0x20, 0x2b},
259 	{0x10, 0x1, 0xc},
260 	{0x11, 0x2, 0xd},
261 	{0x12, 0x3, 0xe},
262 	{0x13, 0x4, 0xf},
263 	{0x14, 0x5, 0x10},
264 	{0x15, 0x6, 0x11},
265 	{0x16, 0x7, 0x12},
266 	{0x17, 0x8, 0x13},
267 	{0x18, 0x0, 0xb},
268 	{0x19, 0x9, 0x14},
269 	{0x1a, 0xa, 0x15}
270 };
271 
272 static u16 grp_addr[4] = {
273 	ADD_GROUP_CS0_A,
274 	ADD_GROUP_CS0_B,
275 	ADD_GROUP_CS1_A,
276 	ADD_GROUP_CS1_B
277 };
278 
279 static u8 wrlvl_result_offset[2][4] = {
280 	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
281 	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
282 };
283 
284 static u16 dqs_dq_skew_adr[16] = {
285 	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
286 	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
287 	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
288 	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
289 	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
290 	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
291 	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
292 	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
293 	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
294 	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
295 	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
296 	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
297 	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
298 	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
299 	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
300 	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
301 };
302 
303 static void rkclk_ddr_reset(struct dram_info *dram,
304 			    u32 ctl_srstn, u32 ctl_psrstn,
305 			    u32 phy_srstn, u32 phy_psrstn)
306 {
307 	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
308 	       UPCTL2_ASRSTN_REQ(ctl_srstn),
309 	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
310 
311 	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
312 	       &dram->cru->softrst_con[12]);
313 }
314 
315 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
316 {
317 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
318 	int delay = 1000;
319 	u32 mhz = hz / MHz;
320 	struct global_info *gbl_info;
321 	struct sdram_head_info_index_v2 *index =
322 		(struct sdram_head_info_index_v2 *)common_info;
323 	u32 ssmod_info;
324 	u32 dsmpd = 1;
325 
326 	gbl_info = (struct global_info *)((void *)common_info +
327 		    index->global_index.offset * 4);
328 	ssmod_info = gbl_info->info_2t;
329 	refdiv = 1;
330 	if (mhz <= 100) {
331 		postdiv1 = 6;
332 		postdiv2 = 4;
333 	} else if (mhz <= 150) {
334 		postdiv1 = 4;
335 		postdiv2 = 4;
336 	} else if (mhz <= 200) {
337 		postdiv1 = 6;
338 		postdiv2 = 2;
339 	} else if (mhz <= 300) {
340 		postdiv1 = 4;
341 		postdiv2 = 2;
342 	} else if (mhz <= 400) {
343 		postdiv1 = 6;
344 		postdiv2 = 1;
345 	} else {
346 		postdiv1 = 4;
347 		postdiv2 = 1;
348 	}
349 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
350 
351 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
352 
353 	writel(0x1f000000, &dram->cru->clksel_con[64]);
354 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
355 	/* enable ssmod */
356 	if (PLL_SSMOD_SPREAD(ssmod_info)) {
357 		dsmpd = 0;
358 		clrsetbits_le32(&dram->cru->pll[1].con2,
359 				0xffffff << 0, 0x0 << 0);
360 		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
361 		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
362 		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
363 		       SSMOD_RESET(0) |
364 		       SSMOD_DIS_SSCG(0) |
365 		       SSMOD_BP(0),
366 		       &dram->cru->pll[1].con3);
367 	}
368 	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
369 	       &dram->cru->pll[1].con1);
370 
371 	while (delay > 0) {
372 		udelay(1);
373 		if (LOCK(readl(&dram->cru->pll[1].con1)))
374 			break;
375 		delay--;
376 	}
377 
378 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
379 }
380 
381 static void rkclk_configure_ddr(struct dram_info *dram,
382 				struct rv1126_sdram_params *sdram_params)
383 {
384 	/* for inno ddr phy need freq / 2 */
385 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
386 }
387 
388 static unsigned int
389 	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
390 {
391 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
392 	u32 cs, bw, die_bw, col, row, bank;
393 	u32 cs1_row;
394 	u32 i, tmp;
395 	u32 ddrconf = -1;
396 	u32 row_3_4;
397 
398 	cs = cap_info->rank;
399 	bw = cap_info->bw;
400 	die_bw = cap_info->dbw;
401 	col = cap_info->col;
402 	row = cap_info->cs0_row;
403 	cs1_row = cap_info->cs1_row;
404 	bank = cap_info->bk;
405 	row_3_4 = cap_info->row_3_4;
406 
407 	if (sdram_params->base.dramtype == DDR4) {
408 		if (cs == 2 && row == cs1_row && !row_3_4) {
409 			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
410 			      die_bw;
411 			for (i = 17; i < 21; i++) {
412 				if (((tmp & 0xf) ==
413 				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
414 				    ((tmp & 0x70) <=
415 				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
416 					ddrconf = i;
417 					goto out;
418 				}
419 			}
420 		}
421 
422 		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
423 		for (i = 10; i < 21; i++) {
424 			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
425 			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
426 			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
427 				ddrconf = i;
428 				goto out;
429 			}
430 		}
431 	} else {
432 		if (cs == 2 && row == cs1_row && bank == 3) {
433 			for (i = 5; i < 8; i++) {
434 				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
435 							 0x7)) &&
436 				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
437 							  (0x7 << 5))) {
438 					ddrconf = i;
439 					goto out;
440 				}
441 			}
442 		}
443 
444 		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
445 		      ((bw + col - 10) << 0);
446 		if (bank == 3)
447 			tmp |= (1 << 3);
448 
449 		for (i = 0; i < 9; i++)
450 			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
451 			    ((tmp & (7 << 5)) <=
452 			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
453 			    ((tmp & (1 << 8)) <=
454 			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
455 				ddrconf = i;
456 				goto out;
457 			}
458 
459 		for (i = 0; i < 7; i++)
460 			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
461 			    ((tmp & (7 << 5)) <=
462 			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
463 			    ((tmp & (1 << 8)) <=
464 			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
465 				ddrconf = i + 22;
466 				goto out;
467 			}
468 
469 		if (cs == 1 && bank == 3 && row <= 17 &&
470 		    (col + bw) == 12)
471 			ddrconf = 23;
472 	}
473 
474 out:
475 	if (ddrconf > 28)
476 		printascii("calculate ddrconfig error\n");
477 
478 	if (sdram_params->base.dramtype == DDR4) {
479 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
480 			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
481 				if (ddrconf == 21 && row > 16)
482 					printascii("warn:ddrconf21 row > 16\n");
483 				else
484 					ddrconf = d4_rbc_2_d3_rbc[i][1];
485 				break;
486 			}
487 		}
488 	}
489 
490 	return ddrconf;
491 }
492 
493 static void sw_set_req(struct dram_info *dram)
494 {
495 	void __iomem *pctl_base = dram->pctl;
496 
497 	/* clear sw_done=0 */
498 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
499 }
500 
501 static void sw_set_ack(struct dram_info *dram)
502 {
503 	void __iomem *pctl_base = dram->pctl;
504 
505 	/* set sw_done=1 */
506 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
507 	while (1) {
508 		/* wait programming done */
509 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
510 				PCTL2_SW_DONE_ACK)
511 			break;
512 	}
513 }
514 
515 static void set_ctl_address_map(struct dram_info *dram,
516 				struct rv1126_sdram_params *sdram_params)
517 {
518 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
519 	void __iomem *pctl_base = dram->pctl;
520 	u32 ddrconf = cap_info->ddrconfig;
521 	u32 i, row;
522 
523 	row = cap_info->cs0_row;
524 	if (sdram_params->base.dramtype == DDR4) {
525 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
526 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
527 				ddrconf = d4_rbc_2_d3_rbc[i][0];
528 				break;
529 			}
530 		}
531 	}
532 
533 	if (ddrconf >= ARRAY_SIZE(addrmap)) {
534 		printascii("set ctl address map fail\n");
535 		return;
536 	}
537 
538 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
539 			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);
540 
541 	/* unused row set to 0xf */
542 	for (i = 17; i >= row; i--)
543 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
544 			((i - 12) * 8 / 32) * 4,
545 			0xf << ((i - 12) * 8 % 32));
546 
547 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
548 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
549 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
550 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
551 
552 	if (cap_info->rank == 1)
553 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
554 }
555 
556 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
557 {
558 	void __iomem *phy_base = dram->phy;
559 	u32 fbdiv, prediv, postdiv, postdiv_en;
560 
561 	if (wait) {
562 		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
563 		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
564 			continue;
565 	} else {
566 		freq /= MHz;
567 		prediv = 1;
568 		if (freq <= 200) {
569 			fbdiv = 16;
570 			postdiv = 2;
571 			postdiv_en = 1;
572 		} else if (freq <= 456) {
573 			fbdiv = 8;
574 			postdiv = 1;
575 			postdiv_en = 1;
576 		} else {
577 			fbdiv = 4;
578 			postdiv = 0;
579 			postdiv_en = 0;
580 		}
581 		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
582 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
583 				(fbdiv >> 8) & 1);
584 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
585 				postdiv_en << PHY_POSTDIV_EN_SHIFT);
586 
587 		clrsetbits_le32(PHY_REG(phy_base, 0x52),
588 				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
589 		clrsetbits_le32(PHY_REG(phy_base, 0x53),
590 				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
591 				postdiv << PHY_POSTDIV_SHIFT);
592 	}
593 }
594 
595 static const u16 d3_phy_drv_2_ohm[][2] = {
596 	{PHY_DDR3_RON_455ohm, 455},
597 	{PHY_DDR3_RON_230ohm, 230},
598 	{PHY_DDR3_RON_153ohm, 153},
599 	{PHY_DDR3_RON_115ohm, 115},
600 	{PHY_DDR3_RON_91ohm, 91},
601 	{PHY_DDR3_RON_76ohm, 76},
602 	{PHY_DDR3_RON_65ohm, 65},
603 	{PHY_DDR3_RON_57ohm, 57},
604 	{PHY_DDR3_RON_51ohm, 51},
605 	{PHY_DDR3_RON_46ohm, 46},
606 	{PHY_DDR3_RON_41ohm, 41},
607 	{PHY_DDR3_RON_38ohm, 38},
608 	{PHY_DDR3_RON_35ohm, 35},
609 	{PHY_DDR3_RON_32ohm, 32},
610 	{PHY_DDR3_RON_30ohm, 30},
611 	{PHY_DDR3_RON_28ohm, 28},
612 	{PHY_DDR3_RON_27ohm, 27},
613 	{PHY_DDR3_RON_25ohm, 25},
614 	{PHY_DDR3_RON_24ohm, 24},
615 	{PHY_DDR3_RON_23ohm, 23},
616 	{PHY_DDR3_RON_22ohm, 22},
617 	{PHY_DDR3_RON_21ohm, 21},
618 	{PHY_DDR3_RON_20ohm, 20}
619 };
620 
621 static u16 d3_phy_odt_2_ohm[][2] = {
622 	{PHY_DDR3_RTT_DISABLE, 0},
623 	{PHY_DDR3_RTT_561ohm, 561},
624 	{PHY_DDR3_RTT_282ohm, 282},
625 	{PHY_DDR3_RTT_188ohm, 188},
626 	{PHY_DDR3_RTT_141ohm, 141},
627 	{PHY_DDR3_RTT_113ohm, 113},
628 	{PHY_DDR3_RTT_94ohm, 94},
629 	{PHY_DDR3_RTT_81ohm, 81},
630 	{PHY_DDR3_RTT_72ohm, 72},
631 	{PHY_DDR3_RTT_64ohm, 64},
632 	{PHY_DDR3_RTT_58ohm, 58},
633 	{PHY_DDR3_RTT_52ohm, 52},
634 	{PHY_DDR3_RTT_48ohm, 48},
635 	{PHY_DDR3_RTT_44ohm, 44},
636 	{PHY_DDR3_RTT_41ohm, 41},
637 	{PHY_DDR3_RTT_38ohm, 38},
638 	{PHY_DDR3_RTT_37ohm, 37},
639 	{PHY_DDR3_RTT_34ohm, 34},
640 	{PHY_DDR3_RTT_32ohm, 32},
641 	{PHY_DDR3_RTT_31ohm, 31},
642 	{PHY_DDR3_RTT_29ohm, 29},
643 	{PHY_DDR3_RTT_28ohm, 28},
644 	{PHY_DDR3_RTT_27ohm, 27},
645 	{PHY_DDR3_RTT_25ohm, 25}
646 };
647 
648 static u16 d4lp3_phy_drv_2_ohm[][2] = {
649 	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
650 	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
651 	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
652 	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
653 	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
654 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
655 	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
656 	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
657 	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
658 	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
659 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
660 	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
661 	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
662 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
663 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
664 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
665 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
666 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
667 	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
668 	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
669 	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
670 	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
671 	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
672 };
673 
674 static u16 d4lp3_phy_odt_2_ohm[][2] = {
675 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
676 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
677 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
678 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
679 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
680 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
681 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
682 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
683 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
684 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
685 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
686 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
687 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
688 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
689 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
690 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
691 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
692 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
693 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
694 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
695 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
696 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
697 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
698 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
699 };
700 
701 static u16 lp4_phy_drv_2_ohm[][2] = {
702 	{PHY_LPDDR4_RON_501ohm, 501},
703 	{PHY_LPDDR4_RON_253ohm, 253},
704 	{PHY_LPDDR4_RON_168ohm, 168},
705 	{PHY_LPDDR4_RON_126ohm, 126},
706 	{PHY_LPDDR4_RON_101ohm, 101},
707 	{PHY_LPDDR4_RON_84ohm, 84},
708 	{PHY_LPDDR4_RON_72ohm, 72},
709 	{PHY_LPDDR4_RON_63ohm, 63},
710 	{PHY_LPDDR4_RON_56ohm, 56},
711 	{PHY_LPDDR4_RON_50ohm, 50},
712 	{PHY_LPDDR4_RON_46ohm, 46},
713 	{PHY_LPDDR4_RON_42ohm, 42},
714 	{PHY_LPDDR4_RON_38ohm, 38},
715 	{PHY_LPDDR4_RON_36ohm, 36},
716 	{PHY_LPDDR4_RON_33ohm, 33},
717 	{PHY_LPDDR4_RON_31ohm, 31},
718 	{PHY_LPDDR4_RON_29ohm, 29},
719 	{PHY_LPDDR4_RON_28ohm, 28},
720 	{PHY_LPDDR4_RON_26ohm, 26},
721 	{PHY_LPDDR4_RON_25ohm, 25},
722 	{PHY_LPDDR4_RON_24ohm, 24},
723 	{PHY_LPDDR4_RON_23ohm, 23},
724 	{PHY_LPDDR4_RON_22ohm, 22}
725 };
726 
727 static u16 lp4_phy_odt_2_ohm[][2] = {
728 	{PHY_LPDDR4_RTT_DISABLE, 0},
729 	{PHY_LPDDR4_RTT_604ohm, 604},
730 	{PHY_LPDDR4_RTT_303ohm, 303},
731 	{PHY_LPDDR4_RTT_202ohm, 202},
732 	{PHY_LPDDR4_RTT_152ohm, 152},
733 	{PHY_LPDDR4_RTT_122ohm, 122},
734 	{PHY_LPDDR4_RTT_101ohm, 101},
735 	{PHY_LPDDR4_RTT_87ohm,	87},
736 	{PHY_LPDDR4_RTT_78ohm, 78},
737 	{PHY_LPDDR4_RTT_69ohm, 69},
738 	{PHY_LPDDR4_RTT_62ohm, 62},
739 	{PHY_LPDDR4_RTT_56ohm, 56},
740 	{PHY_LPDDR4_RTT_52ohm, 52},
741 	{PHY_LPDDR4_RTT_48ohm, 48},
742 	{PHY_LPDDR4_RTT_44ohm, 44},
743 	{PHY_LPDDR4_RTT_41ohm, 41},
744 	{PHY_LPDDR4_RTT_39ohm, 39},
745 	{PHY_LPDDR4_RTT_37ohm, 37},
746 	{PHY_LPDDR4_RTT_35ohm, 35},
747 	{PHY_LPDDR4_RTT_33ohm, 33},
748 	{PHY_LPDDR4_RTT_32ohm, 32},
749 	{PHY_LPDDR4_RTT_30ohm, 30},
750 	{PHY_LPDDR4_RTT_29ohm, 29},
751 	{PHY_LPDDR4_RTT_27ohm, 27}
752 };
753 
754 static u32 lp4_odt_calc(u32 odt_ohm)
755 {
756 	u32 odt;
757 
758 	if (odt_ohm == 0)
759 		odt = LPDDR4_DQODT_DIS;
760 	else if (odt_ohm <= 40)
761 		odt = LPDDR4_DQODT_40;
762 	else if (odt_ohm <= 48)
763 		odt = LPDDR4_DQODT_48;
764 	else if (odt_ohm <= 60)
765 		odt = LPDDR4_DQODT_60;
766 	else if (odt_ohm <= 80)
767 		odt = LPDDR4_DQODT_80;
768 	else if (odt_ohm <= 120)
769 		odt = LPDDR4_DQODT_120;
770 	else
771 		odt = LPDDR4_DQODT_240;
772 
773 	return odt;
774 }
775 
776 static void *get_ddr_drv_odt_info(u32 dramtype)
777 {
778 	struct sdram_head_info_index_v2 *index =
779 		(struct sdram_head_info_index_v2 *)common_info;
780 	void *ddr_info = 0;
781 
782 	if (dramtype == DDR4)
783 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
784 	else if (dramtype == DDR3)
785 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
786 	else if (dramtype == LPDDR3)
787 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
788 	else if (dramtype == LPDDR4)
789 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
790 	else if (dramtype == LPDDR4X)
791 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
792 	else
793 		printascii("unsupported dram type\n");
794 	return ddr_info;
795 }
796 
797 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
798 			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
799 {
800 	void __iomem *pctl_base = dram->pctl;
801 	u32 ca_vref, dq_vref;
802 
803 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
804 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
805 	else
806 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
807 
808 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
809 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
810 	else
811 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
812 
813 	if (dramtype == LPDDR4) {
814 		if (ca_vref < 100)
815 			ca_vref = 100;
816 		if (ca_vref > 420)
817 			ca_vref = 420;
818 
819 		if (ca_vref <= 300)
820 			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
821 		else
822 			ca_vref = (1 << 6) | (ca_vref - 220) / 4;
823 
824 		if (dq_vref < 100)
825 			dq_vref = 100;
826 		if (dq_vref > 420)
827 			dq_vref = 420;
828 
829 		if (dq_vref <= 300)
830 			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
831 		else
832 			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
833 	} else {
834 		ca_vref = ca_vref * 11 / 6;
835 		if (ca_vref < 150)
836 			ca_vref = 150;
837 		if (ca_vref > 629)
838 			ca_vref = 629;
839 
840 		if (ca_vref <= 449)
841 			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
842 		else
843 			ca_vref = (1 << 6) | (ca_vref - 329) / 4;
844 
845 		if (dq_vref < 150)
846 			dq_vref = 150;
847 		if (dq_vref > 629)
848 			dq_vref = 629;
849 
850 		if (dq_vref <= 449)
851 			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
852 		else
853 			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
854 	}
855 	sw_set_req(dram);
856 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
857 			DDR_PCTL2_INIT6,
858 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
859 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
860 
861 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
862 			DDR_PCTL2_INIT7,
863 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
864 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
865 	sw_set_ack(dram);
866 }
867 
868 static void set_ds_odt(struct dram_info *dram,
869 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
870 {
871 	void __iomem *phy_base = dram->phy;
872 	void __iomem *pctl_base = dram->pctl;
873 	u32 dramtype = sdram_params->base.dramtype;
874 	struct ddr2_3_4_lp2_3_info *ddr_info;
875 	struct lp4_info *lp4_info;
876 	u32 i, j, tmp;
877 	const u16 (*p_drv)[2];
878 	const u16 (*p_odt)[2];
879 	u32 drv_info, sr_info;
880 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
881 	u32 phy_odt_ohm, dram_odt_ohm;
882 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
883 	u32 phy_odt_up_en, phy_odt_dn_en;
884 	u32 sr_dq, sr_clk;
885 	u32 freq = sdram_params->base.ddr_freq;
886 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
887 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
888 	u32 phy_dq_drv = 0;
889 	u32 phy_odt_up = 0, phy_odt_dn = 0;
890 
891 	ddr_info = get_ddr_drv_odt_info(dramtype);
892 	lp4_info = (void *)ddr_info;
893 
894 	if (!ddr_info)
895 		return;
896 
897 	/* dram odt en freq control phy drv, dram odt and phy sr */
898 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
899 		drv_info = ddr_info->drv_when_odtoff;
900 		dram_odt_ohm = 0;
901 		sr_info = ddr_info->sr_when_odtoff;
902 		phy_lp4_drv_pd_en =
903 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
904 	} else {
905 		drv_info = ddr_info->drv_when_odten;
906 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
907 		sr_info = ddr_info->sr_when_odten;
908 		phy_lp4_drv_pd_en =
909 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
910 	}
911 	phy_dq_drv_ohm =
912 		DRV_INFO_PHY_DQ_DRV(drv_info);
913 	phy_clk_drv_ohm =
914 		DRV_INFO_PHY_CLK_DRV(drv_info);
915 	phy_ca_drv_ohm =
916 		DRV_INFO_PHY_CA_DRV(drv_info);
917 
918 	sr_dq = DQ_SR_INFO(sr_info);
919 	sr_clk = CLK_SR_INFO(sr_info);
920 
921 	/* phy odt en freq control dram drv and phy odt */
922 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
923 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
924 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
925 		phy_odt_ohm = 0;
926 		phy_odt_up_en = 0;
927 		phy_odt_dn_en = 0;
928 	} else {
929 		dram_drv_ohm =
930 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
931 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
932 		phy_odt_up_en =
933 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
934 		phy_odt_dn_en =
935 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
936 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
937 	}
938 
939 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
940 		if (phy_odt_ohm) {
941 			phy_odt_up_en = 0;
942 			phy_odt_dn_en = 1;
943 		}
944 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
945 			dram_caodt_ohm = 0;
946 		else
947 			dram_caodt_ohm =
948 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
949 	}
950 
951 	if (dramtype == DDR3) {
952 		p_drv = d3_phy_drv_2_ohm;
953 		p_odt = d3_phy_odt_2_ohm;
954 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 		p_drv = lp4_phy_drv_2_ohm;
956 		p_odt = lp4_phy_odt_2_ohm;
957 	} else {
958 		p_drv = d4lp3_phy_drv_2_ohm;
959 		p_odt = d4lp3_phy_odt_2_ohm;
960 	}
961 
962 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
963 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
964 			phy_dq_drv = **(p_drv + i);
965 			break;
966 		}
967 		if (i == 0)
968 			break;
969 	}
970 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
971 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
972 			phy_clk_drv = **(p_drv + i);
973 			break;
974 		}
975 		if (i == 0)
976 			break;
977 	}
978 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
979 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
980 			phy_ca_drv = **(p_drv + i);
981 			break;
982 		}
983 		if (i == 0)
984 			break;
985 	}
986 	if (!phy_odt_ohm)
987 		phy_odt = 0;
988 	else
989 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
990 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
991 				phy_odt = **(p_odt + i);
992 				break;
993 			}
994 			if (i == 0)
995 				break;
996 		}
997 
998 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
999 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1000 			vref_inner = 0x80;
1001 		else if (phy_odt_up_en)
1002 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1003 				     (dram_drv_ohm + phy_odt_ohm);
1004 		else
1005 			vref_inner = phy_odt_ohm * 128 /
1006 				(phy_odt_ohm + dram_drv_ohm);
1007 
1008 		if (dramtype != DDR3 && dram_odt_ohm)
1009 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1010 				   (phy_dq_drv_ohm + dram_odt_ohm);
1011 		else
1012 			vref_out = 0x80;
1013 	} else {
1014 		/* for lp4 and lp4x*/
1015 		if (phy_odt_ohm)
1016 			vref_inner =
1017 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1018 				 256) / 1000;
1019 		else
1020 			vref_inner =
1021 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1022 				 256) / 1000;
1023 
1024 		vref_out = 0x80;
1025 	}
1026 
1027 	/* default ZQCALIB bypass mode */
1028 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1029 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1030 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1031 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1032 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1033 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1034 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1035 	} else {
1036 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1037 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1038 	}
1039 	/* clk / cmd slew rate */
1040 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1041 
1042 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1043 	if (phy_odt_up_en)
1044 		phy_odt_up = phy_odt;
1045 	if (phy_odt_dn_en)
1046 		phy_odt_dn = phy_odt;
1047 
1048 	for (i = 0; i < 4; i++) {
1049 		j = 0x110 + i * 0x10;
1050 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1051 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1052 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1053 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1054 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1055 
1056 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1057 				1 << 3, phy_lp4_drv_pd_en << 3);
1058 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1059 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1060 		/* dq slew rate */
1061 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1062 				0x1f, sr_dq);
1063 	}
1064 
1065 	/* reg_rx_vref_value_update */
1066 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1067 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1068 
1069 	/* RAM VREF */
1070 	writel(vref_out, PHY_REG(phy_base, 0x105));
1071 	if (dramtype == LPDDR3)
1072 		udelay(100);
1073 
1074 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1075 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1076 
1077 	if (dramtype == DDR3 || dramtype == DDR4) {
1078 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 				DDR_PCTL2_INIT3);
1080 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1081 	} else {
1082 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1083 				DDR_PCTL2_INIT4);
1084 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1085 	}
1086 
1087 	if (dramtype == DDR3) {
1088 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1089 		if (dram_drv_ohm == 34)
1090 			mr1_mr3 |= DDR3_DS_34;
1091 
1092 		if (dram_odt_ohm == 0)
1093 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1094 		else if (dram_odt_ohm <= 40)
1095 			mr1_mr3 |= DDR3_RTT_NOM_40;
1096 		else if (dram_odt_ohm <= 60)
1097 			mr1_mr3 |= DDR3_RTT_NOM_60;
1098 		else
1099 			mr1_mr3 |= DDR3_RTT_NOM_120;
1100 
1101 	} else if (dramtype == DDR4) {
1102 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1103 		if (dram_drv_ohm == 48)
1104 			mr1_mr3 |= DDR4_DS_48;
1105 
1106 		if (dram_odt_ohm == 0)
1107 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1108 		else if (dram_odt_ohm <= 34)
1109 			mr1_mr3 |= DDR4_RTT_NOM_34;
1110 		else if (dram_odt_ohm <= 40)
1111 			mr1_mr3 |= DDR4_RTT_NOM_40;
1112 		else if (dram_odt_ohm <= 48)
1113 			mr1_mr3 |= DDR4_RTT_NOM_48;
1114 		else if (dram_odt_ohm <= 60)
1115 			mr1_mr3 |= DDR4_RTT_NOM_60;
1116 		else
1117 			mr1_mr3 |= DDR4_RTT_NOM_120;
1118 
1119 	} else if (dramtype == LPDDR3) {
1120 		if (dram_drv_ohm <= 34)
1121 			mr1_mr3 |= LPDDR3_DS_34;
1122 		else if (dram_drv_ohm <= 40)
1123 			mr1_mr3 |= LPDDR3_DS_40;
1124 		else if (dram_drv_ohm <= 48)
1125 			mr1_mr3 |= LPDDR3_DS_48;
1126 		else if (dram_drv_ohm <= 60)
1127 			mr1_mr3 |= LPDDR3_DS_60;
1128 		else if (dram_drv_ohm <= 80)
1129 			mr1_mr3 |= LPDDR3_DS_80;
1130 
1131 		if (dram_odt_ohm == 0)
1132 			lp3_odt_value = LPDDR3_ODT_DIS;
1133 		else if (dram_odt_ohm <= 60)
1134 			lp3_odt_value = LPDDR3_ODT_60;
1135 		else if (dram_odt_ohm <= 120)
1136 			lp3_odt_value = LPDDR3_ODT_120;
1137 		else
1138 			lp3_odt_value = LPDDR3_ODT_240;
1139 	} else {/* for lpddr4 and lpddr4x */
1140 		/* MR3 for lp4 PU-CAL and PDDS */
1141 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1142 		mr1_mr3 |= lp4_pu_cal;
1143 
1144 		tmp = lp4_odt_calc(dram_drv_ohm);
1145 		if (!tmp)
1146 			tmp = LPDDR4_PDDS_240;
1147 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1148 
1149 		/* MR11 for lp4 ca odt, dq odt set */
1150 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1151 			     DDR_PCTL2_INIT6);
1152 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1153 
1154 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1155 
1156 		tmp = lp4_odt_calc(dram_odt_ohm);
1157 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1158 
1159 		tmp = lp4_odt_calc(dram_caodt_ohm);
1160 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1161 		sw_set_req(dram);
1162 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1163 				DDR_PCTL2_INIT6,
1164 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1165 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1166 		sw_set_ack(dram);
1167 
1168 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1169 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1170 			     DDR_PCTL2_INIT7);
1171 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1172 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1173 
1174 		tmp = lp4_odt_calc(phy_odt_ohm);
1175 		mr22 |= tmp;
1176 		mr22 = mr22 |
1177 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1178 			LPDDR4_ODTE_CK_SHIFT) |
1179 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1180 			LPDDR4_ODTE_CS_SHIFT) |
1181 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1182 			LPDDR4_ODTD_CA_SHIFT);
1183 
1184 		sw_set_req(dram);
1185 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1186 				DDR_PCTL2_INIT7,
1187 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1188 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1189 		sw_set_ack(dram);
1190 	}
1191 
1192 	if (dramtype == DDR4 || dramtype == DDR3) {
1193 		sw_set_req(dram);
1194 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1195 				DDR_PCTL2_INIT3,
1196 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1197 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1198 		sw_set_ack(dram);
1199 	} else {
1200 		sw_set_req(dram);
1201 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1202 				DDR_PCTL2_INIT4,
1203 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1204 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1205 		sw_set_ack(dram);
1206 	}
1207 }
1208 
1209 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1210 				   struct rv1126_sdram_params *sdram_params)
1211 {
1212 	void __iomem *phy_base = dram->phy;
1213 	u32 dramtype = sdram_params->base.dramtype;
1214 	struct sdram_head_info_index_v2 *index =
1215 		(struct sdram_head_info_index_v2 *)common_info;
1216 	struct dq_map_info *map_info;
1217 
1218 	map_info = (struct dq_map_info *)((void *)common_info +
1219 		index->dq_map_index.offset * 4);
1220 
1221 	if (dramtype == LPDDR4X)
1222 		dramtype = LPDDR4;
1223 
1224 	if (dramtype <= LPDDR4)
1225 		writel((map_info->byte_map[dramtype / 4] >>
1226 			((dramtype % 4) * 8)) & 0xff,
1227 		       PHY_REG(phy_base, 0x4f));
1228 
1229 	return 0;
1230 }
1231 
1232 static void phy_cfg(struct dram_info *dram,
1233 		    struct rv1126_sdram_params *sdram_params)
1234 {
1235 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1236 	void __iomem *phy_base = dram->phy;
1237 	u32 i, dq_map, tmp;
1238 	u32 byte1 = 0, byte0 = 0;
1239 
1240 	sdram_cmd_dq_path_remap(dram, sdram_params);
1241 
1242 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
1243 	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
1244 		writel(sdram_params->phy_regs.phy[i][1],
1245 		       phy_base + sdram_params->phy_regs.phy[i][0]);
1246 	}
1247 
1248 	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
1249 	dq_map = readl(PHY_REG(phy_base, 0x4f));
1250 	for (i = 0; i < 4; i++) {
1251 		if (((dq_map >> (i * 2)) & 0x3) == 0)
1252 			byte0 = i;
1253 		if (((dq_map >> (i * 2)) & 0x3) == 1)
1254 			byte1 = i;
1255 	}
1256 
1257 	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
1258 	if (cap_info->bw == 2)
1259 		tmp |= 0xf;
1260 	else if (cap_info->bw == 1)
1261 		tmp |= ((1 << byte0) | (1 << byte1));
1262 	else
1263 		tmp |= (1 << byte0);
1264 
1265 	writel(tmp, PHY_REG(phy_base, 0xf));
1266 
1267 	/* lpddr4 odt control by phy, enable cs0 odt */
1268 	if (sdram_params->base.dramtype == LPDDR4 ||
1269 	    sdram_params->base.dramtype == LPDDR4X)
1270 		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
1271 				(1 << 6) | (1 << 4));
1272 	/* for ca training ca vref choose range1 */
1273 	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
1274 	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
1275 	/* for wr training PHY_0x7c[5], choose range0 */
1276 	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
1277 }
1278 
1279 static int update_refresh_reg(struct dram_info *dram)
1280 {
1281 	void __iomem *pctl_base = dram->pctl;
1282 	u32 ret;
1283 
1284 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1285 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1286 
1287 	return 0;
1288 }
1289 
1290 /*
1291  * rank = 1: cs0
1292  * rank = 2: cs1
1293  */
1294 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1295 {
1296 	u32 ret;
1297 	u32 i, temp;
1298 	u32 dqmap;
1299 
1300 	void __iomem *pctl_base = dram->pctl;
1301 	struct sdram_head_info_index_v2 *index =
1302 		(struct sdram_head_info_index_v2 *)common_info;
1303 	struct dq_map_info *map_info;
1304 
1305 	map_info = (struct dq_map_info *)((void *)common_info +
1306 		index->dq_map_index.offset * 4);
1307 
1308 	if (dramtype == LPDDR2)
1309 		dqmap = map_info->lp2_dq0_7_map;
1310 	else
1311 		dqmap = map_info->lp3_dq0_7_map;
1312 
1313 	pctl_read_mr(pctl_base, rank, mr_num);
1314 
1315 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1316 
1317 	if (dramtype != LPDDR4) {
1318 		temp = 0;
1319 		for (i = 0; i < 8; i++) {
1320 			temp = temp | (((ret >> i) & 0x1) <<
1321 				       ((dqmap >> (i * 4)) & 0xf));
1322 		}
1323 	} else {
1324 		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1325 	}
1326 
1327 	return temp;
1328 }
1329 
1330 /* before call this function autorefresh should be disabled */
1331 void send_a_refresh(struct dram_info *dram)
1332 {
1333 	void __iomem *pctl_base = dram->pctl;
1334 
1335 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1336 		continue;
1337 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1338 }
1339 
1340 static void enter_sr(struct dram_info *dram, u32 en)
1341 {
1342 	void __iomem *pctl_base = dram->pctl;
1343 
1344 	if (en) {
1345 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1346 		while (1) {
1347 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1348 			      PCTL2_SELFREF_TYPE_MASK) ==
1349 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1350 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1351 			      PCTL2_OPERATING_MODE_MASK) ==
1352 			     PCTL2_OPERATING_MODE_SR))
1353 				break;
1354 		}
1355 	} else {
1356 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1357 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1358 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1359 			continue;
1360 	}
1361 }
1362 
1363 void record_dq_prebit(struct dram_info *dram)
1364 {
1365 	u32 group, i, tmp;
1366 	void __iomem *phy_base = dram->phy;
1367 
1368 	for (group = 0; group < 4; group++) {
1369 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1370 			/* l_loop_invdelaysel */
1371 			writel(dq_sel[i][0], PHY_REG(phy_base,
1372 						     grp_addr[group] + 0x2c));
1373 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1374 			writel(tmp, PHY_REG(phy_base,
1375 					    grp_addr[group] + dq_sel[i][1]));
1376 
1377 			/* r_loop_invdelaysel */
1378 			writel(dq_sel[i][0], PHY_REG(phy_base,
1379 						     grp_addr[group] + 0x2d));
1380 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1381 			writel(tmp, PHY_REG(phy_base,
1382 					    grp_addr[group] + dq_sel[i][2]));
1383 		}
1384 	}
1385 }
1386 
1387 static void update_dq_rx_prebit(struct dram_info *dram)
1388 {
1389 	void __iomem *phy_base = dram->phy;
1390 
1391 	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
1392 			BIT(4));
1393 	udelay(1);
1394 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
1395 }
1396 
1397 static void update_dq_tx_prebit(struct dram_info *dram)
1398 {
1399 	void __iomem *phy_base = dram->phy;
1400 
1401 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1402 	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
1403 	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1404 	udelay(1);
1405 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1406 }
1407 
1408 static void update_ca_prebit(struct dram_info *dram)
1409 {
1410 	void __iomem *phy_base = dram->phy;
1411 
1412 	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
1413 	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1414 	udelay(1);
1415 	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1416 }
1417 
1418 /*
1419  * dir: 0: de-skew = delta_*
1420  *	1: de-skew = reg val - delta_*
1421  * delta_dir: value for differential signal: clk/
1422  * delta_sig: value for single signal: ca/cmd
1423  */
1424 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1425 			     int delta_sig, u32 cs, u32 dramtype)
1426 {
1427 	void __iomem *phy_base = dram->phy;
1428 	u32 i, cs_en, tmp;
1429 	u32 dfi_lp_stat = 0;
1430 
1431 	if (cs == 0)
1432 		cs_en = 1;
1433 	else if (cs == 2)
1434 		cs_en = 2;
1435 	else
1436 		cs_en = 3;
1437 
1438 	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
1439 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1440 		dfi_lp_stat = 1;
1441 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1442 	}
1443 	enter_sr(dram, 1);
1444 
1445 	for (i = 0; i < 0x20; i++) {
1446 		if (dir == DESKEW_MDF_ABS_VAL)
1447 			tmp = delta_sig;
1448 		else
1449 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1450 			      delta_sig;
1451 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1452 	}
1453 
1454 	if (dir == DESKEW_MDF_ABS_VAL)
1455 		tmp = delta_dif;
1456 	else
1457 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1458 		       delta_sig + delta_dif;
1459 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1460 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1461 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1462 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1463 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1464 
1465 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1466 		update_ca_prebit(dram);
1467 	}
1468 	enter_sr(dram, 0);
1469 
1470 	if (dfi_lp_stat)
1471 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1472 
1473 }
1474 
1475 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1476 {
1477 	u32 i, j, offset = 0;
1478 	u32 min = 0x3f;
1479 	void __iomem *phy_base = dram->phy;
1480 	u32 byte_en;
1481 
1482 	if (signal == SKEW_TX_SIGNAL)
1483 		offset = 8;
1484 
1485 	if (signal == SKEW_CA_SIGNAL) {
1486 		for (i = 0; i < 0x20; i++)
1487 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1488 	} else {
1489 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1490 		for (j = offset; j < offset + rank * 4; j++) {
1491 			if (!((byte_en >> (j % 4)) & 1))
1492 				continue;
1493 			for (i = 0; i < 11; i++)
1494 				min = MIN(min,
1495 					  readl(PHY_REG(phy_base,
1496 							dqs_dq_skew_adr[j] +
1497 							i)));
1498 		}
1499 	}
1500 
1501 	return min;
1502 }
1503 
1504 static u32 low_power_update(struct dram_info *dram, u32 en)
1505 {
1506 	void __iomem *pctl_base = dram->pctl;
1507 	u32 lp_stat = 0;
1508 
1509 	if (en) {
1510 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1511 	} else {
1512 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1513 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1514 	}
1515 
1516 	return lp_stat;
1517 }
1518 
1519 /*
1520  * signal:
1521  * dir: 0: de-skew = delta_*
1522  *	1: de-skew = reg val - delta_*
1523  * delta_dir: value for differential signal: dqs
1524  * delta_sig: value for single signal: dq/dm
1525  */
1526 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1527 			     int delta_dif, int delta_sig, u32 rank)
1528 {
1529 	void __iomem *phy_base = dram->phy;
1530 	u32 i, j, tmp, offset;
1531 	u32 byte_en;
1532 
1533 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1534 
1535 	if (signal == SKEW_RX_SIGNAL)
1536 		offset = 0;
1537 	else
1538 		offset = 8;
1539 
1540 	for (j = offset; j < (offset + rank * 4); j++) {
1541 		if (!((byte_en >> (j % 4)) & 1))
1542 			continue;
1543 		for (i = 0; i < 0x9; i++) {
1544 			if (dir == DESKEW_MDF_ABS_VAL)
1545 				tmp = delta_sig;
1546 			else
1547 				tmp = delta_sig + readl(PHY_REG(phy_base,
1548 							dqs_dq_skew_adr[j] +
1549 							i));
1550 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1551 		}
1552 		if (dir == DESKEW_MDF_ABS_VAL)
1553 			tmp = delta_dif;
1554 		else
1555 			tmp = delta_dif + readl(PHY_REG(phy_base,
1556 						dqs_dq_skew_adr[j] + 9));
1557 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1558 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1559 	}
1560 	if (signal == SKEW_RX_SIGNAL)
1561 		update_dq_rx_prebit(dram);
1562 	else
1563 		update_dq_tx_prebit(dram);
1564 }
1565 
1566 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1567 {
1568 	void __iomem *phy_base = dram->phy;
1569 	u32 ret;
1570 	u32 dis_auto_zq = 0;
1571 	u32 odt_val_up, odt_val_dn;
1572 	u32 i, j;
1573 
1574 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1575 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1576 
1577 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1578 		for (i = 0; i < 4; i++) {
1579 			j = 0x110 + i * 0x10;
1580 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1581 			       PHY_REG(phy_base, j));
1582 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1583 			       PHY_REG(phy_base, j + 0x1));
1584 		}
1585 	}
1586 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1587 	/* use normal read mode for data training */
1588 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1589 
1590 	if (dramtype == DDR4)
1591 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1592 
1593 	/* choose training cs */
1594 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1595 	/* enable gate training */
1596 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1597 	udelay(50);
1598 	ret = readl(PHY_REG(phy_base, 0x91));
1599 	/* disable gate training */
1600 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1601 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1602 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1603 
1604 	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1605 
1606 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1607 		for (i = 0; i < 4; i++) {
1608 			j = 0x110 + i * 0x10;
1609 			writel(odt_val_dn, PHY_REG(phy_base, j));
1610 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1611 		}
1612 	}
1613 	return ret;
1614 }
1615 
1616 static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
1617 			    u32 rank)
1618 {
1619 	void __iomem *pctl_base = dram->pctl;
1620 	void __iomem *phy_base = dram->phy;
1621 	u32 dis_auto_zq = 0;
1622 	u32 tmp;
1623 	u32 cur_fsp;
1624 	u32 timeout_us = 1000;
1625 
1626 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1627 
1628 	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1629 
1630 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1631 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
1632 	      0xffff;
1633 	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));
1634 
1635 	/* disable another cs's output */
1636 	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1637 		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
1638 			      dramtype);
1639 	if (dramtype == DDR3 || dramtype == DDR4)
1640 		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1641 	else
1642 		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1643 
1644 	/* choose cs */
1645 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1646 			((0x2 >> cs) << 6) | (0 << 2));
1647 	/* enable write leveling */
1648 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1649 			((0x2 >> cs) << 6) | (1 << 2));
1650 
1651 	while (1) {
1652 		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
1653 		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
1654 			break;
1655 
1656 		udelay(1);
1657 		if (timeout_us-- == 0) {
1658 			printascii("error: write leveling timeout\n");
1659 			while (1)
1660 				;
1661 		}
1662 	}
1663 
1664 	/* disable write leveling */
1665 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1666 			((0x2 >> cs) << 6) | (0 << 2));
1667 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);
1668 
1669 	/* enable another cs's output */
1670 	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1671 		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
1672 			      dramtype);
1673 
1674 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1675 
1676 	return 0;
1677 }
1678 
1679 char pattern[32] = {
1680 	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
1681 	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
1682 	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
1683 	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
1684 };
1685 
1686 static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
1687 			    u32 mhz)
1688 {
1689 	void __iomem *pctl_base = dram->pctl;
1690 	void __iomem *phy_base = dram->phy;
1691 	u32 trefi_1x, trfc_1x;
1692 	u32 dis_auto_zq = 0;
1693 	u32 timeout_us = 1000;
1694 	u32 dqs_default;
1695 	u32 cur_fsp;
1696 	u32 vref_inner;
1697 	u32 i;
1698 	struct sdram_head_info_index_v2 *index =
1699 		(struct sdram_head_info_index_v2 *)common_info;
1700 	struct dq_map_info *map_info;
1701 
1702 	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
1703 	if (dramtype == DDR3 && vref_inner == 0x80) {
1704 		for (i = 0; i < 4; i++)
1705 			writel(vref_inner - 0xa,
1706 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1707 
1708 		/* reg_rx_vref_value_update */
1709 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1710 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1711 	}
1712 
1713 	map_info = (struct dq_map_info *)((void *)common_info +
1714 		index->dq_map_index.offset * 4);
1715 	/* only 1cs a time, 0:cs0 1 cs1 */
1716 	if (cs > 1)
1717 		return -1;
1718 
1719 	dqs_default = 0xf;
1720 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1721 
1722 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1723 	/* config refresh timing */
1724 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1725 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1726 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1727 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1728 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1729 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1730 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1731 	/* reg_phy_trfc */
1732 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1733 	/* reg_max_refi_cnt */
1734 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1735 
1736 	/* choose training cs */
1737 	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);
1738 
1739 	/* set dq map for ddr4 */
1740 	if (dramtype == DDR4) {
1741 		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
1742 		for (i = 0; i < 4; i++) {
1743 			writel((map_info->ddr4_dq_map[cs * 2] >>
1744 				((i % 4) * 8)) & 0xff,
1745 				PHY_REG(phy_base, 0x238 + i));
1746 			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
1747 				((i % 4) * 8)) & 0xff,
1748 				PHY_REG(phy_base, 0x2b8 + i));
1749 		}
1750 	}
1751 
1752 	/* cha_l reg_l_rd_train_dqs_default[5:0] */
1753 	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
1754 	/* cha_h reg_h_rd_train_dqs_default[5:0] */
1755 	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
1756 	/* chb_l reg_l_rd_train_dqs_default[5:0] */
1757 	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
1758 	/* chb_h reg_h_rd_train_dqs_default[5:0] */
1759 	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
1760 
1761 	/* Choose the read train auto mode */
1762 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1763 	/* Enable the auto train of the read train */
1764 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1765 
1766 	/* Wait the train done. */
1767 	while (1) {
1768 		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1769 			break;
1770 
1771 		udelay(1);
1772 		if (timeout_us-- == 0) {
1773 			printascii("error: read training timeout\n");
1774 			return -1;
1775 		}
1776 	}
1777 
1778 	/* Check the read train state */
1779 	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1780 	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1781 		printascii("error: read training error\n");
1782 		return -1;
1783 	}
1784 
1785 	/* Exit the Read Training by setting */
1786 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1787 
1788 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1789 
1790 	if (dramtype == DDR3 && vref_inner == 0x80) {
1791 		for (i = 0; i < 4; i++)
1792 			writel(vref_inner,
1793 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1794 
1795 		/* reg_rx_vref_value_update */
1796 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1797 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1798 	}
1799 
1800 	return 0;
1801 }
1802 
1803 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1804 			    u32 mhz, u32 dst_fsp)
1805 {
1806 	void __iomem *pctl_base = dram->pctl;
1807 	void __iomem *phy_base = dram->phy;
1808 	u32 trefi_1x, trfc_1x;
1809 	u32 dis_auto_zq = 0;
1810 	u32 timeout_us = 1000;
1811 	u32 cur_fsp;
1812 	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1813 
1814 	if (dramtype == LPDDR3 && mhz <= 400) {
1815 		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1816 		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1817 		cl = readl(PHY_REG(phy_base, offset));
1818 		cwl = readl(PHY_REG(phy_base, offset + 2));
1819 
1820 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1821 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1822 		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1823 	}
1824 
1825 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1826 
1827 	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1828 	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1829 	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1830 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1831 	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1832 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1833 	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1834 	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1835 	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1836 	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1837 
1838 	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1839 	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1840 
1841 	/* config refresh timing */
1842 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1843 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1844 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1845 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1846 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1847 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1848 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1849 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1850 	/* reg_phy_trfc */
1851 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1852 	/* reg_max_refi_cnt */
1853 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1854 
1855 	/* choose training cs */
1856 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1857 
1858 	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
1859 	/* 0: Use the write-leveling value. */
1860 	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
1861 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1862 
1863 	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1864 	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1865 
1866 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1867 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1868 
1869 	send_a_refresh(dram);
1870 
1871 	while (1) {
1872 		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1873 			break;
1874 
1875 		udelay(1);
1876 		if (timeout_us-- == 0) {
1877 			printascii("error: write training timeout\n");
1878 			while (1)
1879 				;
1880 		}
1881 	}
1882 
1883 	/* Check the write train state */
1884 	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1885 		printascii("error: write training error\n");
1886 		return -1;
1887 	}
1888 
1889 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1890 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1891 
1892 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1893 
1894 	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
1895 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1896 		fsp_param[dst_fsp].vref_dq[cs] =
1897 			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1898 			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1899 		/* add range info */
1900 		fsp_param[dst_fsp].vref_dq[cs] |=
1901 			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1902 	}
1903 
1904 	if (dramtype == LPDDR3 && mhz <= 400) {
1905 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1906 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1907 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1908 			       DDR_PCTL2_INIT3);
1909 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1910 			      dramtype);
1911 	}
1912 
1913 	return 0;
1914 }
1915 
1916 static int data_training(struct dram_info *dram, u32 cs,
1917 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1918 			 u32 training_flag)
1919 {
1920 	u32 ret = 0;
1921 
1922 	if (training_flag == FULL_TRAINING)
1923 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1924 				WRITE_TRAINING | READ_TRAINING;
1925 
1926 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1927 		ret = data_training_wl(dram, cs,
1928 				       sdram_params->base.dramtype,
1929 				       sdram_params->ch.cap_info.rank);
1930 		if (ret != 0)
1931 			goto out;
1932 	}
1933 
1934 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1935 		ret = data_training_rg(dram, cs,
1936 				       sdram_params->base.dramtype);
1937 		if (ret != 0)
1938 			goto out;
1939 	}
1940 
1941 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1942 		ret = data_training_rd(dram, cs,
1943 				       sdram_params->base.dramtype,
1944 				       sdram_params->base.ddr_freq);
1945 		if (ret != 0)
1946 			goto out;
1947 	}
1948 
1949 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1950 		ret = data_training_wr(dram, cs,
1951 				       sdram_params->base.dramtype,
1952 				       sdram_params->base.ddr_freq, dst_fsp);
1953 		if (ret != 0)
1954 			goto out;
1955 	}
1956 
1957 out:
1958 	return ret;
1959 }
1960 
1961 static int get_wrlvl_val(struct dram_info *dram,
1962 			 struct rv1126_sdram_params *sdram_params)
1963 {
1964 	int i, j, clk_skew;
1965 	void __iomem *phy_base = dram->phy;
1966 	u32 lp_stat;
1967 	int ret;
1968 
1969 	lp_stat = low_power_update(dram, 0);
1970 
1971 	clk_skew = 0x1f;
1972 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1973 			 sdram_params->base.dramtype);
1974 
1975 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1976 	if (sdram_params->ch.cap_info.rank == 2)
1977 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1978 
1979 	for (j = 0; j < 2; j++)
1980 		for (i = 0; i < 4; i++)
1981 			wrlvl_result[j][i] =
1982 				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1983 				clk_skew;
1984 
1985 	low_power_update(dram, lp_stat);
1986 
1987 	return ret;
1988 }
1989 
1990 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1991 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1992 				      void __iomem *phy_base, u8 cs_num)
1993 {
1994 	int i;
1995 
1996 	result->cs_num = cs_num;
1997 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1998 			  PHY_DQ_WIDTH_MASK;
1999 	for (i = 0; i < FSP_NUM; i++)
2000 		result->fsp_mhz[i] = 0;
2001 }
2002 
2003 static void save_rw_trn_min_max(void __iomem *phy_base,
2004 				struct cs_rw_trn_result *rd_result,
2005 				struct cs_rw_trn_result *wr_result,
2006 				u8 byte_en)
2007 {
2008 	u16 phy_ofs;
2009 	u8 dqs;
2010 	u8 dq;
2011 
2012 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2013 		if ((byte_en & BIT(dqs)) == 0)
2014 			continue;
2015 
2016 		/* Channel A or B (low or high 16 bit) */
2017 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2018 		/* low or high 8 bit */
2019 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2020 		for (dq = 0; dq < 8; dq++) {
2021 			rd_result->dqs[dqs].dq_min[dq] =
2022 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2023 			rd_result->dqs[dqs].dq_max[dq] =
2024 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2025 			wr_result->dqs[dqs].dq_min[dq] =
2026 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2027 			wr_result->dqs[dqs].dq_max[dq] =
2028 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2029 		}
2030 	}
2031 }
2032 
2033 static void save_rw_trn_deskew(void __iomem *phy_base,
2034 			       struct fsp_rw_trn_result *result, u8 cs_num,
2035 			       int min_val, bool rw)
2036 {
2037 	u16 phy_ofs;
2038 	u8 cs;
2039 	u8 dq;
2040 
2041 	result->min_val = min_val;
2042 
2043 	for (cs = 0; cs < cs_num; cs++) {
2044 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2045 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2046 		for (dq = 0; dq < 8; dq++) {
2047 			result->cs[cs].dqs[0].dq_deskew[dq] =
2048 				readb(PHY_REG(phy_base, phy_ofs + dq));
2049 			result->cs[cs].dqs[1].dq_deskew[dq] =
2050 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2051 			result->cs[cs].dqs[2].dq_deskew[dq] =
2052 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2053 			result->cs[cs].dqs[3].dq_deskew[dq] =
2054 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2055 		}
2056 
2057 		result->cs[cs].dqs[0].dqs_deskew =
2058 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2059 		result->cs[cs].dqs[1].dqs_deskew =
2060 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2061 		result->cs[cs].dqs[2].dqs_deskew =
2062 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2063 		result->cs[cs].dqs[3].dqs_deskew =
2064 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2065 	}
2066 }
2067 
2068 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2069 {
2070 	result->flag = DDR_DQ_EYE_FLAG;
2071 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2072 }
2073 #endif
2074 
2075 static int high_freq_training(struct dram_info *dram,
2076 			      struct rv1126_sdram_params *sdram_params,
2077 			      u32 fsp)
2078 {
2079 	u32 i, j;
2080 	void __iomem *phy_base = dram->phy;
2081 	u32 dramtype = sdram_params->base.dramtype;
2082 	int min_val;
2083 	int dqs_skew, clk_skew, ca_skew;
2084 	u8 byte_en;
2085 	int ret;
2086 
2087 	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2088 	dqs_skew = 0;
2089 	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2090 		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2091 			if ((byte_en & BIT(i)) != 0)
2092 				dqs_skew += wrlvl_result[j][i];
2093 		}
2094 	}
2095 	dqs_skew = dqs_skew /
2096 		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2097 
2098 	clk_skew = 0x20 - dqs_skew;
2099 	dqs_skew = 0x20;
2100 
2101 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2102 		min_val = 0xff;
2103 		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2104 			for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
2105 				min_val = MIN(wrlvl_result[j][i], min_val);
2106 
2107 		if (min_val < 0) {
2108 			clk_skew = -min_val;
2109 			ca_skew = -min_val;
2110 		} else {
2111 			clk_skew = 0;
2112 			ca_skew = 0;
2113 		}
2114 	} else if (dramtype == LPDDR3) {
2115 		ca_skew = clk_skew - 4;
2116 	} else {
2117 		ca_skew = clk_skew;
2118 	}
2119 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2120 			 dramtype);
2121 
2122 	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2123 	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2124 	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2125 	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2126 	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2127 			    READ_TRAINING | WRITE_TRAINING);
2128 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2129 	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2130 	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2131 			    &rw_trn_result.wr_fsp[fsp].cs[0],
2132 			    rw_trn_result.byte_en);
2133 #endif
2134 	if (sdram_params->ch.cap_info.rank == 2) {
2135 		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2136 		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2137 		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2138 		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2139 		ret |= data_training(dram, 1, sdram_params, fsp,
2140 				     READ_GATE_TRAINING | READ_TRAINING |
2141 				     WRITE_TRAINING);
2142 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2143 		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2144 				    &rw_trn_result.wr_fsp[fsp].cs[1],
2145 				    rw_trn_result.byte_en);
2146 #endif
2147 	}
2148 	if (ret)
2149 		goto out;
2150 
2151 	record_dq_prebit(dram);
2152 
2153 	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2154 				sdram_params->ch.cap_info.rank) * -1;
2155 	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2156 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2157 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2158 	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2159 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2160 			   SKEW_RX_SIGNAL);
2161 #endif
2162 
2163 	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2164 				    sdram_params->ch.cap_info.rank),
2165 		      get_min_value(dram, SKEW_CA_SIGNAL,
2166 				    sdram_params->ch.cap_info.rank)) * -1;
2167 
2168 	/* clk = 0, rx all skew -7, tx - min_value */
2169 	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2170 			 dramtype);
2171 
2172 	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2173 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2174 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2175 	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2176 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2177 			   SKEW_TX_SIGNAL);
2178 #endif
2179 
2180 	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2181 	if (sdram_params->ch.cap_info.rank == 2)
2182 		ret |= data_training(dram, 1, sdram_params, 0,
2183 				     READ_GATE_TRAINING);
2184 out:
2185 	return ret;
2186 }
2187 
2188 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2189 {
2190 	writel(ddrconfig, &dram->msch->deviceconf);
2191 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2192 }
2193 
2194 static void update_noc_timing(struct dram_info *dram,
2195 			      struct rv1126_sdram_params *sdram_params)
2196 {
2197 	void __iomem *pctl_base = dram->pctl;
2198 	u32 bw, bl;
2199 
2200 	bw = 8 << sdram_params->ch.cap_info.bw;
2201 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2202 
2203 	/* update the noc timing related to data bus width */
2204 	if ((bw / 8 * bl) <= 16)
2205 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2206 	else if ((bw / 8 * bl) == 32)
2207 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2208 	else if ((bw / 8 * bl) == 64)
2209 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2210 	else
2211 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2212 
2213 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2214 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2215 
2216 	if (sdram_params->base.dramtype == LPDDR4 ||
2217 	    sdram_params->base.dramtype == LPDDR4X) {
2218 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2219 			(bw == 16) ? 0x1 : 0x2;
2220 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2221 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2222 	}
2223 
2224 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2225 	       &dram->msch->ddrtiminga0);
2226 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2227 	       &dram->msch->ddrtimingb0);
2228 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2229 	       &dram->msch->ddrtimingc0);
2230 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2231 	       &dram->msch->devtodev0);
2232 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2233 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2234 	       &dram->msch->ddr4timing);
2235 }
2236 
2237 static int split_setup(struct dram_info *dram,
2238 		       struct rv1126_sdram_params *sdram_params)
2239 {
2240 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2241 	u32 dramtype = sdram_params->base.dramtype;
2242 	u32 split_size, split_mode;
2243 	u64 cs_cap[2], cap;
2244 
2245 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2246 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2247 	/* only support the larger cap is in low 16bit */
2248 	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2249 		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2250 		cap_info->cs0_high16bit_row));
2251 	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2252 		   (cap_info->rank == 2)) {
2253 		if (!cap_info->cs1_high16bit_row)
2254 			cap = cs_cap[0];
2255 		else
2256 			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2257 				cap_info->cs1_high16bit_row));
2258 	} else {
2259 		goto out;
2260 	}
2261 	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2262 	if (cap_info->bw == 2)
2263 		split_mode = SPLIT_MODE_32_L16_VALID;
2264 	else
2265 		split_mode = SPLIT_MODE_16_L8_VALID;
2266 
2267 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2268 		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2269 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2270 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2271 		     (split_mode << SPLIT_MODE_OFFSET) |
2272 		     (0x0 << SPLIT_BYPASS_OFFSET) |
2273 		     (split_size << SPLIT_SIZE_OFFSET));
2274 
2275 	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2276 		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2277 		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2278 
2279 out:
2280 	return 0;
2281 }
2282 
2283 static void split_bypass(struct dram_info *dram)
2284 {
2285 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2286 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2287 		return;
2288 
2289 	/* bypass split */
2290 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2291 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2292 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2293 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2294 		     (0x0 << SPLIT_SIZE_OFFSET));
2295 }
2296 
2297 static void dram_all_config(struct dram_info *dram,
2298 			    struct rv1126_sdram_params *sdram_params)
2299 {
2300 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2301 	u32 dram_type = sdram_params->base.dramtype;
2302 	void __iomem *pctl_base = dram->pctl;
2303 	u32 sys_reg2 = 0;
2304 	u32 sys_reg3 = 0;
2305 	u64 cs_cap[2];
2306 	u32 cs_pst;
2307 
2308 	set_ddrconfig(dram, cap_info->ddrconfig);
2309 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2310 			 &sys_reg3, 0);
2311 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2312 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2313 
2314 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2315 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2316 
2317 	if (cap_info->rank == 2) {
2318 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2319 			6 + 2;
2320 		if (cs_pst > 28)
2321 			cs_cap[0] = 1llu << cs_pst;
2322 	}
2323 
2324 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2325 			(((cs_cap[0] >> 20) / 64) & 0xff),
2326 			&dram->msch->devicesize);
2327 	update_noc_timing(dram, sdram_params);
2328 }
2329 
2330 static void enable_low_power(struct dram_info *dram,
2331 			     struct rv1126_sdram_params *sdram_params)
2332 {
2333 	void __iomem *pctl_base = dram->pctl;
2334 	u32 grf_lp_con;
2335 
2336 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2337 
2338 	if (sdram_params->base.dramtype == DDR4)
2339 		grf_lp_con = (0x7 << 16) | (1 << 1);
2340 	else if (sdram_params->base.dramtype == DDR3)
2341 		grf_lp_con = (0x7 << 16) | (1 << 0);
2342 	else
2343 		grf_lp_con = (0x7 << 16) | (1 << 2);
2344 
2345 	/* en lpckdis_en */
2346 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2347 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2348 
2349 	/* enable sr, pd */
2350 	if (dram->pd_idle == 0)
2351 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2352 	else
2353 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2354 	if (dram->sr_idle == 0)
2355 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2356 	else
2357 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2358 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2359 }
2360 
2361 static void ddr_set_atags(struct dram_info *dram,
2362 			  struct rv1126_sdram_params *sdram_params)
2363 {
2364 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2365 	u32 dram_type = sdram_params->base.dramtype;
2366 	void __iomem *pctl_base = dram->pctl;
2367 	struct tag_serial t_serial;
2368 	struct tag_ddr_mem t_ddrmem;
2369 	struct tag_soc_info t_socinfo;
2370 	u64 cs_cap[2];
2371 	u32 cs_pst = 0;
2372 	u32 split, split_size;
2373 	u64 reduce_cap = 0;
2374 
2375 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2376 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2377 
2378 	memset(&t_serial, 0, sizeof(struct tag_serial));
2379 
2380 	t_serial.version = 0;
2381 	t_serial.enable = 1;
2382 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2383 	t_serial.baudrate = CONFIG_BAUDRATE;
2384 	t_serial.m_mode = SERIAL_M_MODE_M0;
2385 	t_serial.id = 2;
2386 
2387 	atags_destroy();
2388 	atags_set_tag(ATAG_SERIAL,  &t_serial);
2389 
2390 	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
2391 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2392 	if (cap_info->row_3_4) {
2393 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2394 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2395 	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
2396 		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
2397 		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
2398 	}
2399 	t_ddrmem.version = 0;
2400 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2401 	if (cs_cap[1]) {
2402 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2403 			6 + 2;
2404 	}
2405 
2406 	if (cs_cap[1] && cs_pst > 27) {
2407 		t_ddrmem.count = 2;
2408 		t_ddrmem.bank[1] = 1 << cs_pst;
2409 		t_ddrmem.bank[2] = cs_cap[0];
2410 		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
2411 	} else {
2412 		t_ddrmem.count = 1;
2413 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
2414 	}
2415 
2416 	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);
2417 
2418 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2419 	t_socinfo.version = 0;
2420 	t_socinfo.name = 0x1126;
2421 }
2422 
2423 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2424 {
2425 	u32 split;
2426 
2427 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2428 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2429 		split = 0;
2430 	else
2431 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2432 			SPLIT_SIZE_MASK;
2433 
2434 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2435 			     &sdram_params->base, split);
2436 }
2437 
2438 static int sdram_init_(struct dram_info *dram,
2439 		       struct rv1126_sdram_params *sdram_params, u32 post_init)
2440 {
2441 	void __iomem *pctl_base = dram->pctl;
2442 	void __iomem *phy_base = dram->phy;
2443 	u32 ddr4_vref;
2444 	u32 mr_tmp;
2445 
2446 	rkclk_configure_ddr(dram, sdram_params);
2447 
2448 	rkclk_ddr_reset(dram, 1, 1, 1, 1);
2449 	udelay(10);
2450 
2451 	rkclk_ddr_reset(dram, 1, 1, 1, 0);
2452 	phy_cfg(dram, sdram_params);
2453 
2454 	rkclk_ddr_reset(dram, 1, 1, 0, 0);
2455 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2456 
2457 	rkclk_ddr_reset(dram, 1, 0, 0, 0);
2458 	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2459 		 dram->sr_idle, dram->pd_idle);
2460 
2461 	if (sdram_params->ch.cap_info.bw == 2) {
2462 		/* 32bit interface use pageclose */
2463 		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2464 		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
2465 		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2466 	} else {
2467 		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2468 	}
2469 
2470 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2471 	u32 tmp, trefi;
2472 
2473 	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2474 	trefi = (tmp >> 16) & 0xfff;
2475 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2476 	       pctl_base + DDR_PCTL2_RFSHTMG);
2477 #endif
2478 
2479 	/* set frequency_mode */
2480 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2481 	/* set target_frequency to Frequency 0 */
2482 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2483 
2484 	set_ds_odt(dram, sdram_params, 0);
2485 	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2486 	set_ctl_address_map(dram, sdram_params);
2487 
2488 	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2489 
2490 	rkclk_ddr_reset(dram, 0, 0, 0, 0);
2491 
2492 	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
2493 		continue;
2494 
2495 	if (sdram_params->base.dramtype == LPDDR3) {
2496 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2497 	} else if (sdram_params->base.dramtype == LPDDR4 ||
2498 		   sdram_params->base.dramtype == LPDDR4X) {
2499 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2500 		/* MR11 */
2501 		pctl_write_mr(dram->pctl, 3, 11,
2502 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2503 			      LPDDR4);
2504 		/* MR12 */
2505 		pctl_write_mr(dram->pctl, 3, 12,
2506 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2507 			      LPDDR4);
2508 
2509 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2510 		/* MR22 */
2511 		pctl_write_mr(dram->pctl, 3, 22,
2512 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2513 			      LPDDR4);
2514 	}
2515 
2516 	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
2517 		if (post_init != 0)
2518 			printascii("DTT cs0 error\n");
2519 		return -1;
2520 	}
2521 
2522 	if (sdram_params->base.dramtype == LPDDR4) {
2523 		mr_tmp = read_mr(dram, 1, 14, LPDDR4);
2524 
2525 		if (mr_tmp != 0x4d)
2526 			return -1;
2527 	}
2528 
2529 	if (sdram_params->base.dramtype == LPDDR4 ||
2530 	    sdram_params->base.dramtype == LPDDR4X) {
2531 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2532 		/* MR14 */
2533 		pctl_write_mr(dram->pctl, 3, 14,
2534 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2535 			      LPDDR4);
2536 	}
2537 	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2538 		if (data_training(dram, 1, sdram_params, 0,
2539 				  READ_GATE_TRAINING) != 0) {
2540 			printascii("DTT cs1 error\n");
2541 			return -1;
2542 		}
2543 	}
2544 
2545 	if (sdram_params->base.dramtype == DDR4) {
2546 		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2547 		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2548 				  sdram_params->base.dramtype);
2549 	}
2550 
2551 	dram_all_config(dram, sdram_params);
2552 	enable_low_power(dram, sdram_params);
2553 
2554 	return 0;
2555 }
2556 
2557 static u64 dram_detect_cap(struct dram_info *dram,
2558 			   struct rv1126_sdram_params *sdram_params,
2559 			   unsigned char channel)
2560 {
2561 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2562 	void __iomem *pctl_base = dram->pctl;
2563 	void __iomem *phy_base = dram->phy;
2564 	u32 mr8;
2565 
2566 	u32 bktmp;
2567 	u32 coltmp;
2568 	u32 rowtmp;
2569 	u32 cs;
2570 	u32 dram_type = sdram_params->base.dramtype;
2571 	u32 pwrctl;
2572 	u32 i, dq_map;
2573 	u32 byte1 = 0, byte0 = 0;
2574 	u32 tmp, byte;
2575 	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2576 	struct dq_map_info *map_info = (struct dq_map_info *)
2577 				       ((void *)common_info + index->dq_map_index.offset * 4);
2578 
2579 	cap_info->bw = dram_type == DDR3 ? 0 : 1;
2580 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2581 		if (dram_type != DDR4) {
2582 			coltmp = 12;
2583 			bktmp = 3;
2584 			if (dram_type == LPDDR2)
2585 				rowtmp = 15;
2586 			else
2587 				rowtmp = 16;
2588 
2589 			if (sdram_detect_col(cap_info, coltmp) != 0)
2590 				goto cap_err;
2591 
2592 			sdram_detect_bank(cap_info, coltmp, bktmp);
2593 			if (dram_type != LPDDR3)
2594 				sdram_detect_dbw(cap_info, dram_type);
2595 		} else {
2596 			coltmp = 10;
2597 			bktmp = 4;
2598 			rowtmp = 17;
2599 
2600 			cap_info->col = 10;
2601 			cap_info->bk = 2;
2602 			sdram_detect_bg(cap_info, coltmp);
2603 		}
2604 
2605 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2606 			goto cap_err;
2607 
2608 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2609 	} else {
2610 		cap_info->col = 10;
2611 		cap_info->bk = 3;
2612 		mr8 = read_mr(dram, 1, 8, dram_type);
2613 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2614 		mr8 = (mr8 >> 2) & 0xf;
2615 		if (mr8 >= 0 && mr8 <= 6) {
2616 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2617 		} else if (mr8 == 0xc) {
2618 			cap_info->cs0_row = 13;
2619 		} else {
2620 			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2621 			goto cap_err;
2622 		}
2623 		if (cap_info->dbw == 0)
2624 			cap_info->cs0_row++;
2625 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2626 		if (cap_info->cs0_row >= 17) {
2627 			printascii("Cap ERR: ");
2628 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2629 			goto cap_err;
2630 			// cap_info->cs0_row = 16;
2631 			// cap_info->row_3_4 = 0;
2632 		}
2633 	}
2634 
2635 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2636 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2637 
2638 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2639 		cs = 1;
2640 	else
2641 		cs = 0;
2642 	cap_info->rank = cs + 1;
2643 
2644 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2645 
2646 	tmp = data_training_rg(dram, 0, dram_type) & 0xf;
2647 
2648 	if (tmp == 0) {
2649 		cap_info->bw = 2;
2650 	} else {
2651 		if (dram_type == DDR3 || dram_type == DDR4) {
2652 			dq_map = 0;
2653 			byte = 0;
2654 			for (i = 0; i < 4; i++) {
2655 				if ((tmp & BIT(i)) == 0) {
2656 					dq_map |= byte << (i * 2);
2657 					byte++;
2658 				}
2659 			}
2660 			cap_info->bw = byte / 2;
2661 			for (i = 0; i < 4; i++) {
2662 				if ((tmp & BIT(i)) != 0) {
2663 					dq_map |= byte << (i * 2);
2664 					byte++;
2665 				}
2666 			}
2667 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24);
2668 		} else {
2669 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2670 			for (i = 0; i < 4; i++) {
2671 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2672 					byte0 = i;
2673 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2674 					byte1 = i;
2675 			}
2676 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2677 					BIT(byte0) | BIT(byte1));
2678 			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2679 				cap_info->bw = 1;
2680 			else
2681 				cap_info->bw = 0;
2682 		}
2683 	}
2684 	if (cap_info->bw > 0)
2685 		cap_info->dbw = 1;
2686 
2687 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2688 
2689 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2690 	if (cs) {
2691 		cap_info->cs1_row = cap_info->cs0_row;
2692 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2693 	} else {
2694 		cap_info->cs1_row = 0;
2695 		cap_info->cs1_high16bit_row = 0;
2696 	}
2697 
2698 	if (dram_type == LPDDR3)
2699 		sdram_detect_dbw(cap_info, dram_type);
2700 
2701 	return 0;
2702 cap_err:
2703 	return -1;
2704 }
2705 
2706 static int dram_detect_cs1_row(struct dram_info *dram,
2707 			       struct rv1126_sdram_params *sdram_params,
2708 			       unsigned char channel)
2709 {
2710 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2711 	void __iomem *pctl_base = dram->pctl;
2712 	u32 ret = 0;
2713 	void __iomem *test_addr;
2714 	u32 row, bktmp, coltmp, bw;
2715 	u64 cs0_cap;
2716 	u32 byte_mask;
2717 	u32 cs_pst;
2718 	u32 cs_add = 0;
2719 	u32 max_row;
2720 
2721 	if (cap_info->rank == 2) {
2722 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2723 			6 + 2;
2724 		if (cs_pst < 28)
2725 			cs_add = 1;
2726 
2727 		cs0_cap = 1 << cs_pst;
2728 
2729 		if (sdram_params->base.dramtype == DDR4) {
2730 			if (cap_info->dbw == 0)
2731 				bktmp = cap_info->bk + 2;
2732 			else
2733 				bktmp = cap_info->bk + 1;
2734 		} else {
2735 			bktmp = cap_info->bk;
2736 		}
2737 		bw = cap_info->bw;
2738 		coltmp = cap_info->col;
2739 
2740 		if (bw == 2)
2741 			byte_mask = 0xFFFF;
2742 		else
2743 			byte_mask = 0xFF;
2744 
2745 		max_row = (cs_pst == 31) ? 30 : 31;
2746 
2747 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2748 
2749 		row = (cap_info->cs0_row > max_row) ? max_row :
2750 			cap_info->cs0_row;
2751 
2752 		for (; row > 12; row--) {
2753 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2754 				    (u32)cs0_cap +
2755 				    (1ul << (row + bktmp + coltmp +
2756 					     cs_add + bw - 1ul)));
2757 
2758 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2759 			writel(PATTERN, test_addr);
2760 
2761 			if (((readl(test_addr) & byte_mask) ==
2762 			     (PATTERN & byte_mask)) &&
2763 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2764 			      byte_mask) == 0)) {
2765 				ret = row;
2766 				break;
2767 			}
2768 		}
2769 	}
2770 
2771 	return ret;
2772 }
2773 
2774 /* return: 0 = success, other = fail */
2775 static int sdram_init_detect(struct dram_info *dram,
2776 			     struct rv1126_sdram_params *sdram_params)
2777 {
2778 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2779 	u32 ret;
2780 	u32 sys_reg = 0;
2781 	u32 sys_reg3 = 0;
2782 	struct sdram_head_info_index_v2 *index =
2783 		(struct sdram_head_info_index_v2 *)common_info;
2784 	struct dq_map_info *map_info;
2785 
2786 	map_info = (struct dq_map_info *)((void *)common_info +
2787 		index->dq_map_index.offset * 4);
2788 
2789 	if (sdram_init_(dram, sdram_params, 0)) {
2790 		if (sdram_params->base.dramtype == DDR3) {
2791 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2792 					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2793 					(0x0 << 0)) << 24);
2794 			if (sdram_init_(dram, sdram_params, 0))
2795 				return -1;
2796 		} else {
2797 			return -1;
2798 		}
2799 	}
2800 
2801 	if (sdram_params->base.dramtype == DDR3) {
2802 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2803 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2804 			return -1;
2805 	}
2806 
2807 	split_bypass(dram);
2808 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2809 		return -1;
2810 
2811 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2812 				   sdram_params->base.dramtype);
2813 	ret = sdram_init_(dram, sdram_params, 1);
2814 	if (ret != 0)
2815 		goto out;
2816 
2817 	cap_info->cs1_row =
2818 		dram_detect_cs1_row(dram, sdram_params, 0);
2819 	if (cap_info->cs1_row) {
2820 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2821 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2822 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2823 				    sys_reg, sys_reg3, 0);
2824 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2825 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2826 	}
2827 
2828 	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2829 	split_setup(dram, sdram_params);
2830 out:
2831 	return ret;
2832 }
2833 
2834 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2835 {
2836 	u32 i;
2837 	u32 offset = 0;
2838 	struct ddr2_3_4_lp2_3_info *ddr_info;
2839 
2840 	if (!freq_mhz) {
2841 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2842 		if (ddr_info)
2843 			freq_mhz =
2844 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2845 				DDR_FREQ_MASK;
2846 		else
2847 			freq_mhz = 0;
2848 	}
2849 
2850 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2851 		if (sdram_configs[i].base.ddr_freq == 0 ||
2852 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2853 			break;
2854 	}
2855 	offset = i == 0 ? 0 : i - 1;
2856 
2857 	return &sdram_configs[offset];
2858 }
2859 
2860 static const u16 pctl_need_update_reg[] = {
2861 	DDR_PCTL2_RFSHTMG,
2862 	DDR_PCTL2_INIT3,
2863 	DDR_PCTL2_INIT4,
2864 	DDR_PCTL2_INIT6,
2865 	DDR_PCTL2_INIT7,
2866 	DDR_PCTL2_DRAMTMG0,
2867 	DDR_PCTL2_DRAMTMG1,
2868 	DDR_PCTL2_DRAMTMG2,
2869 	DDR_PCTL2_DRAMTMG3,
2870 	DDR_PCTL2_DRAMTMG4,
2871 	DDR_PCTL2_DRAMTMG5,
2872 	DDR_PCTL2_DRAMTMG6,
2873 	DDR_PCTL2_DRAMTMG7,
2874 	DDR_PCTL2_DRAMTMG8,
2875 	DDR_PCTL2_DRAMTMG9,
2876 	DDR_PCTL2_DRAMTMG12,
2877 	DDR_PCTL2_DRAMTMG13,
2878 	DDR_PCTL2_DRAMTMG14,
2879 	DDR_PCTL2_ZQCTL0,
2880 	DDR_PCTL2_DFITMG0,
2881 	DDR_PCTL2_ODTCFG
2882 };
2883 
2884 static const u16 phy_need_update_reg[] = {
2885 	0x14,
2886 	0x18,
2887 	0x1c
2888 };
2889 
2890 static void pre_set_rate(struct dram_info *dram,
2891 			 struct rv1126_sdram_params *sdram_params,
2892 			 u32 dst_fsp, u32 dst_fsp_lp4)
2893 {
2894 	u32 i, j, find;
2895 	void __iomem *pctl_base = dram->pctl;
2896 	void __iomem *phy_base = dram->phy;
2897 	u32 phy_offset;
2898 	u32 mr_tmp;
2899 	u32 dramtype = sdram_params->base.dramtype;
2900 
2901 	sw_set_req(dram);
2902 	/* pctl timing update */
2903 	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2904 		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2905 		     j++) {
2906 			if (sdram_params->pctl_regs.pctl[j][0] ==
2907 			    pctl_need_update_reg[i]) {
2908 				writel(sdram_params->pctl_regs.pctl[j][1],
2909 				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2910 				       pctl_need_update_reg[i]);
2911 				find = j;
2912 				break;
2913 			}
2914 		}
2915 	}
2916 
2917 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2918 	u32 tmp, trefi;
2919 
2920 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2921 	trefi = (tmp >> 16) & 0xfff;
2922 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2923 	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2924 #endif
2925 
2926 	sw_set_ack(dram);
2927 
2928 	/* phy timing update */
2929 	if (dst_fsp == 0)
2930 		phy_offset = 0;
2931 	else
2932 		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2933 	/* cl cwl al update */
2934 	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2935 		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2936 		     j++) {
2937 			if (sdram_params->phy_regs.phy[j][0] ==
2938 			    phy_need_update_reg[i]) {
2939 				writel(sdram_params->phy_regs.phy[j][1],
2940 				       phy_base + phy_offset +
2941 				       phy_need_update_reg[i]);
2942 				find = j;
2943 				break;
2944 			}
2945 		}
2946 	}
2947 
2948 	set_ds_odt(dram, sdram_params, dst_fsp);
2949 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2950 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2951 			       DDR_PCTL2_INIT4);
2952 		/* MR13 */
2953 		pctl_write_mr(dram->pctl, 3, 13,
2954 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2955 			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2956 			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2957 		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2958 				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2959 				      ((0x2 << 6) >> dst_fsp_lp4),
2960 				       PHY_REG(phy_base, 0x1b));
2961 		/* MR3 */
2962 		pctl_write_mr(dram->pctl, 3, 3,
2963 			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2964 			      PCTL2_MR_MASK,
2965 			      dramtype);
2966 		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2967 		       PHY_REG(phy_base, 0x19));
2968 
2969 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2970 			       DDR_PCTL2_INIT3);
2971 		/* MR1 */
2972 		pctl_write_mr(dram->pctl, 3, 1,
2973 			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
2974 			      PCTL2_MR_MASK,
2975 			      dramtype);
2976 		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
2977 		       PHY_REG(phy_base, 0x17));
2978 		/* MR2 */
2979 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
2980 			      dramtype);
2981 		writel(mr_tmp & PCTL2_MR_MASK,
2982 		       PHY_REG(phy_base, 0x18));
2983 
2984 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2985 			       DDR_PCTL2_INIT6);
2986 		/* MR11 */
2987 		pctl_write_mr(dram->pctl, 3, 11,
2988 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2989 			      dramtype);
2990 		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2991 		       PHY_REG(phy_base, 0x1a));
2992 		/* MR12 */
2993 		pctl_write_mr(dram->pctl, 3, 12,
2994 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2995 			      dramtype);
2996 
2997 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2998 			       DDR_PCTL2_INIT7);
2999 		/* MR22 */
3000 		pctl_write_mr(dram->pctl, 3, 22,
3001 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3002 			      dramtype);
3003 		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3004 		       PHY_REG(phy_base, 0x1d));
3005 		/* MR14 */
3006 		pctl_write_mr(dram->pctl, 3, 14,
3007 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3008 			      dramtype);
3009 		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3010 		       PHY_REG(phy_base, 0x1c));
3011 	}
3012 
3013 	update_noc_timing(dram, sdram_params);
3014 }
3015 
3016 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
3017 			   struct rv1126_sdram_params *sdram_params)
3018 {
3019 	void __iomem *pctl_base = dram->pctl;
3020 	void __iomem *phy_base = dram->phy;
3021 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
3022 	u32 temp, temp1;
3023 	struct ddr2_3_4_lp2_3_info *ddr_info;
3024 
3025 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
3026 
3027 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
3028 
3029 	if (sdram_params->base.dramtype == LPDDR4 ||
3030 	    sdram_params->base.dramtype == LPDDR4X) {
3031 		p_fsp_param->rd_odt_up_en = 0;
3032 		p_fsp_param->rd_odt_down_en = 1;
3033 	} else {
3034 		p_fsp_param->rd_odt_up_en =
3035 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
3036 		p_fsp_param->rd_odt_down_en =
3037 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
3038 	}
3039 
3040 	if (p_fsp_param->rd_odt_up_en)
3041 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
3042 	else if (p_fsp_param->rd_odt_down_en)
3043 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
3044 	else
3045 		p_fsp_param->rd_odt = 0;
3046 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
3047 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
3048 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
3049 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
3050 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
3051 
3052 	if (sdram_params->base.dramtype == DDR3) {
3053 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3054 			     DDR_PCTL2_INIT3);
3055 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3056 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
3057 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
3058 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3059 	} else if (sdram_params->base.dramtype == DDR4) {
3060 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3061 			     DDR_PCTL2_INIT3);
3062 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3063 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
3064 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
3065 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3066 	} else if (sdram_params->base.dramtype == LPDDR3) {
3067 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3068 			     DDR_PCTL2_INIT4);
3069 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3070 		p_fsp_param->ds_pdds = temp & 0xf;
3071 
3072 		p_fsp_param->dq_odt = lp3_odt_value;
3073 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3074 	} else if (sdram_params->base.dramtype == LPDDR4 ||
3075 		   sdram_params->base.dramtype == LPDDR4X) {
3076 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3077 			     DDR_PCTL2_INIT4);
3078 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3079 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
3080 
3081 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3082 			     DDR_PCTL2_INIT6);
3083 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
3084 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
3085 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
3086 
3087 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
3088 			   readl(PHY_REG(phy_base, 0x3ce)));
3089 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3090 			    readl(PHY_REG(phy_base, 0x3de)));
3091 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3092 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3093 			   readl(PHY_REG(phy_base, 0x3cf)));
3094 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3095 			    readl(PHY_REG(phy_base, 0x3df)));
3096 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3097 		p_fsp_param->vref_ca[0] |=
3098 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3099 		p_fsp_param->vref_ca[1] |=
3100 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3101 
3102 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3103 					      3) & 0x1;
3104 	}
3105 
3106 	p_fsp_param->noc_timings.ddrtiminga0 =
3107 		sdram_params->ch.noc_timings.ddrtiminga0;
3108 	p_fsp_param->noc_timings.ddrtimingb0 =
3109 		sdram_params->ch.noc_timings.ddrtimingb0;
3110 	p_fsp_param->noc_timings.ddrtimingc0 =
3111 		sdram_params->ch.noc_timings.ddrtimingc0;
3112 	p_fsp_param->noc_timings.devtodev0 =
3113 		sdram_params->ch.noc_timings.devtodev0;
3114 	p_fsp_param->noc_timings.ddrmode =
3115 		sdram_params->ch.noc_timings.ddrmode;
3116 	p_fsp_param->noc_timings.ddr4timing =
3117 		sdram_params->ch.noc_timings.ddr4timing;
3118 	p_fsp_param->noc_timings.agingx0 =
3119 		sdram_params->ch.noc_timings.agingx0;
3120 	p_fsp_param->noc_timings.aging0 =
3121 		sdram_params->ch.noc_timings.aging0;
3122 	p_fsp_param->noc_timings.aging1 =
3123 		sdram_params->ch.noc_timings.aging1;
3124 	p_fsp_param->noc_timings.aging2 =
3125 		sdram_params->ch.noc_timings.aging2;
3126 	p_fsp_param->noc_timings.aging3 =
3127 		sdram_params->ch.noc_timings.aging3;
3128 
3129 	p_fsp_param->flag = FSP_FLAG;
3130 }
3131 
3132 #ifndef CONFIG_SPL_KERNEL_BOOT
3133 static void copy_fsp_param_to_ddr(void)
3134 {
3135 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3136 	       sizeof(fsp_param));
3137 }
3138 #endif
3139 
3140 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3141 			     struct sdram_cap_info *cap_info, u32 dram_type,
3142 			     u32 freq)
3143 {
3144 	u64 cs0_cap;
3145 	u32 die_cap;
3146 	u32 trfc_ns, trfc4_ns;
3147 	u32 trfc, txsnr;
3148 	u32 txs_abort_fast = 0;
3149 	u32 tmp;
3150 
3151 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3152 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3153 
3154 	switch (dram_type) {
3155 	case DDR3:
3156 		if (die_cap <= DIE_CAP_512MBIT)
3157 			trfc_ns = 90;
3158 		else if (die_cap <= DIE_CAP_1GBIT)
3159 			trfc_ns = 110;
3160 		else if (die_cap <= DIE_CAP_2GBIT)
3161 			trfc_ns = 160;
3162 		else if (die_cap <= DIE_CAP_4GBIT)
3163 			trfc_ns = 260;
3164 		else
3165 			trfc_ns = 350;
3166 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3167 		break;
3168 
3169 	case DDR4:
3170 		if (die_cap <= DIE_CAP_2GBIT) {
3171 			trfc_ns = 160;
3172 			trfc4_ns = 90;
3173 		} else if (die_cap <= DIE_CAP_4GBIT) {
3174 			trfc_ns = 260;
3175 			trfc4_ns = 110;
3176 		} else if (die_cap <= DIE_CAP_8GBIT) {
3177 			trfc_ns = 350;
3178 			trfc4_ns = 160;
3179 		} else {
3180 			trfc_ns = 550;
3181 			trfc4_ns = 260;
3182 		}
3183 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3184 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3185 		break;
3186 
3187 	case LPDDR3:
3188 		if (die_cap <= DIE_CAP_4GBIT)
3189 			trfc_ns = 130;
3190 		else
3191 			trfc_ns = 210;
3192 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3193 		break;
3194 
3195 	case LPDDR4:
3196 	case LPDDR4X:
3197 		if (die_cap <= DIE_CAP_2GBIT)
3198 			trfc_ns = 130;
3199 		else if (die_cap <= DIE_CAP_4GBIT)
3200 			trfc_ns = 180;
3201 		else if (die_cap <= DIE_CAP_8GBIT)
3202 			trfc_ns = 280;
3203 		else
3204 			trfc_ns = 380;
3205 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3206 		break;
3207 
3208 	default:
3209 		return;
3210 	}
3211 	trfc = (trfc_ns * freq + 999) / 1000;
3212 
3213 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3214 		switch (pctl_regs->pctl[i][0]) {
3215 		case DDR_PCTL2_RFSHTMG:
3216 			tmp = pctl_regs->pctl[i][1];
3217 			/* t_rfc_min */
3218 			tmp &= ~((u32)0x3ff);
3219 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3220 			pctl_regs->pctl[i][1] = tmp;
3221 			break;
3222 
3223 		case DDR_PCTL2_DRAMTMG8:
3224 			if (dram_type == DDR3 || dram_type == DDR4) {
3225 				tmp = pctl_regs->pctl[i][1];
3226 				/* t_xs_x32 */
3227 				tmp &= ~((u32)0x7f);
3228 				tmp |= ((txsnr + 63) / 64) & 0x7f;
3229 
3230 				if (dram_type == DDR4) {
3231 					/* t_xs_abort_x32 */
3232 					tmp &= ~((u32)(0x7f << 16));
3233 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3234 					/* t_xs_fast_x32 */
3235 					tmp &= ~((u32)(0x7f << 24));
3236 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3237 				}
3238 
3239 				pctl_regs->pctl[i][1] = tmp;
3240 			}
3241 			break;
3242 
3243 		case DDR_PCTL2_DRAMTMG14:
3244 			if (dram_type == LPDDR3 ||
3245 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3246 				tmp = pctl_regs->pctl[i][1];
3247 				/* t_xsr */
3248 				tmp &= ~((u32)0xfff);
3249 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3250 				pctl_regs->pctl[i][1] = tmp;
3251 			}
3252 			break;
3253 
3254 		default:
3255 			break;
3256 		}
3257 	}
3258 }
3259 
3260 void ddr_set_rate(struct dram_info *dram,
3261 		  struct rv1126_sdram_params *sdram_params,
3262 		  u32 freq, u32 cur_freq, u32 dst_fsp,
3263 		  u32 dst_fsp_lp4, u32 training_en)
3264 {
3265 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3266 	u32 mr_tmp;
3267 	u32 lp_stat;
3268 	u32 dramtype = sdram_params->base.dramtype;
3269 	struct rv1126_sdram_params *sdram_params_new;
3270 	void __iomem *pctl_base = dram->pctl;
3271 	void __iomem *phy_base = dram->phy;
3272 
3273 	lp_stat = low_power_update(dram, 0);
3274 	sdram_params_new = get_default_sdram_config(freq);
3275 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3276 	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3277 
3278 	pctl_modify_trfc(&sdram_params_new->pctl_regs,
3279 			 &sdram_params->ch.cap_info, dramtype, freq);
3280 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3281 
3282 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
3283 			 PCTL2_OPERATING_MODE_MASK) ==
3284 			 PCTL2_OPERATING_MODE_SR)
3285 		continue;
3286 
3287 	dest_dll_off = 0;
3288 	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3289 			  DDR_PCTL2_INIT3);
3290 	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3291 	    (dramtype == DDR4 && !(dst_init3 & 1)))
3292 		dest_dll_off = 1;
3293 
3294 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3295 	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3296 			  DDR_PCTL2_INIT3);
3297 	cur_init3 &= PCTL2_MR_MASK;
3298 	cur_dll_off = 1;
3299 	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3300 	    (dramtype == DDR4 && (cur_init3 & 1)))
3301 		cur_dll_off = 0;
3302 
3303 	if (!cur_dll_off) {
3304 		if (dramtype == DDR3)
3305 			cur_init3 |= 1;
3306 		else
3307 			cur_init3 &= ~1;
3308 		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3309 	}
3310 
3311 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3312 		     PCTL2_DIS_AUTO_REFRESH);
3313 	update_refresh_reg(dram);
3314 
3315 	enter_sr(dram, 1);
3316 
3317 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3318 	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3319 	       &dram->pmugrf->soc_con[0]);
3320 	sw_set_req(dram);
3321 	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3322 		     PCTL2_DFI_INIT_COMPLETE_EN);
3323 	sw_set_ack(dram);
3324 
3325 	sw_set_req(dram);
3326 	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3327 		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3328 	else
3329 		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3330 
3331 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3332 		     PCTL2_DIS_SRX_ZQCL);
3333 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3334 		     PCTL2_DIS_SRX_ZQCL);
3335 	sw_set_ack(dram);
3336 
3337 	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3338 	       &dram->cru->clkgate_con[21]);
3339 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3340 					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3341 					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3342 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3343 
3344 	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3345 	rkclk_set_dpll(dram, freq * MHz / 2);
3346 	phy_pll_set(dram, freq * MHz, 0);
3347 	phy_pll_set(dram, freq * MHz, 1);
3348 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3349 
3350 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3351 			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3352 			&dram->pmugrf->soc_con[0]);
3353 	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3354 	       &dram->cru->clkgate_con[21]);
3355 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3356 					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3357 					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3358 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3359 	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3360 	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
3361 		continue;
3362 
3363 	sw_set_req(dram);
3364 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3365 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3366 	sw_set_ack(dram);
3367 	update_refresh_reg(dram);
3368 	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3369 
3370 	enter_sr(dram, 0);
3371 
3372 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3373 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3374 
3375 	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3376 	if (dramtype == LPDDR3) {
3377 		pctl_write_mr(dram->pctl, 3, 1,
3378 			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3379 			      PCTL2_MR_MASK,
3380 			      dramtype);
3381 		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3382 			      dramtype);
3383 		pctl_write_mr(dram->pctl, 3, 3,
3384 			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3385 			      PCTL2_MR_MASK,
3386 			      dramtype);
3387 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3388 	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3389 		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3390 			      dramtype);
3391 		if (!dest_dll_off) {
3392 			pctl_write_mr(dram->pctl, 3, 0,
3393 				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3394 				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
3395 				      dramtype);
3396 			udelay(2);
3397 		}
3398 		pctl_write_mr(dram->pctl, 3, 0,
3399 			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
3400 			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
3401 			      dramtype);
3402 		pctl_write_mr(dram->pctl, 3, 2,
3403 			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3404 			       PCTL2_MR_MASK), dramtype);
3405 		if (dramtype == DDR4) {
3406 			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3407 				      dramtype);
3408 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3409 				       DDR_PCTL2_INIT6);
3410 			pctl_write_mr(dram->pctl, 3, 4,
3411 				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3412 				       PCTL2_MR_MASK,
3413 				      dramtype);
3414 			pctl_write_mr(dram->pctl, 3, 5,
3415 				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
3416 				      PCTL2_MR_MASK,
3417 				      dramtype);
3418 
3419 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3420 				       DDR_PCTL2_INIT7);
3421 			pctl_write_mr(dram->pctl, 3, 6,
3422 				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
3423 				      PCTL2_MR_MASK,
3424 				      dramtype);
3425 		}
3426 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
3427 		pctl_write_mr(dram->pctl, 3, 13,
3428 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3429 			       PCTL2_MR_MASK) & (~(BIT(7)))) |
3430 			      dst_fsp_lp4 << 7, dramtype);
3431 	}
3432 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3433 		     PCTL2_DIS_AUTO_REFRESH);
3434 	update_refresh_reg(dram);
3435 
3436 	/* training */
3437 	high_freq_training(dram, sdram_params_new, dst_fsp);
3438 	low_power_update(dram, lp_stat);
3439 
3440 	save_fsp_param(dram, dst_fsp, sdram_params_new);
3441 }
3442 
3443 static void ddr_set_rate_for_fsp(struct dram_info *dram,
3444 				 struct rv1126_sdram_params *sdram_params)
3445 {
3446 	struct ddr2_3_4_lp2_3_info *ddr_info;
3447 	u32 f0;
3448 	u32 dramtype = sdram_params->base.dramtype;
3449 #ifndef CONFIG_SPL_KERNEL_BOOT
3450 	u32 f1, f2, f3;
3451 #endif
3452 
3453 	ddr_info = get_ddr_drv_odt_info(dramtype);
3454 	if (!ddr_info)
3455 		return;
3456 
3457 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3458 	     DDR_FREQ_MASK;
3459 
3460 #ifndef CONFIG_SPL_KERNEL_BOOT
3461 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3462 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
3463 
3464 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3465 	     DDR_FREQ_MASK;
3466 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3467 	     DDR_FREQ_MASK;
3468 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3469 	     DDR_FREQ_MASK;
3470 #endif
3471 
3472 	if (get_wrlvl_val(dram, sdram_params))
3473 		printascii("get wrlvl value fail\n");
3474 
3475 #ifndef CONFIG_SPL_KERNEL_BOOT
3476 	printascii("change to: ");
3477 	printdec(f1);
3478 	printascii("MHz\n");
3479 	ddr_set_rate(&dram_info, sdram_params, f1,
3480 		     sdram_params->base.ddr_freq, 1, 1, 1);
3481 	printascii("change to: ");
3482 	printdec(f2);
3483 	printascii("MHz\n");
3484 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3485 	printascii("change to: ");
3486 	printdec(f3);
3487 	printascii("MHz\n");
3488 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3489 #endif
3490 	printascii("change to: ");
3491 	printdec(f0);
3492 	printascii("MHz(final freq)\n");
3493 #ifndef CONFIG_SPL_KERNEL_BOOT
3494 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3495 #else
3496 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
3497 #endif
3498 }
3499 
3500 int get_uart_config(void)
3501 {
3502 	struct sdram_head_info_index_v2 *index =
3503 		(struct sdram_head_info_index_v2 *)common_info;
3504 	struct global_info *gbl_info;
3505 
3506 	gbl_info = (struct global_info *)((void *)common_info +
3507 		index->global_index.offset * 4);
3508 
3509 	return gbl_info->uart_info;
3510 }
3511 
3512 /* return: 0 = success, other = fail */
3513 int sdram_init(void)
3514 {
3515 	struct rv1126_sdram_params *sdram_params;
3516 	int ret = 0;
3517 	struct sdram_head_info_index_v2 *index =
3518 		(struct sdram_head_info_index_v2 *)common_info;
3519 	struct global_info *gbl_info;
3520 
3521 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3522 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3523 	dram_info.grf = (void *)GRF_BASE_ADDR;
3524 	dram_info.cru = (void *)CRU_BASE_ADDR;
3525 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3526 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3527 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3528 
3529 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3530 	printascii("extended temp support\n");
3531 #endif
3532 	if (index->version_info != 2 ||
3533 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3534 	    (index->ddr3_index.size !=
3535 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3536 	    (index->ddr4_index.size !=
3537 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3538 	    (index->lp3_index.size !=
3539 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3540 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3541 	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3542 	    index->global_index.offset == 0 ||
3543 	    index->ddr3_index.offset == 0 ||
3544 	    index->ddr4_index.offset == 0 ||
3545 	    index->lp3_index.offset == 0 ||
3546 	    index->lp4_index.offset == 0 ||
3547 	    index->lp4x_index.offset == 0) {
3548 		printascii("common info error\n");
3549 		goto error;
3550 	}
3551 
3552 	gbl_info = (struct global_info *)((void *)common_info +
3553 		index->global_index.offset * 4);
3554 
3555 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3556 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3557 
3558 	sdram_params = &sdram_configs[0];
3559 	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
3560 	for (j = 0; j < ARRAY_SIZE(sdram_configs); j++)
3561 		sdram_configs[j].base.dramtype = LPDDR4X;
3562 	#endif
3563 	if (sdram_params->base.dramtype == DDR3 ||
3564 	    sdram_params->base.dramtype == DDR4) {
3565 		if (DDR_2T_INFO(gbl_info->info_2t))
3566 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3567 		else
3568 			sdram_params->pctl_regs.pctl[0][1] &=
3569 				~(0x1 << 10);
3570 	}
3571 	ret = sdram_init_detect(&dram_info, sdram_params);
3572 	if (ret) {
3573 		sdram_print_dram_type(sdram_params->base.dramtype);
3574 		printascii(", ");
3575 		printdec(sdram_params->base.ddr_freq);
3576 		printascii("MHz\n");
3577 		goto error;
3578 	}
3579 	print_ddr_info(sdram_params);
3580 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3581 	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3582 				  (u8)sdram_params->ch.cap_info.rank);
3583 #endif
3584 
3585 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3586 #ifndef CONFIG_SPL_KERNEL_BOOT
3587 	copy_fsp_param_to_ddr();
3588 #endif
3589 
3590 	ddr_set_atags(&dram_info, sdram_params);
3591 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3592 	save_rw_trn_result_to_ddr(&rw_trn_result);
3593 #endif
3594 
3595 	printascii("out\n");
3596 
3597 	return ret;
3598 error:
3599 	printascii("error\n");
3600 	return (-1);
3601 }
3602 #endif /* CONFIG_TPL_BUILD */
3603