xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 0c7f2afdd1e1c535f8dad1fb2da77397b32e5ef6)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
/* define training flag (bitmask selecting which PHY trainings to run) */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* signal class selector used by the de-skew helpers */
#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

/* de-skew modification mode: absolute value vs. signed difference */
#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

/* this driver's TPL build requires the tiny framework */
#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/*
 * Runtime handles for all register blocks the DRAM init code touches,
 * plus a few cached settings.
 */
struct dram_info {
	void __iomem *pctl;		/* DDR protocol controller (UPCTL2) */
	void __iomem *phy;		/* DDR PHY */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system GRF */
	struct ram_info info;		/* info reported to the ram uclass */
	struct rv1126_pmugrf *pmugrf;	/* PMU GRF */
	u32 sr_idle;			/* self-refresh idle setting (presumably idle cycles — confirm) */
	u32 pd_idle;			/* power-down idle setting (presumably idle cycles — confirm) */
};
55 
/* fixed physical base addresses of the blocks used during DRAM init */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* register offsets inside the bus SGRF */
#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* single file-scope instance used by the whole init path */
struct dram_info dram_info;
70 
/*
 * Per-frequency parameter sets for the DRAM type selected at build time
 * via CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE: 3 = DDR3, 0 = DDR4,
 * 6 = LPDDR3, 7/8 = LPDDR4/LPDDR4X (both share the lpddr4 tables).
 * Each .inc file supplies one rv1126_sdram_params initializer.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

/* loader parameter blob, parsed via struct sdram_head_info_index_v2 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};
116 
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training results kept for the ddr test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* per-frequency-set-point parameter cache */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* cached LPDDR3 ODT value */
static u8 lp3_odt_value;

/* write-leveling results [2][4] — presumably [rank][byte lane], confirm */
static s8 wrlvl_result[2][4];
126 
/*
 * DDR configuration 0-9.
 * Field layout (matched against `tmp` in calculate_ddrconfig()):
 *   bit [8]    rank - 1
 *   bits[7:5]  cs0 row bits - 13
 *   bit [4]    extra flag (entries 5-7 only; those are selected solely
 *              by the dual-rank fast path, since the generic match
 *              compares the low 5 bits exactly)
 *   bit [3]    set when bank bits == 3
 *   bits[2:0]  bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
140 
/*
 * DDR configuration 10-21 (DDR4 only).
 * Field layout (matched against `tmp` in calculate_ddrconfig()):
 *   bit [7]    rank - 1
 *   bits[6:4]  cs0 row bits - 13
 *   bit [3]    set only in the dual-rank equal-row fast path
 *   bits[2:1]  bw
 *   bit [0]    die bw
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
156 
/*
 * DDR configuration 22-28: same field layout as ddr_cfg_2_rbc[];
 * a hit at index i here yields ddrconfig i + 22.
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
167 
/*
 * Mapping {DDR4 ddrconfig (10-21), equivalent DDR3-style ddrconfig}.
 * calculate_ddrconfig() converts a DDR4 hit to column 1;
 * set_ctl_address_map() converts back to column 0.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
182 
/*
 * Controller address-map register values, one row per ddrconfig.
 * Each row is copied verbatim to DDR_PCTL2_ADDRMAP0.. by
 * set_ctl_address_map(); only 8 initializers are listed per row, the
 * 9th word defaults to 0.  0x1f / 0x3f mark unused address bits.
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
246 
/*
 * DQ selection table, three values per entry.  Its users are not in
 * this chunk; NOTE(review): presumably maps a DQ index to PHY register
 * selectors for de-skew — confirm against the de-skew code.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
271 
/* de-skew register group base addresses: CS0/CS1, group A/B */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/* PHY register offsets holding write-leveling results, [2][4] per rank */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
283 
/* skew-update register base per DQS lane: RX cs0/cs1 first, then TX */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
302 
303 static void rkclk_ddr_reset(struct dram_info *dram,
304 			    u32 ctl_srstn, u32 ctl_psrstn,
305 			    u32 phy_srstn, u32 phy_psrstn)
306 {
307 	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
308 	       UPCTL2_ASRSTN_REQ(ctl_srstn),
309 	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
310 
311 	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
312 	       &dram->cru->softrst_con[12]);
313 }
314 
/*
 * Program the DPLL (cru->pll[1]) to output @hz and switch the DPLL mux
 * to it.  The postdiv pairs below keep the VCO in range for the 24 MHz
 * reference; fbdiv is derived from fout = 24 MHz * fbdiv / (refdiv *
 * postdiv1 * postdiv2).  Spread-spectrum (SSMOD) settings are packed in
 * gbl_info->info_2t of the loader parameters; when a spread is
 * requested the fractional path is enabled (dsmpd = 0).
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	/* offsets in the loader blob are stored in 4-byte units */
	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the 24 MHz crystal while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	/*
	 * NOTE(review): write-mask style access (upper 16 bits enable the
	 * lower ones) clearing fields in clksel_con[64]; exact field
	 * meaning not visible in this file — confirm against the TRM.
	 */
	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait up to ~1000 us for the PLL to lock */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	/* switch the DPLL mux back to the (now locked) PLL output */
	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
380 
381 static void rkclk_configure_ddr(struct dram_info *dram,
382 				struct rv1126_sdram_params *sdram_params)
383 {
384 	/* for inno ddr phy need freq / 2 */
385 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
386 }
387 
/*
 * Select a ddrconfig index (0-28) matching the detected geometry in
 * cap_info (rank, bus width, die width, column/row/bank bits).
 *
 * DDR4 geometries are matched against ddr4_cfg_2_rbc[] (indices 10-21)
 * and then translated to their DDR3-style equivalent via
 * d4_rbc_2_d3_rbc[].  Other DRAM types are matched against
 * ddr_cfg_2_rbc[] (0-9) and ddr_cfg_2_rbc_p2[] (22-28).
 *
 * Returns the ddrconfig, or (u32)-1 if nothing matched (an error is
 * also printed to the console in that case).
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/*
		 * Dual rank with equal row count: try configs 17-20
		 * first (exact low-nibble match, row field may be
		 * smaller than the table's).
		 */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic DDR4 match over configs 10-20 */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* dual rank, equal rows, 8 banks: try configs 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/* generic match over configs 0-8 ... */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* ... then over the part-2 table, configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-resort fallback for single rank, 8 banks */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* (u32)-1 falls through here too */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* translate a DDR4 hit to its DDR3-style equivalent index */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
492 
493 static void sw_set_req(struct dram_info *dram)
494 {
495 	void __iomem *pctl_base = dram->pctl;
496 
497 	/* clear sw_done=0 */
498 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
499 }
500 
501 static void sw_set_ack(struct dram_info *dram)
502 {
503 	void __iomem *pctl_base = dram->pctl;
504 
505 	/* set sw_done=1 */
506 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
507 	while (1) {
508 		/* wait programming done */
509 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
510 				PCTL2_SW_DONE_ACK)
511 			break;
512 	}
513 }
514 
/*
 * Program the controller ADDRMAP registers for the channel's ddrconfig
 * and patch the geometry-dependent bits.
 *
 * For DDR4 the stored ddrconfig is the DDR3-style index, so it is first
 * translated back to the DDR4 row (10-21) of addrmap[].
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	/* 9 words per row -> ADDRMAP0..ADDRMAP8 */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	/* bit 31 of ADDRMAP6 flags LPDDR3 3/4-size row (non-pow2 density) */
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: mark the CS address bit unused (0x1f) */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
555 
/*
 * Configure or wait on the PHY internal PLL.
 *
 * @wait != 0: power the PLL path up (clear PHY_PD_DISB in reg 0x53) and
 * busy-wait for the lock bit in reg 0x90; @freq is ignored.
 * @wait == 0: program prediv/fbdiv/postdiv for @freq (Hz) without
 * waiting for lock.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* divider choice keeps the VCO in range per output band */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is 9 bits: low 8 in reg 0x50, bit 8 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
594 
/*
 * DDR3 PHY drive-strength register value -> ohm, sorted from weakest
 * (largest resistance) to strongest; set_ds_odt() scans it backwards
 * for the first entry whose ohm value >= the requested one.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
620 
/* DDR3 PHY ODT register value -> termination in ohm, 0 = disabled */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};
647 
/* DDR4/LPDDR3 PHY drive-strength register value -> ohm, descending */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};
673 
674 static u16 d4lp3_phy_odt_2_ohm[][2] = {
675 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
676 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
677 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
678 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
679 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
680 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
681 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
682 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
683 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
684 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
685 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
686 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
687 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
688 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
689 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
690 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
691 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
692 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
693 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
694 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
695 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
696 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
697 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
698 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
699 };
700 
/* LPDDR4 PHY drive-strength register value -> ohm, descending */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};
726 
/* LPDDR4 PHY ODT register value -> termination in ohm, 0 = disabled */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm,	87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};
753 
754 static u32 lp4_odt_calc(u32 odt_ohm)
755 {
756 	u32 odt;
757 
758 	if (odt_ohm == 0)
759 		odt = LPDDR4_DQODT_DIS;
760 	else if (odt_ohm <= 40)
761 		odt = LPDDR4_DQODT_40;
762 	else if (odt_ohm <= 48)
763 		odt = LPDDR4_DQODT_48;
764 	else if (odt_ohm <= 60)
765 		odt = LPDDR4_DQODT_60;
766 	else if (odt_ohm <= 80)
767 		odt = LPDDR4_DQODT_80;
768 	else if (odt_ohm <= 120)
769 		odt = LPDDR4_DQODT_120;
770 	else
771 		odt = LPDDR4_DQODT_240;
772 
773 	return odt;
774 }
775 
776 static void *get_ddr_drv_odt_info(u32 dramtype)
777 {
778 	struct sdram_head_info_index_v2 *index =
779 		(struct sdram_head_info_index_v2 *)common_info;
780 	void *ddr_info = 0;
781 
782 	if (dramtype == DDR4)
783 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
784 	else if (dramtype == DDR3)
785 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
786 	else if (dramtype == LPDDR3)
787 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
788 	else if (dramtype == LPDDR4)
789 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
790 	else if (dramtype == LPDDR4X)
791 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
792 	else
793 		printascii("unsupported dram type\n");
794 	return ddr_info;
795 }
796 
797 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
798 			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
799 {
800 	void __iomem *pctl_base = dram->pctl;
801 	u32 ca_vref, dq_vref;
802 
803 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
804 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
805 	else
806 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
807 
808 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
809 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
810 	else
811 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
812 
813 	if (dramtype == LPDDR4) {
814 		if (ca_vref < 100)
815 			ca_vref = 100;
816 		if (ca_vref > 420)
817 			ca_vref = 420;
818 
819 		if (ca_vref <= 300)
820 			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
821 		else
822 			ca_vref = (1 << 6) | (ca_vref - 220) / 4;
823 
824 		if (dq_vref < 100)
825 			dq_vref = 100;
826 		if (dq_vref > 420)
827 			dq_vref = 420;
828 
829 		if (dq_vref <= 300)
830 			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
831 		else
832 			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
833 	} else {
834 		ca_vref = ca_vref * 11 / 6;
835 		if (ca_vref < 150)
836 			ca_vref = 150;
837 		if (ca_vref > 629)
838 			ca_vref = 629;
839 
840 		if (ca_vref <= 449)
841 			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
842 		else
843 			ca_vref = (1 << 6) | (ca_vref - 329) / 4;
844 
845 		if (dq_vref < 150)
846 			dq_vref = 150;
847 		if (dq_vref > 629)
848 			dq_vref = 629;
849 
850 		if (dq_vref <= 449)
851 			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
852 		else
853 			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
854 	}
855 	sw_set_req(dram);
856 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
857 			DDR_PCTL2_INIT6,
858 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
859 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
860 
861 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
862 			DDR_PCTL2_INIT7,
863 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
864 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
865 	sw_set_ack(dram);
866 }
867 
868 static void set_ds_odt(struct dram_info *dram,
869 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
870 {
871 	void __iomem *phy_base = dram->phy;
872 	void __iomem *pctl_base = dram->pctl;
873 	u32 dramtype = sdram_params->base.dramtype;
874 	struct ddr2_3_4_lp2_3_info *ddr_info;
875 	struct lp4_info *lp4_info;
876 	u32 i, j, tmp;
877 	const u16 (*p_drv)[2];
878 	const u16 (*p_odt)[2];
879 	u32 drv_info, sr_info;
880 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
881 	u32 phy_odt_ohm, dram_odt_ohm;
882 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
883 	u32 phy_odt_up_en, phy_odt_dn_en;
884 	u32 sr_dq, sr_clk;
885 	u32 freq = sdram_params->base.ddr_freq;
886 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
887 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
888 	u32 phy_dq_drv = 0;
889 	u32 phy_odt_up = 0, phy_odt_dn = 0;
890 
891 	ddr_info = get_ddr_drv_odt_info(dramtype);
892 	lp4_info = (void *)ddr_info;
893 
894 	if (!ddr_info)
895 		return;
896 
897 	/* dram odt en freq control phy drv, dram odt and phy sr */
898 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
899 		drv_info = ddr_info->drv_when_odtoff;
900 		dram_odt_ohm = 0;
901 		sr_info = ddr_info->sr_when_odtoff;
902 		phy_lp4_drv_pd_en =
903 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
904 	} else {
905 		drv_info = ddr_info->drv_when_odten;
906 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
907 		sr_info = ddr_info->sr_when_odten;
908 		phy_lp4_drv_pd_en =
909 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
910 	}
911 	phy_dq_drv_ohm =
912 		DRV_INFO_PHY_DQ_DRV(drv_info);
913 	phy_clk_drv_ohm =
914 		DRV_INFO_PHY_CLK_DRV(drv_info);
915 	phy_ca_drv_ohm =
916 		DRV_INFO_PHY_CA_DRV(drv_info);
917 
918 	sr_dq = DQ_SR_INFO(sr_info);
919 	sr_clk = CLK_SR_INFO(sr_info);
920 
921 	/* phy odt en freq control dram drv and phy odt */
922 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
923 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
924 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
925 		phy_odt_ohm = 0;
926 		phy_odt_up_en = 0;
927 		phy_odt_dn_en = 0;
928 	} else {
929 		dram_drv_ohm =
930 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
931 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
932 		phy_odt_up_en =
933 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
934 		phy_odt_dn_en =
935 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
936 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
937 	}
938 
939 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
940 		if (phy_odt_ohm) {
941 			phy_odt_up_en = 0;
942 			phy_odt_dn_en = 1;
943 		}
944 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
945 			dram_caodt_ohm = 0;
946 		else
947 			dram_caodt_ohm =
948 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
949 	}
950 
951 	if (dramtype == DDR3) {
952 		p_drv = d3_phy_drv_2_ohm;
953 		p_odt = d3_phy_odt_2_ohm;
954 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 		p_drv = lp4_phy_drv_2_ohm;
956 		p_odt = lp4_phy_odt_2_ohm;
957 	} else {
958 		p_drv = d4lp3_phy_drv_2_ohm;
959 		p_odt = d4lp3_phy_odt_2_ohm;
960 	}
961 
962 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
963 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
964 			phy_dq_drv = **(p_drv + i);
965 			break;
966 		}
967 		if (i == 0)
968 			break;
969 	}
970 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
971 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
972 			phy_clk_drv = **(p_drv + i);
973 			break;
974 		}
975 		if (i == 0)
976 			break;
977 	}
978 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
979 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
980 			phy_ca_drv = **(p_drv + i);
981 			break;
982 		}
983 		if (i == 0)
984 			break;
985 	}
986 	if (!phy_odt_ohm)
987 		phy_odt = 0;
988 	else
989 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
990 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
991 				phy_odt = **(p_odt + i);
992 				break;
993 			}
994 			if (i == 0)
995 				break;
996 		}
997 
998 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
999 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1000 			vref_inner = 0x80;
1001 		else if (phy_odt_up_en)
1002 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1003 				     (dram_drv_ohm + phy_odt_ohm);
1004 		else
1005 			vref_inner = phy_odt_ohm * 128 /
1006 				(phy_odt_ohm + dram_drv_ohm);
1007 
1008 		if (dramtype != DDR3 && dram_odt_ohm)
1009 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1010 				   (phy_dq_drv_ohm + dram_odt_ohm);
1011 		else
1012 			vref_out = 0x80;
1013 	} else {
1014 		/* for lp4 and lp4x*/
1015 		if (phy_odt_ohm)
1016 			vref_inner =
1017 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1018 				 256) / 1000;
1019 		else
1020 			vref_inner =
1021 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1022 				 256) / 1000;
1023 
1024 		vref_out = 0x80;
1025 	}
1026 
1027 	/* default ZQCALIB bypass mode */
1028 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1029 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1030 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1031 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1032 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1033 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1034 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1035 	} else {
1036 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1037 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1038 	}
1039 	/* clk / cmd slew rate */
1040 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1041 
1042 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1043 	if (phy_odt_up_en)
1044 		phy_odt_up = phy_odt;
1045 	if (phy_odt_dn_en)
1046 		phy_odt_dn = phy_odt;
1047 
1048 	for (i = 0; i < 4; i++) {
1049 		j = 0x110 + i * 0x10;
1050 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1051 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1052 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1053 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1054 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1055 
1056 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1057 				1 << 3, phy_lp4_drv_pd_en << 3);
1058 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1059 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1060 		/* dq slew rate */
1061 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1062 				0x1f, sr_dq);
1063 	}
1064 
1065 	/* reg_rx_vref_value_update */
1066 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1067 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1068 
1069 	/* RAM VREF */
1070 	writel(vref_out, PHY_REG(phy_base, 0x105));
1071 	if (dramtype == LPDDR3)
1072 		udelay(100);
1073 
1074 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1075 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1076 
1077 	if (dramtype == DDR3 || dramtype == DDR4) {
1078 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 				DDR_PCTL2_INIT3);
1080 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1081 	} else {
1082 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1083 				DDR_PCTL2_INIT4);
1084 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1085 	}
1086 
1087 	if (dramtype == DDR3) {
1088 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1089 		if (dram_drv_ohm == 34)
1090 			mr1_mr3 |= DDR3_DS_34;
1091 
1092 		if (dram_odt_ohm == 0)
1093 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1094 		else if (dram_odt_ohm <= 40)
1095 			mr1_mr3 |= DDR3_RTT_NOM_40;
1096 		else if (dram_odt_ohm <= 60)
1097 			mr1_mr3 |= DDR3_RTT_NOM_60;
1098 		else
1099 			mr1_mr3 |= DDR3_RTT_NOM_120;
1100 
1101 	} else if (dramtype == DDR4) {
1102 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1103 		if (dram_drv_ohm == 48)
1104 			mr1_mr3 |= DDR4_DS_48;
1105 
1106 		if (dram_odt_ohm == 0)
1107 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1108 		else if (dram_odt_ohm <= 34)
1109 			mr1_mr3 |= DDR4_RTT_NOM_34;
1110 		else if (dram_odt_ohm <= 40)
1111 			mr1_mr3 |= DDR4_RTT_NOM_40;
1112 		else if (dram_odt_ohm <= 48)
1113 			mr1_mr3 |= DDR4_RTT_NOM_48;
1114 		else if (dram_odt_ohm <= 60)
1115 			mr1_mr3 |= DDR4_RTT_NOM_60;
1116 		else
1117 			mr1_mr3 |= DDR4_RTT_NOM_120;
1118 
1119 	} else if (dramtype == LPDDR3) {
1120 		if (dram_drv_ohm <= 34)
1121 			mr1_mr3 |= LPDDR3_DS_34;
1122 		else if (dram_drv_ohm <= 40)
1123 			mr1_mr3 |= LPDDR3_DS_40;
1124 		else if (dram_drv_ohm <= 48)
1125 			mr1_mr3 |= LPDDR3_DS_48;
1126 		else if (dram_drv_ohm <= 60)
1127 			mr1_mr3 |= LPDDR3_DS_60;
1128 		else if (dram_drv_ohm <= 80)
1129 			mr1_mr3 |= LPDDR3_DS_80;
1130 
1131 		if (dram_odt_ohm == 0)
1132 			lp3_odt_value = LPDDR3_ODT_DIS;
1133 		else if (dram_odt_ohm <= 60)
1134 			lp3_odt_value = LPDDR3_ODT_60;
1135 		else if (dram_odt_ohm <= 120)
1136 			lp3_odt_value = LPDDR3_ODT_120;
1137 		else
1138 			lp3_odt_value = LPDDR3_ODT_240;
1139 	} else {/* for lpddr4 and lpddr4x */
1140 		/* MR3 for lp4 PU-CAL and PDDS */
1141 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1142 		mr1_mr3 |= lp4_pu_cal;
1143 
1144 		tmp = lp4_odt_calc(dram_drv_ohm);
1145 		if (!tmp)
1146 			tmp = LPDDR4_PDDS_240;
1147 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1148 
1149 		/* MR11 for lp4 ca odt, dq odt set */
1150 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1151 			     DDR_PCTL2_INIT6);
1152 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1153 
1154 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1155 
1156 		tmp = lp4_odt_calc(dram_odt_ohm);
1157 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1158 
1159 		tmp = lp4_odt_calc(dram_caodt_ohm);
1160 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1161 		sw_set_req(dram);
1162 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1163 				DDR_PCTL2_INIT6,
1164 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1165 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1166 		sw_set_ack(dram);
1167 
1168 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1169 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1170 			     DDR_PCTL2_INIT7);
1171 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1172 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1173 
1174 		tmp = lp4_odt_calc(phy_odt_ohm);
1175 		mr22 |= tmp;
1176 		mr22 = mr22 |
1177 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1178 			LPDDR4_ODTE_CK_SHIFT) |
1179 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1180 			LPDDR4_ODTE_CS_SHIFT) |
1181 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1182 			LPDDR4_ODTD_CA_SHIFT);
1183 
1184 		sw_set_req(dram);
1185 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1186 				DDR_PCTL2_INIT7,
1187 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1188 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1189 		sw_set_ack(dram);
1190 	}
1191 
1192 	if (dramtype == DDR4 || dramtype == DDR3) {
1193 		sw_set_req(dram);
1194 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1195 				DDR_PCTL2_INIT3,
1196 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1197 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1198 		sw_set_ack(dram);
1199 	} else {
1200 		sw_set_req(dram);
1201 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1202 				DDR_PCTL2_INIT4,
1203 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1204 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1205 		sw_set_ack(dram);
1206 	}
1207 }
1208 
1209 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1210 				   struct rv1126_sdram_params *sdram_params)
1211 {
1212 	void __iomem *phy_base = dram->phy;
1213 	u32 dramtype = sdram_params->base.dramtype;
1214 	struct sdram_head_info_index_v2 *index =
1215 		(struct sdram_head_info_index_v2 *)common_info;
1216 	struct dq_map_info *map_info;
1217 
1218 	map_info = (struct dq_map_info *)((void *)common_info +
1219 		index->dq_map_index.offset * 4);
1220 
1221 	if (dramtype == LPDDR4X)
1222 		dramtype = LPDDR4;
1223 
1224 	if (dramtype <= LPDDR4)
1225 		writel((map_info->byte_map[dramtype / 4] >>
1226 			((dramtype % 4) * 8)) & 0xff,
1227 		       PHY_REG(phy_base, 0x4f));
1228 
1229 	return 0;
1230 }
1231 
/*
 * Base PHY configuration: byte remap, PLL setup, the board-supplied PHY
 * register table, dq-width selection and training-related vref ranges.
 * Register write order follows the vendor init sequence and must be kept.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the per-board PHY register table; 0xFFFFFFFF terminates it */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes map to logical bytes 0 and 1 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* enable only the byte lanes needed for the detected bus width */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1278 
1279 static int update_refresh_reg(struct dram_info *dram)
1280 {
1281 	void __iomem *pctl_base = dram->pctl;
1282 	u32 ret;
1283 
1284 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1285 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1286 
1287 	return 0;
1288 }
1289 
1290 /*
1291  * rank = 1: cs0
1292  * rank = 2: cs1
1293  */
1294 u32 read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1295 {
1296 	u32 ret;
1297 	u32 i, temp;
1298 	void __iomem *pctl_base = dram->pctl;
1299 	struct sdram_head_info_index_v2 *index =
1300 		(struct sdram_head_info_index_v2 *)common_info;
1301 	struct dq_map_info *map_info;
1302 
1303 	map_info = (struct dq_map_info *)((void *)common_info +
1304 		index->dq_map_index.offset * 4);
1305 
1306 	pctl_read_mr(pctl_base, rank, mr_num);
1307 
1308 	if (dramtype == LPDDR3) {
1309 		temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1310 		ret = 0;
1311 		for (i = 0; i < 8; i++)
1312 			ret |= ((temp >> i) & 0x1) << ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
1313 	} else {
1314 		ret = readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff;
1315 	}
1316 
1317 	return ret;
1318 }
1319 
1320 /* before call this function autorefresh should be disabled */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* wait until no previous debug refresh request is still pending */
	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	/* write both request bits of DBGCMD to issue a software refresh */
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}
1329 
1330 static void enter_sr(struct dram_info *dram, u32 en)
1331 {
1332 	void __iomem *pctl_base = dram->pctl;
1333 
1334 	if (en) {
1335 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1336 		while (1) {
1337 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1338 			      PCTL2_SELFREF_TYPE_MASK) ==
1339 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1340 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1341 			      PCTL2_OPERATING_MODE_MASK) ==
1342 			     PCTL2_OPERATING_MODE_SR))
1343 				break;
1344 		}
1345 	} else {
1346 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1347 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1348 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1349 			continue;
1350 	}
1351 }
1352 
1353 void record_dq_prebit(struct dram_info *dram)
1354 {
1355 	u32 group, i, tmp;
1356 	void __iomem *phy_base = dram->phy;
1357 
1358 	for (group = 0; group < 4; group++) {
1359 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1360 			/* l_loop_invdelaysel */
1361 			writel(dq_sel[i][0], PHY_REG(phy_base,
1362 						     grp_addr[group] + 0x2c));
1363 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1364 			writel(tmp, PHY_REG(phy_base,
1365 					    grp_addr[group] + dq_sel[i][1]));
1366 
1367 			/* r_loop_invdelaysel */
1368 			writel(dq_sel[i][0], PHY_REG(phy_base,
1369 						     grp_addr[group] + 0x2d));
1370 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1371 			writel(tmp, PHY_REG(phy_base,
1372 					    grp_addr[group] + dq_sel[i][2]));
1373 		}
1374 	}
1375 }
1376 
/* Latch newly written rx dq de-skew values by pulsing PHY_0x70[4]. */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* clear bits 1 and 6 while raising the update strobe (bit 4) */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1386 
/* Latch newly written tx dq de-skew values by pulsing PHY_0xc[6]. */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* PHY_0x7a[1] is reg_dq_wr_train_en elsewhere in this file; clear it */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	/* pulse the tx update strobe */
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1397 
/* Latch newly written ca de-skew values by pulsing PHY_0x22[6]. */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	/* pulse the ca update strobe */
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1407 
1408 /*
1409  * dir: 0: de-skew = delta_*
1410  *	1: de-skew = reg val - delta_*
1411  * delta_dir: value for differential signal: clk/
1412  * delta_sig: value for single signal: ca/cmd
1413  */
1414 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1415 			     int delta_sig, u32 cs, u32 dramtype)
1416 {
1417 	void __iomem *phy_base = dram->phy;
1418 	u32 i, cs_en, tmp;
1419 	u32 dfi_lp_stat = 0;
1420 
1421 	if (cs == 0)
1422 		cs_en = 1;
1423 	else if (cs == 2)
1424 		cs_en = 2;
1425 	else
1426 		cs_en = 3;
1427 
1428 	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
1429 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1430 		dfi_lp_stat = 1;
1431 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1432 	}
1433 	enter_sr(dram, 1);
1434 
1435 	for (i = 0; i < 0x20; i++) {
1436 		if (dir == DESKEW_MDF_ABS_VAL)
1437 			tmp = delta_sig;
1438 		else
1439 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1440 			      delta_sig;
1441 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1442 	}
1443 
1444 	if (dir == DESKEW_MDF_ABS_VAL)
1445 		tmp = delta_dif;
1446 	else
1447 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1448 		       delta_sig + delta_dif;
1449 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1450 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1451 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1452 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1453 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1454 
1455 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1456 		update_ca_prebit(dram);
1457 	}
1458 	enter_sr(dram, 0);
1459 
1460 	if (dfi_lp_stat)
1461 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1462 
1463 }
1464 
1465 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1466 {
1467 	u32 i, j, offset = 0;
1468 	u32 min = 0x3f;
1469 	void __iomem *phy_base = dram->phy;
1470 	u32 byte_en;
1471 
1472 	if (signal == SKEW_TX_SIGNAL)
1473 		offset = 8;
1474 
1475 	if (signal == SKEW_CA_SIGNAL) {
1476 		for (i = 0; i < 0x20; i++)
1477 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1478 	} else {
1479 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1480 		for (j = offset; j < offset + rank * 4; j++) {
1481 			if (!((byte_en >> (j % 4)) & 1))
1482 				continue;
1483 			for (i = 0; i < 11; i++)
1484 				min = MIN(min,
1485 					  readl(PHY_REG(phy_base,
1486 							dqs_dq_skew_adr[j] +
1487 							i)));
1488 		}
1489 	}
1490 
1491 	return min;
1492 }
1493 
1494 static u32 low_power_update(struct dram_info *dram, u32 en)
1495 {
1496 	void __iomem *pctl_base = dram->pctl;
1497 	u32 lp_stat = 0;
1498 
1499 	if (en) {
1500 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1501 	} else {
1502 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1503 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1504 	}
1505 
1506 	return lp_stat;
1507 }
1508 
1509 /*
1510  * signal:
1511  * dir: 0: de-skew = delta_*
1512  *	1: de-skew = reg val - delta_*
1513  * delta_dir: value for differential signal: dqs
1514  * delta_sig: value for single signal: dq/dm
1515  */
1516 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1517 			     int delta_dif, int delta_sig, u32 rank)
1518 {
1519 	void __iomem *phy_base = dram->phy;
1520 	u32 i, j, tmp, offset;
1521 	u32 byte_en;
1522 
1523 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1524 
1525 	if (signal == SKEW_RX_SIGNAL)
1526 		offset = 0;
1527 	else
1528 		offset = 8;
1529 
1530 	for (j = offset; j < (offset + rank * 4); j++) {
1531 		if (!((byte_en >> (j % 4)) & 1))
1532 			continue;
1533 		for (i = 0; i < 0x9; i++) {
1534 			if (dir == DESKEW_MDF_ABS_VAL)
1535 				tmp = delta_sig;
1536 			else
1537 				tmp = delta_sig + readl(PHY_REG(phy_base,
1538 							dqs_dq_skew_adr[j] +
1539 							i));
1540 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1541 		}
1542 		if (dir == DESKEW_MDF_ABS_VAL)
1543 			tmp = delta_dif;
1544 		else
1545 			tmp = delta_dif + readl(PHY_REG(phy_base,
1546 						dqs_dq_skew_adr[j] + 9));
1547 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1548 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1549 	}
1550 	if (signal == SKEW_RX_SIGNAL)
1551 		update_dq_rx_prebit(dram);
1552 	else
1553 		update_dq_tx_prebit(dram);
1554 }
1555 
1556 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1557 {
1558 	void __iomem *phy_base = dram->phy;
1559 	u32 ret;
1560 	u32 dis_auto_zq = 0;
1561 	u32 odt_val_up, odt_val_dn;
1562 	u32 i, j;
1563 
1564 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1565 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1566 
1567 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1568 		for (i = 0; i < 4; i++) {
1569 			j = 0x110 + i * 0x10;
1570 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1571 			       PHY_REG(phy_base, j));
1572 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1573 			       PHY_REG(phy_base, j + 0x1));
1574 		}
1575 	}
1576 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1577 	/* use normal read mode for data training */
1578 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1579 
1580 	if (dramtype == DDR4)
1581 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1582 
1583 	/* choose training cs */
1584 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1585 	/* enable gate training */
1586 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1587 	udelay(50);
1588 	ret = readl(PHY_REG(phy_base, 0x91));
1589 	/* disable gate training */
1590 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1591 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1592 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1593 
1594 	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1595 
1596 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1597 		for (i = 0; i < 4; i++) {
1598 			j = 0x110 + i * 0x10;
1599 			writel(odt_val_dn, PHY_REG(phy_base, j));
1600 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1601 		}
1602 	}
1603 	return ret;
1604 }
1605 
/*
 * Write-leveling for one chip-select.  Hangs forever (deliberately, with
 * a console message) if the PHY never reports completion.  Returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch MR1 (ddr3/4) / MR value from INIT3 of the active fsp */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until the done flags match the enabled byte lanes */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1668 
/*
 * Alternating 0xaa/0x55 data pattern used by software training checks.
 * NOTE(review): element values above 0x7f rely on implementation-defined
 * conversion when plain char is signed on the target toolchain; an
 * unsigned element type (u8) would express the intent more cleanly.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1675 
1676 static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
1677 			    u32 mhz)
1678 {
1679 	void __iomem *pctl_base = dram->pctl;
1680 	void __iomem *phy_base = dram->phy;
1681 	u32 trefi_1x, trfc_1x;
1682 	u32 dis_auto_zq = 0;
1683 	u32 timeout_us = 1000;
1684 	u32 dqs_default;
1685 	u32 cur_fsp;
1686 	u32 vref_inner;
1687 	u32 i;
1688 	struct sdram_head_info_index_v2 *index =
1689 		(struct sdram_head_info_index_v2 *)common_info;
1690 	struct dq_map_info *map_info;
1691 
1692 	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
1693 	if (dramtype == DDR3 && vref_inner == 0x80) {
1694 		for (i = 0; i < 4; i++)
1695 			writel(vref_inner - 0xa,
1696 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1697 
1698 		/* reg_rx_vref_value_update */
1699 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1700 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1701 	}
1702 
1703 	map_info = (struct dq_map_info *)((void *)common_info +
1704 		index->dq_map_index.offset * 4);
1705 	/* only 1cs a time, 0:cs0 1 cs1 */
1706 	if (cs > 1)
1707 		return -1;
1708 
1709 	dqs_default = 0xf;
1710 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1711 
1712 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1713 	/* config refresh timing */
1714 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1715 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1716 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1717 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1718 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1719 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1720 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1721 	/* reg_phy_trfc */
1722 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1723 	/* reg_max_refi_cnt */
1724 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1725 
1726 	/* choose training cs */
1727 	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);
1728 
1729 	/* set dq map for ddr4 */
1730 	if (dramtype == DDR4) {
1731 		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
1732 		for (i = 0; i < 4; i++) {
1733 			writel((map_info->ddr4_dq_map[cs * 2] >>
1734 				((i % 4) * 8)) & 0xff,
1735 				PHY_REG(phy_base, 0x238 + i));
1736 			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
1737 				((i % 4) * 8)) & 0xff,
1738 				PHY_REG(phy_base, 0x2b8 + i));
1739 		}
1740 	}
1741 
1742 	/* cha_l reg_l_rd_train_dqs_default[5:0] */
1743 	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
1744 	/* cha_h reg_h_rd_train_dqs_default[5:0] */
1745 	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
1746 	/* chb_l reg_l_rd_train_dqs_default[5:0] */
1747 	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
1748 	/* chb_h reg_h_rd_train_dqs_default[5:0] */
1749 	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
1750 
1751 	/* Choose the read train auto mode */
1752 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1753 	/* Enable the auto train of the read train */
1754 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1755 
1756 	/* Wait the train done. */
1757 	while (1) {
1758 		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1759 			break;
1760 
1761 		udelay(1);
1762 		if (timeout_us-- == 0) {
1763 			printascii("error: read training timeout\n");
1764 			return -1;
1765 		}
1766 	}
1767 
1768 	/* Check the read train state */
1769 	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1770 	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1771 		printascii("error: read training error\n");
1772 		return -1;
1773 	}
1774 
1775 	/* Exit the Read Training by setting */
1776 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1777 
1778 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1779 
1780 	if (dramtype == DDR3 && vref_inner == 0x80) {
1781 		for (i = 0; i < 4; i++)
1782 			writel(vref_inner,
1783 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1784 
1785 		/* reg_rx_vref_value_update */
1786 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1787 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1788 	}
1789 
1790 	return 0;
1791 }
1792 
1793 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1794 			    u32 mhz, u32 dst_fsp)
1795 {
1796 	void __iomem *pctl_base = dram->pctl;
1797 	void __iomem *phy_base = dram->phy;
1798 	u32 trefi_1x, trfc_1x;
1799 	u32 dis_auto_zq = 0;
1800 	u32 timeout_us = 1000;
1801 	u32 cur_fsp;
1802 	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1803 
1804 	if (dramtype == LPDDR3 && mhz <= 400) {
1805 		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1806 		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1807 		cl = readl(PHY_REG(phy_base, offset));
1808 		cwl = readl(PHY_REG(phy_base, offset + 2));
1809 
1810 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1811 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1812 		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1813 	}
1814 
1815 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1816 
1817 	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1818 	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1819 	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1820 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1821 	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1822 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1823 	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1824 	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1825 	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1826 	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1827 
1828 	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1829 	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1830 
1831 	/* config refresh timing */
1832 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1833 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1834 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1835 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1836 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1837 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1838 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1839 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1840 	/* reg_phy_trfc */
1841 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1842 	/* reg_max_refi_cnt */
1843 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1844 
1845 	/* choose training cs */
1846 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1847 
1848 	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
1849 	/* 0: Use the write-leveling value. */
1850 	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
1851 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1852 
1853 	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1854 	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1855 
1856 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1857 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1858 
1859 	send_a_refresh(dram);
1860 
1861 	while (1) {
1862 		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1863 			break;
1864 
1865 		udelay(1);
1866 		if (timeout_us-- == 0) {
1867 			printascii("error: write training timeout\n");
1868 			while (1)
1869 				;
1870 		}
1871 	}
1872 
1873 	/* Check the write train state */
1874 	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1875 		printascii("error: write training error\n");
1876 		return -1;
1877 	}
1878 
1879 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1880 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1881 
1882 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1883 
1884 	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
1885 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1886 		fsp_param[dst_fsp].vref_dq[cs] =
1887 			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1888 			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1889 		/* add range info */
1890 		fsp_param[dst_fsp].vref_dq[cs] |=
1891 			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1892 	}
1893 
1894 	if (dramtype == LPDDR3 && mhz <= 400) {
1895 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1896 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1897 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1898 			       DDR_PCTL2_INIT3);
1899 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1900 			      dramtype);
1901 	}
1902 
1903 	return 0;
1904 }
1905 
1906 static int data_training(struct dram_info *dram, u32 cs,
1907 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1908 			 u32 training_flag)
1909 {
1910 	u32 ret = 0;
1911 
1912 	if (training_flag == FULL_TRAINING)
1913 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1914 				WRITE_TRAINING | READ_TRAINING;
1915 
1916 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1917 		ret = data_training_wl(dram, cs,
1918 				       sdram_params->base.dramtype,
1919 				       sdram_params->ch.cap_info.rank);
1920 		if (ret != 0)
1921 			goto out;
1922 	}
1923 
1924 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1925 		ret = data_training_rg(dram, cs,
1926 				       sdram_params->base.dramtype);
1927 		if (ret != 0)
1928 			goto out;
1929 	}
1930 
1931 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1932 		ret = data_training_rd(dram, cs,
1933 				       sdram_params->base.dramtype,
1934 				       sdram_params->base.ddr_freq);
1935 		if (ret != 0)
1936 			goto out;
1937 	}
1938 
1939 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1940 		ret = data_training_wr(dram, cs,
1941 				       sdram_params->base.dramtype,
1942 				       sdram_params->base.ddr_freq, dst_fsp);
1943 		if (ret != 0)
1944 			goto out;
1945 	}
1946 
1947 out:
1948 	return ret;
1949 }
1950 
/*
 * Run write leveling for all ranks with a fixed CA/clock deskew applied,
 * and record the per-rank, per-byte results into the wrlvl_result array
 * (bias removed) for later use by high_freq_training().
 *
 * Returns 0 on success, non-zero if any rank's write leveling failed.
 */
static int get_wrlvl_val(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params)
{
	int i, j, clk_skew;
	void __iomem *phy_base = dram->phy;
	u32 lp_stat;
	int ret;

	/* disable low-power features during training; restored at the end */
	lp_stat = low_power_update(dram, 0);

	/* train with a fixed 0x1f clock/CA skew; results stored relative to it */
	clk_skew = 0x1f;
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
			 sdram_params->base.dramtype);

	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);

	/* read back rank j / byte i leveling values and subtract the bias */
	for (j = 0; j < 2; j++)
		for (i = 0; i < 4; i++)
			wrlvl_result[j][i] =
				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
				clk_skew;

	low_power_update(dram, lp_stat);

	return ret;
}
1979 
1980 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1981 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1982 				      void __iomem *phy_base, u8 cs_num)
1983 {
1984 	int i;
1985 
1986 	result->cs_num = cs_num;
1987 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1988 			  PHY_DQ_WIDTH_MASK;
1989 	for (i = 0; i < FSP_NUM; i++)
1990 		result->fsp_mhz[i] = 0;
1991 }
1992 
1993 static void save_rw_trn_min_max(void __iomem *phy_base,
1994 				struct cs_rw_trn_result *rd_result,
1995 				struct cs_rw_trn_result *wr_result,
1996 				u8 byte_en)
1997 {
1998 	u16 phy_ofs;
1999 	u8 dqs;
2000 	u8 dq;
2001 
2002 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2003 		if ((byte_en & BIT(dqs)) == 0)
2004 			continue;
2005 
2006 		/* Channel A or B (low or high 16 bit) */
2007 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2008 		/* low or high 8 bit */
2009 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2010 		for (dq = 0; dq < 8; dq++) {
2011 			rd_result->dqs[dqs].dq_min[dq] =
2012 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2013 			rd_result->dqs[dqs].dq_max[dq] =
2014 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2015 			wr_result->dqs[dqs].dq_min[dq] =
2016 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2017 			wr_result->dqs[dqs].dq_max[dq] =
2018 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2019 		}
2020 	}
2021 }
2022 
2023 static void save_rw_trn_deskew(void __iomem *phy_base,
2024 			       struct fsp_rw_trn_result *result, u8 cs_num,
2025 			       int min_val, bool rw)
2026 {
2027 	u16 phy_ofs;
2028 	u8 cs;
2029 	u8 dq;
2030 
2031 	result->min_val = min_val;
2032 
2033 	for (cs = 0; cs < cs_num; cs++) {
2034 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2035 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2036 		for (dq = 0; dq < 8; dq++) {
2037 			result->cs[cs].dqs[0].dq_deskew[dq] =
2038 				readb(PHY_REG(phy_base, phy_ofs + dq));
2039 			result->cs[cs].dqs[1].dq_deskew[dq] =
2040 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2041 			result->cs[cs].dqs[2].dq_deskew[dq] =
2042 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2043 			result->cs[cs].dqs[3].dq_deskew[dq] =
2044 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2045 		}
2046 
2047 		result->cs[cs].dqs[0].dqs_deskew =
2048 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2049 		result->cs[cs].dqs[1].dqs_deskew =
2050 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2051 		result->cs[cs].dqs[2].dqs_deskew =
2052 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2053 		result->cs[cs].dqs[3].dqs_deskew =
2054 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2055 	}
2056 }
2057 
/*
 * Stamp the result with the eye-diagram magic flag and copy it to the
 * fixed DDR address where the ddr test tool expects to find it.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2063 #endif
2064 
/*
 * Training sequence for a high-frequency set point @fsp:
 *  1. derive clock/CA skew from the earlier write-leveling results,
 *  2. run read-gate/read/write training per rank,
 *  3. recentre the per-bit RX/TX deskew around the measured minimum,
 *  4. redo read-gate training with the final deskew applied.
 *
 * Returns 0 on success, non-zero if any training step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	/* average write-leveling result over all enabled bytes of all ranks */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	/* rank * (1 << bw) = number of enabled byte lanes */
	dqs_skew = dqs_skew /
		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));

	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* shift clock/CA only enough to keep the smallest result >= 0 */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
				if ((byte_en & BIT(i)) != 0)
					min_val = MIN(wrlvl_result[j][i], min_val);
			}

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* program rank 0 per-byte write-leveling values (regs 0x233..0x2b7) */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* reprogram the leveling registers with rank 1 values */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift all RX per-bit deskew down so the minimum becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* same for TX: use the smaller of the TX and CA minima */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* deskew changed the gate timing; re-run read-gate training */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2179 
2180 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2181 {
2182 	writel(ddrconfig, &dram->msch->deviceconf);
2183 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2184 }
2185 
2186 static void update_noc_timing(struct dram_info *dram,
2187 			      struct rv1126_sdram_params *sdram_params)
2188 {
2189 	void __iomem *pctl_base = dram->pctl;
2190 	u32 bw, bl;
2191 
2192 	bw = 8 << sdram_params->ch.cap_info.bw;
2193 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2194 
2195 	/* update the noc timing related to data bus width */
2196 	if ((bw / 8 * bl) <= 16)
2197 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2198 	else if ((bw / 8 * bl) == 32)
2199 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2200 	else if ((bw / 8 * bl) == 64)
2201 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2202 	else
2203 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2204 
2205 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2206 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2207 
2208 	if (sdram_params->base.dramtype == LPDDR4 ||
2209 	    sdram_params->base.dramtype == LPDDR4X) {
2210 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2211 			(bw == 16) ? 0x1 : 0x2;
2212 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2213 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2214 	}
2215 
2216 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2217 	       &dram->msch->ddrtiminga0);
2218 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2219 	       &dram->msch->ddrtimingb0);
2220 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2221 	       &dram->msch->ddrtimingc0);
2222 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2223 	       &dram->msch->devtodev0);
2224 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2225 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2226 	       &dram->msch->ddr4timing);
2227 }
2228 
/*
 * Configure the DDR "split" feature, which allows the high 16 bits of the
 * bus to back a smaller capacity than the low 16 bits. Computes the
 * boundary capacity, programs split mode/size into the ddrgrf, and routes
 * AXI traffic through the split logic. Does nothing when no asymmetric
 * high-16-bit capacity was detected.
 *
 * Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* cs0 high half is smaller: scale cs0 capacity down */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		/* cs1 high half is smaller (or absent entirely) */
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
				cap_info->cs1_high16bit_row));
	} else {
		/* symmetric capacities: leave the split logic untouched */
		goto out;
	}
	/* split boundary is expressed in 16 MiB (1 << 24) units */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route AXI accesses through the split logic (disable bypass) */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2274 
2275 static void split_bypass(struct dram_info *dram)
2276 {
2277 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2278 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2279 		return;
2280 
2281 	/* bypass split */
2282 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2283 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2284 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2285 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2286 		     (0x0 << SPLIT_SIZE_OFFSET));
2287 }
2288 
/*
 * Final system-level configuration after training: program ddrconfig,
 * encode the detected geometry into the pmugrf os_reg[2]/os_reg[3]
 * (read later by the OS/loader), set the per-CS device sizes in the
 * scheduler, and refresh the NoC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* bit position of the CS address from ADDRMAP0 (HIF base 6+2) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		/* above 256 MiB the cs0 window is padded up to the CS bit */
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize holds each rank's capacity in 64 MiB units */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2321 
2322 static void enable_low_power(struct dram_info *dram,
2323 			     struct rv1126_sdram_params *sdram_params)
2324 {
2325 	void __iomem *pctl_base = dram->pctl;
2326 	u32 grf_lp_con;
2327 
2328 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2329 
2330 	if (sdram_params->base.dramtype == DDR4)
2331 		grf_lp_con = (0x7 << 16) | (1 << 1);
2332 	else if (sdram_params->base.dramtype == DDR3)
2333 		grf_lp_con = (0x7 << 16) | (1 << 0);
2334 	else
2335 		grf_lp_con = (0x7 << 16) | (1 << 2);
2336 
2337 	/* en lpckdis_en */
2338 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2339 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2340 
2341 	/* enable sr, pd */
2342 	if (dram->pd_idle == 0)
2343 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2344 	else
2345 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2346 	if (dram->sr_idle == 0)
2347 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2348 	else
2349 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2350 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2351 }
2352 
/*
 * Publish boot information to the next stage via Rockchip ATAGS:
 * the debug-serial configuration and the usable DDR memory banks
 * (accounting for row_3_4 parts and any capacity lost to the split
 * feature's high-16-bit reduction).
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;
	u32 split, split_size;
	u64 reduce_cap = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* start with a clean tag list, then append each tag */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	if (cap_info->row_3_4) {
		/* 3/4-row die: only 3/4 of the nominal capacity is usable */
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
		/* split active: high 16 bits beyond split_size are unusable */
		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* bit position of the CS address from ADDRMAP0 (HIF base 6+2) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	if (cs_cap[1] && cs_pst > 27) {
		/* cs1 starts at the CS-bit boundary: report two banks */
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
	} else {
		/* contiguous memory: one bank covering both ranks */
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is populated but never passed to
	 * atags_set_tag() — confirm whether an ATAG_SOC_INFO call is
	 * intentionally omitted here.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2414 
2415 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2416 {
2417 	u32 split;
2418 
2419 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2420 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2421 		split = 0;
2422 	else
2423 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2424 			SPLIT_SIZE_MASK;
2425 
2426 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2427 			     &sdram_params->base, split);
2428 }
2429 
/*
 * Core one-pass DRAM initialization: clock/reset bring-up, PHY and pctl
 * programming, mode-register writes and read-gate training. Called twice
 * from sdram_init_detect() — first blind (@post_init == 0, errors are
 * silent so detection can retry), then after capacity detection
 * (@post_init != 0, errors are reported).
 *
 * Returns 0 on success, -1 on training/verification failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* staged reset release: phy cfg, phy pll, then pctl cfg */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI (RFSHTMG[27:16]) for extended-temperature operation */
	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait until the controller leaves the Init state (STAT[2:0] != 0) */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		/* replay the ODT/VREF mode registers from the INIT6/7 values */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4) {
		/*
		 * NOTE(review): MR14 readback is checked against 0x4d as a
		 * sanity test — confirm this matches the expected VREF value.
		 */
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);

		if (mr_tmp != 0x4d)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}
	/* cs1 training only once the rank count is known (post-detect pass) */
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* convert the PHY vref code (reg 0x105) to the pctl scale */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2548 
2549 static u64 dram_detect_cap(struct dram_info *dram,
2550 			   struct rv1126_sdram_params *sdram_params,
2551 			   unsigned char channel)
2552 {
2553 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2554 	void __iomem *pctl_base = dram->pctl;
2555 	void __iomem *phy_base = dram->phy;
2556 	u32 mr8;
2557 
2558 	u32 bktmp;
2559 	u32 coltmp;
2560 	u32 rowtmp;
2561 	u32 cs;
2562 	u32 dram_type = sdram_params->base.dramtype;
2563 	u32 pwrctl;
2564 	u32 i, dq_map;
2565 	u32 byte1 = 0, byte0 = 0;
2566 	u32 tmp, byte;
2567 	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2568 	struct dq_map_info *map_info = (struct dq_map_info *)
2569 				       ((void *)common_info + index->dq_map_index.offset * 4);
2570 
2571 	cap_info->bw = dram_type == DDR3 ? 0 : 1;
2572 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2573 		if (dram_type != DDR4) {
2574 			coltmp = 12;
2575 			bktmp = 3;
2576 			if (dram_type == LPDDR2)
2577 				rowtmp = 15;
2578 			else
2579 				rowtmp = 16;
2580 
2581 			if (sdram_detect_col(cap_info, coltmp) != 0)
2582 				goto cap_err;
2583 
2584 			sdram_detect_bank(cap_info, coltmp, bktmp);
2585 			if (dram_type != LPDDR3)
2586 				sdram_detect_dbw(cap_info, dram_type);
2587 		} else {
2588 			coltmp = 10;
2589 			bktmp = 4;
2590 			rowtmp = 17;
2591 
2592 			cap_info->col = 10;
2593 			cap_info->bk = 2;
2594 			sdram_detect_bg(cap_info, coltmp);
2595 		}
2596 
2597 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2598 			goto cap_err;
2599 
2600 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2601 	} else {
2602 		cap_info->col = 10;
2603 		cap_info->bk = 3;
2604 		mr8 = read_mr(dram, 1, 8, dram_type);
2605 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2606 		mr8 = (mr8 >> 2) & 0xf;
2607 		if (mr8 >= 0 && mr8 <= 6) {
2608 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2609 		} else if (mr8 == 0xc) {
2610 			cap_info->cs0_row = 13;
2611 		} else {
2612 			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2613 			goto cap_err;
2614 		}
2615 		if (cap_info->dbw == 0)
2616 			cap_info->cs0_row++;
2617 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2618 		if (cap_info->cs0_row >= 17) {
2619 			printascii("Cap ERR: ");
2620 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2621 			goto cap_err;
2622 			// cap_info->cs0_row = 16;
2623 			// cap_info->row_3_4 = 0;
2624 		}
2625 	}
2626 
2627 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2628 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2629 
2630 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2631 		cs = 1;
2632 	else
2633 		cs = 0;
2634 	cap_info->rank = cs + 1;
2635 
2636 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2637 
2638 	tmp = data_training_rg(dram, 0, dram_type) & 0xf;
2639 
2640 	if (tmp == 0) {
2641 		cap_info->bw = 2;
2642 	} else {
2643 		if (dram_type == DDR3 || dram_type == DDR4) {
2644 			dq_map = 0;
2645 			byte = 0;
2646 			for (i = 0; i < 4; i++) {
2647 				if ((tmp & BIT(i)) == 0) {
2648 					dq_map |= byte << (i * 2);
2649 					byte++;
2650 				}
2651 			}
2652 			cap_info->bw = byte / 2;
2653 			for (i = 0; i < 4; i++) {
2654 				if ((tmp & BIT(i)) != 0) {
2655 					dq_map |= byte << (i * 2);
2656 					byte++;
2657 				}
2658 			}
2659 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24);
2660 		} else {
2661 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2662 			for (i = 0; i < 4; i++) {
2663 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2664 					byte0 = i;
2665 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2666 					byte1 = i;
2667 			}
2668 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2669 					BIT(byte0) | BIT(byte1));
2670 			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2671 				cap_info->bw = 1;
2672 			else
2673 				cap_info->bw = 0;
2674 		}
2675 	}
2676 	if (cap_info->bw > 0)
2677 		cap_info->dbw = 1;
2678 
2679 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2680 
2681 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2682 	if (cs) {
2683 		cap_info->cs1_row = cap_info->cs0_row;
2684 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2685 	} else {
2686 		cap_info->cs1_row = 0;
2687 		cap_info->cs1_high16bit_row = 0;
2688 	}
2689 
2690 	if (dram_type == LPDDR3)
2691 		sdram_detect_dbw(cap_info, dram_type);
2692 
2693 	return 0;
2694 cap_err:
2695 	return -1;
2696 }
2697 
2698 static int dram_detect_cs1_row(struct dram_info *dram,
2699 			       struct rv1126_sdram_params *sdram_params,
2700 			       unsigned char channel)
2701 {
2702 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2703 	void __iomem *pctl_base = dram->pctl;
2704 	u32 ret = 0;
2705 	void __iomem *test_addr;
2706 	u32 row, bktmp, coltmp, bw;
2707 	u64 cs0_cap;
2708 	u32 byte_mask;
2709 	u32 cs_pst;
2710 	u32 cs_add = 0;
2711 	u32 max_row;
2712 
2713 	if (cap_info->rank == 2) {
2714 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2715 			6 + 2;
2716 		if (cs_pst < 28)
2717 			cs_add = 1;
2718 
2719 		cs0_cap = 1 << cs_pst;
2720 
2721 		if (sdram_params->base.dramtype == DDR4) {
2722 			if (cap_info->dbw == 0)
2723 				bktmp = cap_info->bk + 2;
2724 			else
2725 				bktmp = cap_info->bk + 1;
2726 		} else {
2727 			bktmp = cap_info->bk;
2728 		}
2729 		bw = cap_info->bw;
2730 		coltmp = cap_info->col;
2731 
2732 		if (bw == 2)
2733 			byte_mask = 0xFFFF;
2734 		else
2735 			byte_mask = 0xFF;
2736 
2737 		max_row = (cs_pst == 31) ? 30 : 31;
2738 
2739 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2740 
2741 		row = (cap_info->cs0_row > max_row) ? max_row :
2742 			cap_info->cs0_row;
2743 
2744 		for (; row > 12; row--) {
2745 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2746 				    (u32)cs0_cap +
2747 				    (1ul << (row + bktmp + coltmp +
2748 					     cs_add + bw - 1ul)));
2749 
2750 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2751 			writel(PATTERN, test_addr);
2752 
2753 			if (((readl(test_addr) & byte_mask) ==
2754 			     (PATTERN & byte_mask)) &&
2755 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2756 			      byte_mask) == 0)) {
2757 				ret = row;
2758 				break;
2759 			}
2760 		}
2761 	}
2762 
2763 	return ret;
2764 }
2765 
2766 /* return: 0 = success, other = fail */
2767 static int sdram_init_detect(struct dram_info *dram,
2768 			     struct rv1126_sdram_params *sdram_params)
2769 {
2770 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2771 	u32 ret;
2772 	u32 sys_reg = 0;
2773 	u32 sys_reg3 = 0;
2774 	struct sdram_head_info_index_v2 *index =
2775 		(struct sdram_head_info_index_v2 *)common_info;
2776 	struct dq_map_info *map_info;
2777 
2778 	map_info = (struct dq_map_info *)((void *)common_info +
2779 		index->dq_map_index.offset * 4);
2780 
2781 	if (sdram_init_(dram, sdram_params, 0)) {
2782 		if (sdram_params->base.dramtype == DDR3) {
2783 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2784 					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2785 					(0x0 << 0)) << 24);
2786 			if (sdram_init_(dram, sdram_params, 0))
2787 				return -1;
2788 		} else {
2789 			return -1;
2790 		}
2791 	}
2792 
2793 	if (sdram_params->base.dramtype == DDR3) {
2794 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2795 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2796 			return -1;
2797 	}
2798 
2799 	split_bypass(dram);
2800 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2801 		return -1;
2802 
2803 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2804 				   sdram_params->base.dramtype);
2805 	ret = sdram_init_(dram, sdram_params, 1);
2806 	if (ret != 0)
2807 		goto out;
2808 
2809 	cap_info->cs1_row =
2810 		dram_detect_cs1_row(dram, sdram_params, 0);
2811 	if (cap_info->cs1_row) {
2812 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2813 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2814 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2815 				    sys_reg, sys_reg3, 0);
2816 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2817 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2818 	}
2819 
2820 	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2821 	split_setup(dram, sdram_params);
2822 out:
2823 	return ret;
2824 }
2825 
2826 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2827 {
2828 	u32 i;
2829 	u32 offset = 0;
2830 	struct ddr2_3_4_lp2_3_info *ddr_info;
2831 
2832 	if (!freq_mhz) {
2833 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2834 		if (ddr_info)
2835 			freq_mhz =
2836 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2837 				DDR_FREQ_MASK;
2838 		else
2839 			freq_mhz = 0;
2840 	}
2841 
2842 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2843 		if (sdram_configs[i].base.ddr_freq == 0 ||
2844 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2845 			break;
2846 	}
2847 	offset = i == 0 ? 0 : i - 1;
2848 
2849 	return &sdram_configs[offset];
2850 }
2851 
/*
 * pctl registers whose values must be copied into the destination
 * frequency set's register region when preparing a rate switch
 * (iterated by pre_set_rate()).
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};

/*
 * PHY timing registers (CL/CWL/AL) rewritten per frequency set by
 * pre_set_rate().
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2881 
/*
 * pre_set_rate() - program the destination FSP register set before a switch
 * @dram:         driver state (pctl/phy base pointers)
 * @sdram_params: parameter tables for the target frequency
 * @dst_fsp:      destination frequency-set-point index on the controller side
 * @dst_fsp_lp4:  FSP index as seen by the LPDDR4 device (selects MR13 bits)
 *
 * Copies the controller timing registers listed in pctl_need_update_reg[]
 * and the PHY registers listed in phy_need_update_reg[] from the parameter
 * tables into the dst_fsp shadow bank, reprograms drive strength/ODT, and
 * for LPDDR4/LPDDR4X writes the mode registers both to the DRAM (via
 * pctl_write_mr) and to the PHY's MR shadow registers (0x17..0x1d).
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/*
	 * pctl timing update: for each register of interest, look up its
	 * value in the 0xFFFFFFFF-terminated pctl table and write it into
	 * the dst_fsp register copy.  The scan resumes from the previous
	 * match ('find'), which assumes both lists share the same ordering.
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/*
	 * Extended temperature support: halve the refresh interval field of
	 * RFSHTMG (bits [27:16], cleared by the 0xf000ffff mask) so the DRAM
	 * is refreshed twice as often.
	 */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/*
	 * phy timing update: FSP0 uses the base register window; other FSPs
	 * have their own 3-register windows starting at PHY reg 0x382
	 * (0x387 - 5 + (dst_fsp - 1) * 3).
	 */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update; same resume-scan scheme as the pctl loop above */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/*
		 * MR13: clear bits 7/6 (FSP-OP/FSP-WR per JEDEC LPDDR4 —
		 * confirm against spec) and set them according to
		 * dst_fsp_lp4; mirror the value to PHY reg 0x1b.
		 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 (mirrored to PHY reg 0x19) */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 (mirrored to PHY reg 0x17) */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 (mirrored to PHY reg 0x18) */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 (mirrored to PHY reg 0x1a) */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 (DRAM only; no PHY mirror register is written here) */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 (mirrored to PHY reg 0x1d) */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 (mirrored to PHY reg 0x1c) */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
3007 
/*
 * save_fsp_param() - record the live controller/PHY settings for one FSP
 * @dram:         driver state
 * @dst_fsp:      frequency-set-point index whose settings are captured
 * @sdram_params: parameter set this FSP was configured/trained with
 *
 * Reads back ODT, drive-strength and vref values from the PHY, extracts the
 * mode-register fields from the controller's per-FSP INIT registers, and
 * stores them (together with the NOC timings) in fsp_param[dst_fsp], marked
 * valid with FSP_FLAG, so later boot stages can re-apply them.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		/* LPDDR4/4X: read ODT uses the pull-down path only */
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* pick the ODT value from the enabled termination path, if any */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* extract drive-strength/ODT mode-register fields per DRAM type */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * vref_ca[]: midpoint of the two training-result windows
		 * (PHY regs 0x3ae/0x3ce vs 0x3be/0x3de and the odd-offset
		 * counterparts) — presumably per-channel eye edges; confirm
		 * against the PHY register description.  Bit 6 of PHY reg
		 * 0x1e is OR-ed into both saved values.
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* snapshot the NOC timings belonging to this frequency */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark this entry as populated */
	p_fsp_param->flag = FSP_FLAG;
}
3123 
3124 #ifndef CONFIG_SPL_KERNEL_BOOT
/*
 * Stash the collected per-FSP parameter table at the fixed DRAM address
 * FSP_PARAM_STORE_ADDR so later boot stages can pick it up.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
3130 #endif
3131 
/*
 * pctl_modify_trfc() - fix up refresh/self-refresh-exit timings for die size
 * @pctl_regs: 0xffffffff-terminated controller register table to patch
 * @cap_info:  detected capacity info (per-die density derived from cs0 size,
 *             bus width and device width)
 * @dram_type: DDR3/DDR4/LPDDR3/LPDDR4/LPDDR4X; other types are left untouched
 * @freq:      target frequency in MHz
 *
 * tRFC and the exit-self-refresh times depend on the per-die density, which
 * is only known after capacity detection, so the static parameter tables are
 * corrected here.  Nanosecond values are converted to clock cycles rounding
 * up: (ns * MHz + 999) / 1000.
 */
static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
			     struct sdram_cap_info *cap_info, u32 dram_type,
			     u32 freq)
{
	u64 cs0_cap;
	u32 die_cap;
	u32 trfc_ns, trfc4_ns;
	u32 trfc, txsnr;
	u32 txs_abort_fast = 0;
	u32 tmp;

	/* per-die capacity in units of 128 Mbit (cap >> 20, bw/dbw scaled) */
	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));

	/* density -> tRFC in ns; tXSNR/tXSR derived as tRFC + 10ns */
	switch (dram_type) {
	case DDR3:
		if (die_cap <= DIE_CAP_512MBIT)
			trfc_ns = 90;
		else if (die_cap <= DIE_CAP_1GBIT)
			trfc_ns = 110;
		else if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 160;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 260;
		else
			trfc_ns = 350;
		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	case DDR4:
		/* trfc4_ns: tRFC in fine-granularity (4x) refresh mode */
		if (die_cap <= DIE_CAP_2GBIT) {
			trfc_ns = 160;
			trfc4_ns = 90;
		} else if (die_cap <= DIE_CAP_4GBIT) {
			trfc_ns = 260;
			trfc4_ns = 110;
		} else if (die_cap <= DIE_CAP_8GBIT) {
			trfc_ns = 350;
			trfc4_ns = 160;
		} else {
			trfc_ns = 550;
			trfc4_ns = 260;
		}
		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
		break;

	case LPDDR3:
		if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 130;
		else
			trfc_ns = 210;
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	case LPDDR4:
	case LPDDR4X:
		if (die_cap <= DIE_CAP_2GBIT)
			trfc_ns = 130;
		else if (die_cap <= DIE_CAP_4GBIT)
			trfc_ns = 180;
		else if (die_cap <= DIE_CAP_8GBIT)
			trfc_ns = 280;
		else
			trfc_ns = 380;
		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
		break;

	default:
		return;
	}
	trfc = (trfc_ns * freq + 999) / 1000;

	/* patch the affected registers in the table in place */
	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
		switch (pctl_regs->pctl[i][0]) {
		case DDR_PCTL2_RFSHTMG:
			tmp = pctl_regs->pctl[i][1];
			/* t_rfc_min; the /2 suggests 2-cycle units — verify */
			tmp &= ~((u32)0x3ff);
			tmp |= ((trfc + 1) / 2) & 0x3ff;
			pctl_regs->pctl[i][1] = tmp;
			break;

		case DDR_PCTL2_DRAMTMG8:
			if (dram_type == DDR3 || dram_type == DDR4) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xs_x32 (32-cycle units, rounded up) */
				tmp &= ~((u32)0x7f);
				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;

				if (dram_type == DDR4) {
					/* t_xs_abort_x32 */
					tmp &= ~((u32)(0x7f << 16));
					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
					/* t_xs_fast_x32 */
					tmp &= ~((u32)(0x7f << 24));
					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
				}

				pctl_regs->pctl[i][1] = tmp;
			}
			break;

		case DDR_PCTL2_DRAMTMG14:
			if (dram_type == LPDDR3 ||
			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
				tmp = pctl_regs->pctl[i][1];
				/* t_xsr; the /2 suggests 2-cycle units — verify */
				tmp &= ~((u32)0xfff);
				tmp |= ((txsnr + 1) / 2) & 0xfff;
				pctl_regs->pctl[i][1] = tmp;
			}
			break;

		default:
			break;
		}
	}
}
3251 
/*
 * ddr_set_rate() - switch the DDR subsystem to a new frequency
 * @dram:         driver state
 * @sdram_params: currently-active parameter set (source of rank/bw info)
 * @freq:         target frequency in MHz
 * @cur_freq:     current frequency in MHz (informational; not read here)
 * @dst_fsp:      destination controller frequency-set-point index
 * @dst_fsp_lp4:  destination FSP index on the LPDDR4 device side
 * @training_en:  currently unused — high_freq_training() is always run
 *
 * Sequence: pre-program the dst_fsp register bank, enter self-refresh,
 * gate the PHY bufferen/controller clocks, reprogram DPLL and PHY PLL,
 * ungate, switch the controller to dst_fsp, exit self-refresh, rewrite the
 * mode registers for the new frequency, retrain, and save the resulting
 * settings via save_fsp_param().  Statement order follows the required
 * hardware handshake and must not be rearranged.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* disable low-power states for the duration of the switch */
	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * Decide whether the destination frequency runs with the DRAM DLL
	 * off, from MR1 bit 0 in the destination INIT3 (DDR3: 1 = DLL
	 * disable; DDR4: 0 = DLL disable).
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* if the DLL is currently on, switch it off via MR1 before SR entry */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* isolate the PHY (bufferen) and stop the DFI init handshake */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* suppress automatic ZQCL on self-refresh exit for both FSPs */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate scheduler and controller clocks while the PLLs change */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* hold the PHY in reset across the PLL reprogramming */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* de-isolate the PHY and ungate the clocks again */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* select the destination FSP in controller (MSTR2) and PHY (reg 0xc) */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit 5 (presumably a retrain/update strobe) */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* rewrite the mode registers for the new frequency */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			/* DLL stays on: issue a DLL reset via MR0 first */
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* MR13: switch the device to the destination FSP (bit 7) */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3434 
/*
 * ddr_set_rate_for_fsp() - populate all frequency set points and settle on F0
 * @dram:         driver state (dram_info global is also used directly below)
 * @sdram_params: parameter set from initial detection
 *
 * Reads the four FSP frequencies (f0..f3) from the drv/odt info blob.  In a
 * full build it cycles through f1 (FSP1), f2 (FSP2) and f3 (FSP3) — training
 * and saving parameters for each — then ends on f0 in FSP0.  In a
 * CONFIG_SPL_KERNEL_BOOT build it switches straight to f0 using FSP1 and
 * skips the fsp_param bookkeeping.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* clear both the in-DRAM copy and the local fsp_param table */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3491 
3492 int get_uart_config(void)
3493 {
3494 	struct sdram_head_info_index_v2 *index =
3495 		(struct sdram_head_info_index_v2 *)common_info;
3496 	struct global_info *gbl_info;
3497 
3498 	gbl_info = (struct global_info *)((void *)common_info +
3499 		index->global_index.offset * 4);
3500 
3501 	return gbl_info->uart_info;
3502 }
3503 
/*
 * sdram_init() - TPL entry point for DRAM bring-up
 *
 * Fills in the static dram_info base addresses, validates the loader's
 * common_info blob (version and per-section sizes/offsets), applies the
 * 2T option for DDR3/DDR4, runs capacity detection, then cycles through the
 * frequency set points and publishes results via atags.
 *
 * Return: 0 on success, -1 on any failure (after printing "error").
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* fixed peripheral base addresses for this SoC */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Sanity-check the common_info blob: must be v2 and every section's
	 * size (in 32-bit words) and offset must match the structs compiled
	 * into this binary.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0 ||
	    index->lp4x_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];
	/* DRAM type 8 forces every config entry to LPDDR4X */
	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
		sdram_configs[j].base.dramtype = LPDDR4X;
	#endif
	/* DDR3/DDR4: set or clear the 2T mode bit (MSTR bit 10) per config */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3594 #endif /* CONFIG_TPL_BUILD */
3595