// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/hardware.h>
#include <asm/arch/rk_atags.h>
#include <asm/arch/cru_rv1126.h>
#include <asm/arch/grf_rv1126.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_rv1126.h>

/* define training flag */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* #define DDR4_READ_GATE_PREAMBLE_MODE */
#ifndef DDR4_READ_GATE_PREAMBLE_MODE
/* DDR4 read gate normal mode conflicts with 1nCK preamble */
#define DDR4_READ_GATE_2NCK_PREAMBLE
#endif

#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for RV1126!
#endif
#endif

#ifdef CONFIG_TPL_BUILD

struct dram_info {
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;
	u32 pd_idle;
};

#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
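
/*
 * The encoding above appears to mirror the value that
 * calculate_ddrconfig() builds for comparison (inferred from that
 * function below, not from Rockchip documentation):
 *   bit  [8]   rank - 1
 *   bits [7:5] cs0 row bits - 13
 *   bit  [4]   variant flag (only set in entries 5-7, which are matched
 *              by the fixed two-rank path)
 *   bit  [3]   bank bits == 3 (8 banks)
 *   bits [2:0] col bits + bw - 10
 * Worked example: 2 ranks, 15 row bits, 8 banks, col + bw = 13 gives
 * (1 << 8) | (2 << 5) | (1 << 3) | 3, which matches entry 2.
 */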

/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

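/*
 * One row per ddrconfig; the nine words in each row are copied
 * verbatim to the (presumably contiguous) uMCTL2 ADDRMAP0..ADDRMAP8
 * registers by set_ctl_address_map() below via sdram_copy_to_reg().
 */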
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
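	/*
	 * Assuming a 24 MHz OSC reference (the usual Rockchip PLL model):
	 * Fout = 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2).
	 * Worked example: for 792 MHz the else branch above picks
	 * postdiv1 = 4, postdiv2 = 1, so fbdiv = 792 * 4 / 24 = 132 and
	 * Fout = 24 * 132 / 4 = 792 MHz.
	 */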

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}
	if (delay <= 0)
		printascii("ERROR: DPLL lock timeout!\n");

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the Inno DDR PHY requires half the DDR frequency */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done to 0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done to 1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait for programming done */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
				PCTL2_SW_DONE_ACK)
			break;
	}
}

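/*
 * Quasi-dynamic uMCTL2 registers must only be changed inside a
 * sw_set_req() / sw_set_ack() pair, as done throughout this file, e.g.:
 *
 *	sw_set_req(dram);
 *	clrsetbits_le32(pctl_base + DDR_PCTL2_INIT3, ...);
 *	sw_set_ack(dram);
 *
 * sw_set_req() clears SWCTL.sw_done so the write is accepted;
 * sw_set_ack() sets sw_done again and polls SWSTAT.sw_done_ack.
 */
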
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;
	int delay = 1000;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK)) {
			udelay(1);
			if (delay-- <= 0) {
				printascii("ERROR: phy pll lock timeout!\n");
				while (1)
					;
			}
		}
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
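		/*
		 * Note: every branch keeps fbdiv / 2^postdiv = 4, so the
		 * PHY PLL output is presumably always 4x its reference,
		 * with fbdiv and postdiv traded off to keep the VCO in
		 * range (inferred from the ratios, not from an Inno PHY
		 * datasheet).
		 */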
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else if (dramtype == LPDDR4X)
		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
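	/*
	 * The LPDDR4 branch above maps a VREF target (apparently in
	 * tenths of a percent of VDDQ) onto the MR12/MR14 encoding:
	 * range 0 (bit 6 = 0) covers 100..300 in steps of 4, range 1
	 * (bit 6 = 1) covers 220..420. E.g. a target of 280 encodes as
	 * (0 << 6) | (280 - 100) / 4 = 45 (0x2d).
	 */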
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* the DRAM-ODT-enable frequency selects phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* the PHY-ODT-enable frequency selects dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				(phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}
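	/*
	 * vref_inner appears to model the midpoint of the DRAM-driver /
	 * PHY-ODT divider on a 0..255 scale (256 ~ VDDQ). Worked example:
	 * with dram_drv_ohm = 40 and a 60 ohm pull-down-only phy_odt_ohm,
	 * vref_inner = 60 * 128 / (60 + 40) = 76, i.e. half of the
	 * roughly 0.6 * VDDQ high level at the receiver.
	 */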

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else { /* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype == LPDDR4X)
		dramtype = LPDDR4;

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0) {
			byte0 = i;
			break;
		}
	}
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 1) {
			byte1 = i;
			break;
		}
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* LPDDR4 ODT is controlled by the PHY; enable cs0 ODT */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

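/*
 * Toggling RFSHCTL3.refresh_update_level tells the controller to latch
 * (possibly updated) refresh timing registers; only the level change
 * matters, not the value itself.
 */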
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
u32 read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	pctl_read_mr(pctl_base, rank, mr_num);

	if (dramtype == LPDDR3) {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
		ret = 0;
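		/*
		 * Each nibble i of lp3_dq0_7_map gives the DRAM DQ number
		 * carried on board lane i, so this loop un-swizzles the
		 * raw MR readback; e.g. a map of 0x76543210 would be an
		 * identity mapping (illustrative value, not a real board
		 * map).
		 */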
		for (i = 0; i < 8; i++)
			ret |= ((temp >> i) & 0x1) << ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
	} else {
		ret = readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff;
	}

	return ret;
}

/* auto-refresh must be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for differential signals: clk
 * delta_sig: value for single-ended signals: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for differential signals: dqs
 * delta_sig: value for single-ended signals: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;
#if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
	void __iomem *pctl_base = dram->pctl;
	u32 mr4_d4 = 0;
#endif

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4) {
#if defined(DDR4_READ_GATE_PREAMBLE_MODE)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
#elif defined(DDR4_READ_GATE_2NCK_PREAMBLE)
		mr4_d4 = readl(pctl_base + DDR_PCTL2_INIT6) >> PCTL2_DDR4_MR4_SHIFT & PCTL2_MR_MASK;
		/* 2nCK Read Preamble */
		pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4 | BIT(11), DDR4);
#endif
	}

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

#if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
	if (dramtype == DDR4)
		pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4, DDR4);
#endif

	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}

static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};

static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
1743 	/* only one cs at a time; 0: cs0, 1: cs1 */
1744 	if (cs > 1)
1745 		return -1;
1746 
1747 	dqs_default = 0xf;
1748 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1749 
1750 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1751 	/* config refresh timing */
1752 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1753 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1754 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1755 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1756 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1757 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1758 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1759 	/* reg_phy_trfc */
1760 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1761 	/* reg_max_refi_cnt */
1762 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1763 
1764 	/* choose training cs */
1765 	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);
1766 
1767 	/* set dq map for ddr4 */
1768 	if (dramtype == DDR4) {
1769 		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
1770 		for (i = 0; i < 4; i++) {
1771 			writel((map_info->ddr4_dq_map[cs * 2] >>
1772 				((i % 4) * 8)) & 0xff,
1773 				PHY_REG(phy_base, 0x238 + i));
1774 			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
1775 				((i % 4) * 8)) & 0xff,
1776 				PHY_REG(phy_base, 0x2b8 + i));
1777 		}
1778 	}
1779 
1780 	/* cha_l reg_l_rd_train_dqs_default[5:0] */
1781 	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
1782 	/* cha_h reg_h_rd_train_dqs_default[5:0] */
1783 	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
1784 	/* chb_l reg_l_rd_train_dqs_default[5:0] */
1785 	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
1786 	/* chb_h reg_h_rd_train_dqs_default[5:0] */
1787 	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
1788 
1789 	/* select auto mode for read training */
1790 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1791 	/* enable auto read training */
1792 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1793 
1794 	/* wait for training to finish */
1795 	while (1) {
1796 		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1797 			break;
1798 
1799 		udelay(1);
1800 		if (timeout_us-- == 0) {
1801 			printascii("error: read training timeout\n");
1802 			return -1;
1803 		}
1804 	}
1805 
1806 	/* check the read training result */
1807 	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1808 	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1809 		printascii("error: read training error\n");
1810 		return -1;
1811 	}
1812 
1813 	/* exit read training by clearing the enable bit */
1814 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1815 
1816 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1817 
1818 	if (dramtype == DDR3 && vref_inner == 0x80) {
1819 		for (i = 0; i < 4; i++)
1820 			writel(vref_inner,
1821 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1822 
1823 		/* reg_rx_vref_value_update */
1824 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1825 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1826 	}
1827 
1828 	return 0;
1829 }
1830 
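/*
 * Write (eye) training: train one cs at a time against a fixed
 * bank/row/column address. Reg 0x7a[4] is set so the DQS defaults come
 * from regs 0x233/0x237/0x2b3/0x2b7 (programmed by the caller) instead
 * of the raw write-leveling value.
 */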
1831 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1832 			    u32 mhz, u32 dst_fsp)
1833 {
1834 	void __iomem *pctl_base = dram->pctl;
1835 	void __iomem *phy_base = dram->phy;
1836 	u32 trefi_1x, trfc_1x;
1837 	u32 dis_auto_zq = 0;
1838 	u32 timeout_us = 1000;
1839 	u32 cur_fsp;
1840 	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1841 
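	/*
	 * LPDDR3 at/below 400MHz: temporarily force the PHY CL/CWL to 8/4
	 * and write MR2 to match (0x6 = RL8/WL4) so the training engine
	 * runs with known-good latencies; the original values are restored
	 * at the end of this function.
	 */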
1842 	if (dramtype == LPDDR3 && mhz <= 400) {
1843 		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1844 		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1845 		cl = readl(PHY_REG(phy_base, offset));
1846 		cwl = readl(PHY_REG(phy_base, offset + 2));
1847 
1848 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1849 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1850 		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1851 	}
1852 
1853 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1854 
1855 	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1856 	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1857 	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1858 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1859 	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1860 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1861 	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1862 	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1863 	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1864 	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1865 
1866 	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1867 	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1868 
1869 	/* config refresh timing */
1870 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1871 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1872 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1873 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1874 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1875 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1876 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1877 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1878 	/* reg_phy_trfc */
1879 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1880 	/* reg_max_refi_cnt */
1881 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1882 
1883 	/* choose training cs */
1884 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1885 
1886 	/* PHY_0x7a[4] reg_wr_train_dqs_default_bypass */
1887 	/*   0: use the write-leveling value */
1888 	/*   1: use regs 0x233/0x237/0x2b3/0x2b7 */
1889 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1890 
1891 	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1892 	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1893 
1894 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1895 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1896 
1897 	send_a_refresh(dram);
1898 
1899 	while (1) {
1900 		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1901 			break;
1902 
1903 		udelay(1);
1904 		if (timeout_us-- == 0) {
1905 			printascii("error: write training timeout\n");
1906 			while (1)
1907 				;
1908 		}
1909 	}
1910 
1911 	/* check the write training result */
1912 	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1913 		printascii("error: write training error\n");
1914 		return -1;
1915 	}
1916 
1917 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1918 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1919 
1920 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1921 
1922 	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
1923 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1924 		fsp_param[dst_fsp].vref_dq[cs] =
1925 			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1926 			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1927 		/* add range info */
1928 		fsp_param[dst_fsp].vref_dq[cs] |=
1929 			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1930 	}
1931 
1932 	if (dramtype == LPDDR3 && mhz <= 400) {
1933 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1934 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1935 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1936 			       DDR_PCTL2_INIT3);
1937 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1938 			      dramtype);
1939 	}
1940 
1941 	return 0;
1942 }
1943 
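/*
 * Training dispatcher: expands FULL_TRAINING into the individual steps
 * and runs them in the required order (write leveling -> gate training
 * -> read eye -> write eye), bailing out on the first failure.
 */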
1944 static int data_training(struct dram_info *dram, u32 cs,
1945 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1946 			 u32 training_flag)
1947 {
1948 	u32 ret = 0;
1949 
1950 	if (training_flag == FULL_TRAINING)
1951 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1952 				WRITE_TRAINING | READ_TRAINING;
1953 
1954 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1955 		ret = data_training_wl(dram, cs,
1956 				       sdram_params->base.dramtype,
1957 				       sdram_params->ch.cap_info.rank);
1958 		if (ret != 0)
1959 			goto out;
1960 	}
1961 
1962 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1963 		ret = data_training_rg(dram, cs,
1964 				       sdram_params->base.dramtype);
1965 		if (ret != 0)
1966 			goto out;
1967 	}
1968 
1969 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1970 		ret = data_training_rd(dram, cs,
1971 				       sdram_params->base.dramtype,
1972 				       sdram_params->base.ddr_freq);
1973 		if (ret != 0)
1974 			goto out;
1975 	}
1976 
1977 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1978 		ret = data_training_wr(dram, cs,
1979 				       sdram_params->base.dramtype,
1980 				       sdram_params->base.ddr_freq, dst_fsp);
1981 		if (ret != 0)
1982 			goto out;
1983 	}
1984 
1985 out:
1986 	return ret;
1987 }
1988 
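/*
 * Run write leveling for all ranks with CA/clk deskew forced to a known
 * midpoint (0x1f) and record the per-byte results relative to that
 * midpoint in wrlvl_result[][]; used later to recenter clk vs DQS.
 */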
1989 static int get_wrlvl_val(struct dram_info *dram,
1990 			 struct rv1126_sdram_params *sdram_params)
1991 {
1992 	int i, j, clk_skew;
1993 	void __iomem *phy_base = dram->phy;
1994 	u32 lp_stat;
1995 	int ret;
1996 
1997 	lp_stat = low_power_update(dram, 0);
1998 
1999 	clk_skew = 0x1f;
2000 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
2001 			 sdram_params->base.dramtype);
2002 
2003 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
2004 	if (sdram_params->ch.cap_info.rank == 2)
2005 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
2006 
2007 	for (j = 0; j < 2; j++)
2008 		for (i = 0; i < 4; i++)
2009 			wrlvl_result[j][i] =
2010 				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
2011 				clk_skew;
2012 
2013 	low_power_update(dram, lp_stat);
2014 
2015 	return ret;
2016 }
2017 
2018 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2019 static void init_rw_trn_result_struct(struct rw_trn_result *result,
2020 				      void __iomem *phy_base, u8 cs_num)
2021 {
2022 	int i;
2023 
2024 	result->cs_num = cs_num;
2025 	result->byte_en = readb(PHY_REG(phy_base, 0xf)) &
2026 			  PHY_DQ_WIDTH_MASK;
2027 	for (i = 0; i < FSP_NUM; i++)
2028 		result->fsp_mhz[i] = 0;
2029 }
2030 
2031 static void save_rw_trn_min_max(void __iomem *phy_base,
2032 				struct cs_rw_trn_result *rd_result,
2033 				struct cs_rw_trn_result *wr_result,
2034 				u8 byte_en)
2035 {
2036 	u16 phy_ofs;
2037 	u8 dqs;
2038 	u8 dq;
2039 
2040 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2041 		if ((byte_en & BIT(dqs)) == 0)
2042 			continue;
2043 
2044 		/* Channel A or B (low or high 16 bit) */
2045 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2046 		/* low or high 8 bit */
2047 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2048 		for (dq = 0; dq < 8; dq++) {
2049 			rd_result->dqs[dqs].dq_min[dq] =
2050 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2051 			rd_result->dqs[dqs].dq_max[dq] =
2052 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2053 			wr_result->dqs[dqs].dq_min[dq] =
2054 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2055 			wr_result->dqs[dqs].dq_max[dq] =
2056 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2057 		}
2058 	}
2059 }
2060 
2061 static void save_rw_trn_deskew(void __iomem *phy_base,
2062 			       struct fsp_rw_trn_result *result, u8 cs_num,
2063 			       int min_val, bool rw)
2064 {
2065 	u16 phy_ofs;
2066 	u8 cs;
2067 	u8 dq;
2068 
2069 	result->min_val = min_val;
2070 
2071 	for (cs = 0; cs < cs_num; cs++) {
2072 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2073 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2074 		for (dq = 0; dq < 8; dq++) {
2075 			result->cs[cs].dqs[0].dq_deskew[dq] =
2076 				readb(PHY_REG(phy_base, phy_ofs + dq));
2077 			result->cs[cs].dqs[1].dq_deskew[dq] =
2078 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2079 			result->cs[cs].dqs[2].dq_deskew[dq] =
2080 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2081 			result->cs[cs].dqs[3].dq_deskew[dq] =
2082 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2083 		}
2084 
2085 		result->cs[cs].dqs[0].dqs_deskew =
2086 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2087 		result->cs[cs].dqs[1].dqs_deskew =
2088 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2089 		result->cs[cs].dqs[2].dqs_deskew =
2090 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2091 		result->cs[cs].dqs[3].dqs_deskew =
2092 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2093 	}
2094 }
2095 
2096 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2097 {
2098 	result->flag = DDR_DQ_EYE_FLAG;
2099 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2100 }
2101 #endif
2102 
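/*
 * High-frequency training: average the write-leveling results to center
 * clk against DQS, special-case LPDDR4/X (negative results pull clk/CA
 * instead), then run gate/read/write training per rank and fold the
 * per-bit results into the common deskew so per-bit values stay small.
 */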
2103 static int high_freq_training(struct dram_info *dram,
2104 			      struct rv1126_sdram_params *sdram_params,
2105 			      u32 fsp)
2106 {
2107 	u32 i, j;
2108 	void __iomem *phy_base = dram->phy;
2109 	u32 dramtype = sdram_params->base.dramtype;
2110 	int min_val;
2111 	int dqs_skew, clk_skew, ca_skew;
2112 	u8 byte_en;
2113 	int ret;
2114 
2115 	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2116 	dqs_skew = 0;
2117 	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2118 		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2119 			if ((byte_en & BIT(i)) != 0)
2120 				dqs_skew += wrlvl_result[j][i];
2121 		}
2122 	}
2123 	dqs_skew = dqs_skew /
2124 		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2125 
2126 	clk_skew = 0x20 - dqs_skew;
2127 	dqs_skew = 0x20;
2128 
2129 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2130 		min_val = 0xff;
2131 		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2132 			for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2133 				if ((byte_en & BIT(i)) != 0)
2134 					min_val = MIN(wrlvl_result[j][i], min_val);
2135 			}
2136 
2137 		if (min_val < 0) {
2138 			clk_skew = -min_val;
2139 			ca_skew = -min_val;
2140 		} else {
2141 			clk_skew = 0;
2142 			ca_skew = 0;
2143 		}
2144 	} else if (dramtype == LPDDR3) {
2145 		ca_skew = clk_skew - 4;
2146 	} else {
2147 		ca_skew = clk_skew;
2148 	}
2149 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2150 			 dramtype);
2151 
2152 	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2153 	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2154 	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2155 	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2156 	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2157 			    READ_TRAINING | WRITE_TRAINING);
2158 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2159 	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2160 	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2161 			    &rw_trn_result.wr_fsp[fsp].cs[0],
2162 			    rw_trn_result.byte_en);
2163 #endif
2164 	if (sdram_params->ch.cap_info.rank == 2) {
2165 		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2166 		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2167 		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2168 		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2169 		ret |= data_training(dram, 1, sdram_params, fsp,
2170 				     READ_GATE_TRAINING | READ_TRAINING |
2171 				     WRITE_TRAINING);
2172 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2173 		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2174 				    &rw_trn_result.wr_fsp[fsp].cs[1],
2175 				    rw_trn_result.byte_en);
2176 #endif
2177 	}
2178 	if (ret)
2179 		goto out;
2180 
2181 	record_dq_prebit(dram);
2182 
2183 	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2184 				sdram_params->ch.cap_info.rank) * -1;
2185 	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2186 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2187 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2188 	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2189 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2190 			   SKEW_RX_SIGNAL);
2191 #endif
2192 
2193 	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2194 				    sdram_params->ch.cap_info.rank),
2195 		      get_min_value(dram, SKEW_CA_SIGNAL,
2196 				    sdram_params->ch.cap_info.rank)) * -1;
2197 
2198 	/* clk = 0, rx all skew -7, tx - min_value */
2199 	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2200 			 dramtype);
2201 
2202 	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2203 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2204 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2205 	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2206 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2207 			   SKEW_TX_SIGNAL);
2208 #endif
2209 
2210 	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2211 	if (sdram_params->ch.cap_info.rank == 2)
2212 		ret |= data_training(dram, 1, sdram_params, 0,
2213 				     READ_GATE_TRAINING);
2214 out:
2215 	return ret;
2216 }
2217 
2218 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2219 {
2220 	writel(ddrconfig, &dram->msch->deviceconf);
2221 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2222 }
2223 
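/*
 * msch burstsize encoding (derived from the code below): total burst
 * bytes = (bw / 8) * bl, mapped to 0 for <= 16 bytes, 1 for 32, 2 for
 * 64, 3 above that. Example: a 32-bit interface with BL8 transfers
 * 4 * 8 = 32 bytes, so burstsize = 1.
 */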
2224 static void update_noc_timing(struct dram_info *dram,
2225 			      struct rv1126_sdram_params *sdram_params)
2226 {
2227 	void __iomem *pctl_base = dram->pctl;
2228 	u32 bw, bl;
2229 
2230 	bw = 8 << sdram_params->ch.cap_info.bw;
2231 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2232 
2233 	/* update the noc timing related to data bus width */
2234 	if ((bw / 8 * bl) <= 16)
2235 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2236 	else if ((bw / 8 * bl) == 32)
2237 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2238 	else if ((bw / 8 * bl) == 64)
2239 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2240 	else
2241 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2242 
2243 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2244 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2245 
2246 	if (sdram_params->base.dramtype == LPDDR4 ||
2247 	    sdram_params->base.dramtype == LPDDR4X) {
2248 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2249 			(bw == 16) ? 0x1 : 0x2;
2250 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2251 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2252 	}
2253 
2254 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2255 	       &dram->msch->ddrtiminga0);
2256 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2257 	       &dram->msch->ddrtimingb0);
2258 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2259 	       &dram->msch->ddrtimingc0);
2260 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2261 	       &dram->msch->devtodev0);
2262 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2263 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2264 	       &dram->msch->ddr4timing);
2265 }
2266 
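/*
 * Asymmetric ("split") setup: when fewer row bits are usable on the high
 * 16 bits of the bus than on the low 16 bits, program the ddrgrf split
 * so accesses above the full-width capacity (in 16MB units, cap >> 24)
 * are presumably served by the low half of the bus only.
 */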
2267 static int split_setup(struct dram_info *dram,
2268 		       struct rv1126_sdram_params *sdram_params)
2269 {
2270 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2271 	u32 dramtype = sdram_params->base.dramtype;
2272 	u32 split_size, split_mode;
2273 	u64 cs_cap[2], cap;
2274 
2275 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2276 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2277 	/* only supported when the larger cap is mapped to the low 16 bits */
2278 	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2279 		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2280 		cap_info->cs0_high16bit_row));
2281 	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2282 		   (cap_info->rank == 2)) {
2283 		if (!cap_info->cs1_high16bit_row)
2284 			cap = cs_cap[0];
2285 		else
2286 			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2287 				cap_info->cs1_high16bit_row));
2288 	} else {
2289 		goto out;
2290 	}
2291 	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2292 	if (cap_info->bw == 2)
2293 		split_mode = SPLIT_MODE_32_L16_VALID;
2294 	else
2295 		split_mode = SPLIT_MODE_16_L8_VALID;
2296 
2297 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2298 		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2299 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2300 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2301 		     (split_mode << SPLIT_MODE_OFFSET) |
2302 		     (0x0 << SPLIT_BYPASS_OFFSET) |
2303 		     (split_size << SPLIT_SIZE_OFFSET));
2304 
2305 	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2306 		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2307 		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2308 
2309 out:
2310 	return 0;
2311 }
2312 
2313 static void split_bypass(struct dram_info *dram)
2314 {
2315 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2316 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2317 		return;
2318 
2319 	/* bypass split */
2320 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2321 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2322 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2323 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2324 		     (0x0 << SPLIT_SIZE_OFFSET));
2325 }
2326 
2327 static void dram_all_config(struct dram_info *dram,
2328 			    struct rv1126_sdram_params *sdram_params)
2329 {
2330 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2331 	u32 dram_type = sdram_params->base.dramtype;
2332 	void __iomem *pctl_base = dram->pctl;
2333 	u32 sys_reg2 = 0;
2334 	u32 sys_reg3 = 0;
2335 	u64 cs_cap[2];
2336 	u32 cs_pst;
2337 
2338 	set_ddrconfig(dram, cap_info->ddrconfig);
2339 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2340 			 &sys_reg3, 0);
2341 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2342 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2343 
2344 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2345 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2346 
2347 	if (cap_info->rank == 2) {
2348 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2349 			6 + 2;
2350 		if (cs_pst > 28)
2351 			cs_cap[0] = 1llu << cs_pst;
2352 	}
2353 
2354 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2355 			(((cs_cap[0] >> 20) / 64) & 0xff),
2356 			&dram->msch->devicesize);
2357 	update_noc_timing(dram, sdram_params);
2358 }
2359 
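/*
 * Enable hardware low-power features: dram-type specific clock gating in
 * ddrgrf, plus controller self-refresh (sr_idle) and power-down
 * (pd_idle) entry if the respective idle timeouts are non-zero.
 */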
2360 static void enable_low_power(struct dram_info *dram,
2361 			     struct rv1126_sdram_params *sdram_params)
2362 {
2363 	void __iomem *pctl_base = dram->pctl;
2364 	u32 grf_lp_con;
2365 
2366 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2367 
2368 	if (sdram_params->base.dramtype == DDR4)
2369 		grf_lp_con = (0x7 << 16) | (1 << 1);
2370 	else if (sdram_params->base.dramtype == DDR3)
2371 		grf_lp_con = (0x7 << 16) | (1 << 0);
2372 	else
2373 		grf_lp_con = (0x7 << 16) | (1 << 2);
2374 
2375 	/* en lpckdis_en */
2376 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2377 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2378 
2379 	/* enable sr, pd */
2380 	if (dram->pd_idle == 0)
2381 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2382 	else
2383 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2384 	if (dram->sr_idle == 0)
2385 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2386 	else
2387 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2388 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2389 }
2390 
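/*
 * Pass boot info to the next stage via Rockchip atags: the debug UART
 * config, the usable DDR banks (accounting for 3/4-row parts and split
 * mode, where half of the capacity above the split point is lost), and
 * basic SoC info.
 */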
2391 static void ddr_set_atags(struct dram_info *dram,
2392 			  struct rv1126_sdram_params *sdram_params)
2393 {
2394 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2395 	u32 dram_type = sdram_params->base.dramtype;
2396 	void __iomem *pctl_base = dram->pctl;
2397 	struct tag_serial t_serial;
2398 	struct tag_ddr_mem t_ddrmem;
2399 	struct tag_soc_info t_socinfo;
2400 	u64 cs_cap[2];
2401 	u32 cs_pst = 0;
2402 	u32 split, split_size;
2403 	u64 reduce_cap = 0;
2404 
2405 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2406 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2407 
2408 	memset(&t_serial, 0, sizeof(struct tag_serial));
2409 
2410 	t_serial.version = 0;
2411 	t_serial.enable = 1;
2412 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2413 	t_serial.baudrate = CONFIG_BAUDRATE;
2414 	t_serial.m_mode = SERIAL_M_MODE_M0;
2415 	t_serial.id = 2;
2416 
2417 	atags_destroy();
2418 	atags_set_tag(ATAG_SERIAL, &t_serial);
2419 
2420 	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
2421 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2422 	if (cap_info->row_3_4) {
2423 		cs_cap[0] = cs_cap[0] * 3 / 4;
2424 		cs_cap[1] = cs_cap[1] * 3 / 4;
2425 	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
2426 		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
2427 		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
2428 	}
2429 	t_ddrmem.version = 0;
2430 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2431 	if (cs_cap[1]) {
2432 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2433 			6 + 2;
2434 	}
2435 
2436 	if (cs_cap[1] && cs_pst > 27) {
2437 		t_ddrmem.count = 2;
2438 		t_ddrmem.bank[1] = 1 << cs_pst;
2439 		t_ddrmem.bank[2] = cs_cap[0];
2440 		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
2441 	} else {
2442 		t_ddrmem.count = 1;
2443 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
2444 	}
2445 
2446 	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);
2447 
2448 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2449 	t_socinfo.version = 0x1;
2450 	t_socinfo.name = 0x1126;
2451 	t_socinfo.flags = SOC_FLAGS_TDBT;
2452 	atags_set_tag(ATAG_SOC_INFO, &t_socinfo);
2453 }
2454 
2455 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2456 {
2457 	u32 split;
2458 
2459 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2460 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2461 		split = 0;
2462 	else
2463 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2464 			SPLIT_SIZE_MASK;
2465 
2466 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2467 			     &sdram_params->base, split);
2468 }
2469 
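/*
 * DDR3/DDR4 bus-width fixup: read-gate training returns a per-byte fail
 * bitmap; remap the working bytes to the low lanes, shrink cap_info->bw
 * to match, and return 1 if the byte map changed (the pctl regs are
 * re-derived and init must be re-run), 0 if unchanged, -1 on error.
 */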
2470 static int modify_ddr34_bw_byte_map(u8 rg_result, struct rv1126_sdram_params *sdram_params)
2471 {
2472 	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2473 	struct dq_map_info *map_info = (struct dq_map_info *)
2474 				       ((void *)common_info + index->dq_map_index.offset * 4);
2475 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2476 	u32 dramtype = sdram_params->base.dramtype;
2477 	u32 byte_map = 0;
2478 	u32 byte = 0;
2479 	u32 byte_map_shift;
2480 	int i;
2481 
2482 	if (dramtype == DDR3)
2483 		byte_map_shift = 24;
2484 	else if (dramtype == DDR4)
2485 		byte_map_shift = 0;
2486 	else
2487 		return -1;
2488 
2489 	for (i = 0; i < 4; i++) {
2490 		if ((rg_result & BIT(i)) == 0) {
2491 			byte_map |= byte << (i * 2);
2492 			byte++;
2493 		}
2494 	}
2495 	if (byte != 1 && byte != 2 && byte != 4) {
2496 		printascii("DTT result is abnormal: ");
2497 		printdec(byte);
2498 		printascii(" byte\n");
2499 		return -1;
2500 	}
2501 	cap_info->bw = byte / 2;
2502 	for (i = 0; i < 4; i++) {
2503 		if ((rg_result & BIT(i)) != 0) {
2504 			byte_map |= byte << (i * 2);
2505 			byte++;
2506 		}
2507 	}
2508 
2509 	if ((u8)byte_map != (u8)(map_info->byte_map[0] >> byte_map_shift)) {
2510 		clrsetbits_le32(&map_info->byte_map[0],
2511 				0xff << byte_map_shift, byte_map << byte_map_shift);
2512 		pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, dramtype);
2513 		return 1;
2514 	}
2515 
2516 	return 0;
2517 }
2518 
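/*
 * Core init sequence: clocks/resets -> phy_cfg -> PHY PLL -> pctl_cfg ->
 * address map -> wait for dfi_init_done -> per-type mode registers ->
 * read-gate training (with the DDR3/4 byte-map fixup on the first pass)
 * -> ODT/vref fixups -> msch/noc config and low-power enables.
 */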
2519 int sdram_init_(struct dram_info *dram, struct rv1126_sdram_params *sdram_params, u32 post_init)
2520 {
2521 	void __iomem *pctl_base = dram->pctl;
2522 	void __iomem *phy_base = dram->phy;
2523 	u32 ddr4_vref;
2524 	u32 mr_tmp, tmp;
2525 	int delay = 1000;
2526 
2527 	rkclk_configure_ddr(dram, sdram_params);
2528 
2529 	rkclk_ddr_reset(dram, 1, 1, 1, 1);
2530 	udelay(10);
2531 
2532 	rkclk_ddr_reset(dram, 1, 1, 1, 0);
2533 	phy_cfg(dram, sdram_params);
2534 
2535 	rkclk_ddr_reset(dram, 1, 1, 0, 0);
2536 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2537 
2538 	rkclk_ddr_reset(dram, 1, 0, 0, 0);
2539 	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2540 		 dram->sr_idle, dram->pd_idle);
2541 
2542 	if (sdram_params->ch.cap_info.bw == 2) {
2543 		/* 32bit interface use pageclose */
2544 		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2545 		/* pageclose = 1 with pageclose_timer = 0 errors out on LPDDR4 at 328MHz */
2546 		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2547 	} else {
2548 		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2549 	}
2550 
2551 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2552 	u32 trefi;
2553 
2554 	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2555 	trefi = (tmp >> 16) & 0xfff;
2556 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2557 	       pctl_base + DDR_PCTL2_RFSHTMG);
2558 #endif
2559 
2560 	/* set frequency_mode */
2561 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2562 	/* set target_frequency to Frequency 0 */
2563 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2564 
2565 	set_ds_odt(dram, sdram_params, 0);
2566 	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2567 	set_ctl_address_map(dram, sdram_params);
2568 
2569 	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2570 
2571 	rkclk_ddr_reset(dram, 0, 0, 0, 0);
2572 
2573 	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) {
2574 		udelay(1);
2575 		if (delay-- <= 0) {
2576 			printascii("ERROR: Cannot wait for dfi_init_done!\n");
2577 			while (1)
2578 				;
2579 		}
2580 	}
2581 
2582 	if (sdram_params->base.dramtype == LPDDR3) {
2583 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2584 	} else if (sdram_params->base.dramtype == LPDDR4 ||
2585 		   sdram_params->base.dramtype == LPDDR4X) {
2586 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2587 		/* MR11 */
2588 		pctl_write_mr(dram->pctl, 3, 11,
2589 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2590 			      LPDDR4);
2591 		/* MR12 */
2592 		pctl_write_mr(dram->pctl, 3, 12,
2593 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2594 			      LPDDR4);
2595 
2596 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2597 		/* MR22 */
2598 		pctl_write_mr(dram->pctl, 3, 22,
2599 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2600 			      LPDDR4);
2601 	} else if (sdram_params->base.dramtype == DDR4) {
2602 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7) >> PCTL2_DDR4_MR6_SHIFT & PCTL2_MR_MASK;
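		/*
		 * This appears to follow the DDR4 VrefDQ calibration entry:
		 * two MR6 writes with A7 set (enter training mode, then set
		 * the value), then one with A7 clear to exit.
		 */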
2603 		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
2604 		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
2605 		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp, DDR4);
2606 	}
2607 
2608 	if (sdram_params->base.dramtype == DDR3 && post_init == 0)
2609 		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2610 	tmp = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) & 0xf;
2611 
2612 	if (tmp != 0) {
2613 		if (post_init != 0) {
2614 			printascii("DTT cs0 error\n");
2615 			return -1;
2616 		}
2617 		if (sdram_params->base.dramtype != DDR3 || tmp == 0xf)
2618 			return -1;
2619 	}
2620 
2621 	if (sdram_params->base.dramtype == DDR3 && post_init == 0) {
2622 		if (modify_ddr34_bw_byte_map((u8)tmp, sdram_params) != 0)
2623 			return -1;
2624 	}
2625 
2626 	if (sdram_params->base.dramtype == LPDDR4) {
2627 		mr_tmp = read_mr(dram, 1, 14, LPDDR4);
2628 
2629 		if (mr_tmp != 0x4d)
2630 			return -1;
2631 	}
2632 
2633 	if (sdram_params->base.dramtype == LPDDR4 ||
2634 	    sdram_params->base.dramtype == LPDDR4X) {
2635 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2636 		/* MR14 */
2637 		pctl_write_mr(dram->pctl, 3, 14,
2638 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2639 			      LPDDR4);
2640 	}
2641 	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2642 		if (data_training(dram, 1, sdram_params, 0,
2643 				  READ_GATE_TRAINING) != 0) {
2644 			printascii("DTT cs1 error\n");
2645 			return -1;
2646 		}
2647 	}
2648 
2649 	if (sdram_params->base.dramtype == DDR4) {
2650 		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2651 		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2652 				  sdram_params->base.dramtype);
2653 	}
2654 
2655 	dram_all_config(dram, sdram_params);
2656 	enable_low_power(dram, sdram_params);
2657 
2658 	return 0;
2659 }
2660 
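/*
 * Capacity detection: for DDR2/3/4 and LPDDR2/3, probe col/bank/row by
 * write/readback; for LPDDR4/X, decode density from MR8. Example of the
 * MR8 decode below: density code 4 gives cs0_row = 14 + (4 + 1) / 2 = 16
 * (plus one extra row bit for x8 dies). Rank and bus width are then
 * confirmed with read-gate training.
 */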
2661 static u64 dram_detect_cap(struct dram_info *dram,
2662 			   struct rv1126_sdram_params *sdram_params,
2663 			   unsigned char channel)
2664 {
2665 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2666 	void __iomem *pctl_base = dram->pctl;
2667 	void __iomem *phy_base = dram->phy;
2668 	u32 mr8;
2669 
2670 	u32 bktmp;
2671 	u32 coltmp;
2672 	u32 rowtmp;
2673 	u32 cs;
2674 	u32 dram_type = sdram_params->base.dramtype;
2675 	u32 pwrctl;
2676 	u32 i, dq_map;
2677 	u32 byte1 = 0, byte0 = 0;
2678 
2679 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2680 		if (dram_type != DDR4) {
2681 			if (dram_type == DDR3)
2682 				coltmp = 11;
2683 			else
2684 				coltmp = 12;
2685 			bktmp = 3;
2686 			if (dram_type == LPDDR2)
2687 				rowtmp = 15;
2688 			else
2689 				rowtmp = 16;
2690 
2691 			if (sdram_detect_col(cap_info, coltmp) != 0)
2692 				goto cap_err;
2693 
2694 			sdram_detect_bank(cap_info, coltmp, bktmp);
2695 			if (dram_type != LPDDR3)
2696 				sdram_detect_dbw(cap_info, dram_type);
2697 		} else {
2698 			coltmp = 10;
2699 			bktmp = 4;
2700 			rowtmp = 17;
2701 
2702 			cap_info->col = 10;
2703 			cap_info->bk = 2;
2704 			sdram_detect_bg(cap_info, coltmp);
2705 		}
2706 
2707 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2708 			goto cap_err;
2709 
2710 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2711 	} else {
2712 		cap_info->col = 10;
2713 		cap_info->bk = 3;
2714 		mr8 = read_mr(dram, 1, 8, dram_type);
2715 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2716 		mr8 = (mr8 >> 2) & 0xf;
2717 		if (mr8 <= 6) {
2718 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2719 		} else if (mr8 == 0xc) {
2720 			cap_info->cs0_row = 13;
2721 		} else {
2722 			printascii("Cap ERR: failed to get LPDDR4/X capacity from MR8\n");
2723 			goto cap_err;
2724 		}
2725 		if (cap_info->dbw == 0)
2726 			cap_info->cs0_row++;
2727 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2728 		if (cap_info->cs0_row >= 17) {
2729 			printascii("Cap ERR: ");
2730 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2731 			goto cap_err;
2732 			// cap_info->cs0_row = 16;
2733 			// cap_info->row_3_4 = 0;
2734 		}
2735 	}
2736 
2737 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2738 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2739 
2740 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2741 		cs = 1;
2742 	else
2743 		cs = 0;
2744 	cap_info->rank = cs + 1;
2745 
2746 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2747 
2748 	if (dram_type != DDR3) {
2749 		if ((data_training_rg(dram, 0, dram_type) & 0xf) == 0) {
2750 			cap_info->bw = 2;
2751 		} else {
2752 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2753 			for (i = 0; i < 4; i++) {
2754 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2755 					byte0 = i;
2756 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2757 					byte1 = i;
2758 			}
2759 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2760 					BIT(byte0) | BIT(byte1));
2761 			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2762 				cap_info->bw = 1;
2763 			else
2764 				cap_info->bw = 0;
2765 		}
2766 	}
2767 
2768 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2769 
2770 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2771 	if (cs) {
2772 		cap_info->cs1_row = cap_info->cs0_row;
2773 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2774 	} else {
2775 		cap_info->cs1_row = 0;
2776 		cap_info->cs1_high16bit_row = 0;
2777 	}
2778 
2779 	if (dram_type == LPDDR3)
2780 		sdram_detect_dbw(cap_info, dram_type);
2781 
2782 	return 0;
2783 cap_err:
2784 	return -1;
2785 }
2786 
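/*
 * Probe the real cs1 row count: starting from cs0's row count (clamped
 * to what the address map can express), write a marker at the address
 * the candidate row size implies and check that it neither aliases back
 * to the base of cs1 nor gets lost; the first row count that sticks
 * wins, 0 means detection failed.
 */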
2787 static int dram_detect_cs1_row(struct dram_info *dram,
2788 			       struct rv1126_sdram_params *sdram_params,
2789 			       unsigned char channel)
2790 {
2791 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2792 	void __iomem *pctl_base = dram->pctl;
2793 	u32 ret = 0;
2794 	void __iomem *test_addr;
2795 	u32 row, bktmp, coltmp, bw;
2796 	u64 cs0_cap;
2797 	u32 byte_mask;
2798 	u32 cs_pst;
2799 	u32 cs_add = 0;
2800 	u32 max_row;
2801 
2802 	if (cap_info->rank == 2) {
2803 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2804 			6 + 2;
2805 		if (cs_pst < 28)
2806 			cs_add = 1;
2807 
2808 		cs0_cap = 1 << cs_pst;
2809 
2810 		if (sdram_params->base.dramtype == DDR4) {
2811 			if (cap_info->dbw == 0)
2812 				bktmp = cap_info->bk + 2;
2813 			else
2814 				bktmp = cap_info->bk + 1;
2815 		} else {
2816 			bktmp = cap_info->bk;
2817 		}
2818 		bw = cap_info->bw;
2819 		coltmp = cap_info->col;
2820 
2821 		if (bw == 2)
2822 			byte_mask = 0xFFFF;
2823 		else
2824 			byte_mask = 0xFF;
2825 
2826 		max_row = (cs_pst == 31) ? 30 : 31;
2827 
2828 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2829 
2830 		row = (cap_info->cs0_row > max_row) ? max_row :
2831 			cap_info->cs0_row;
2832 
2833 		for (; row > 12; row--) {
2834 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2835 				    (u32)cs0_cap +
2836 				    (1ul << (row + bktmp + coltmp +
2837 					     cs_add + bw - 1ul)));
2838 
2839 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2840 			writel(PATTERN, test_addr);
2841 
2842 			if (((readl(test_addr) & byte_mask) ==
2843 			     (PATTERN & byte_mask)) &&
2844 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2845 			      byte_mask) == 0)) {
2846 				ret = row;
2847 				break;
2848 			}
2849 		}
2850 	}
2851 
2852 	return ret;
2853 }
2854 
2855 /* return: 0 = success, other = fail */
2856 static int sdram_init_detect(struct dram_info *dram,
2857 			     struct rv1126_sdram_params *sdram_params)
2858 {
2859 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2860 	u32 ret;
2861 	u32 sys_reg = 0;
2862 	u32 sys_reg3 = 0;
2863 
2864 	if (sdram_init_(dram, sdram_params, 0)) {
2865 		if (sdram_params->base.dramtype == DDR3) {
2866 			if (sdram_init_(dram, sdram_params, 0))
2867 				return -1;
2868 		} else {
2869 			return -1;
2870 		}
2871 	}
2872 
2873 	if (sdram_params->base.dramtype == DDR3) {
2874 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2875 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2876 			return -1;
2877 	}
2878 
2879 	split_bypass(dram);
2880 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2881 		return -1;
2882 
2883 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2884 				   sdram_params->base.dramtype);
2885 	ret = sdram_init_(dram, sdram_params, 1);
2886 	if (ret != 0)
2887 		goto out;
2888 
2889 	cap_info->cs1_row =
2890 		dram_detect_cs1_row(dram, sdram_params, 0);
2891 	if (cap_info->cs1_row) {
2892 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2893 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2894 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2895 				    sys_reg, sys_reg3, 0);
2896 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2897 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2898 	}
2899 
2900 	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2901 	split_setup(dram, sdram_params);
2902 out:
2903 	return ret;
2904 }
2905 
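/*
 * Pick the highest-frequency entry from sdram_configs[] whose ddr_freq
 * does not exceed the requested freq_mhz (falling back to entry 0); a
 * freq_mhz of 0 means "use the F0 frequency from the drv/odt info".
 */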
2906 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2907 {
2908 	u32 i;
2909 	u32 offset = 0;
2910 	struct ddr2_3_4_lp2_3_info *ddr_info;
2911 
2912 	if (!freq_mhz) {
2913 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2914 		if (ddr_info)
2915 			freq_mhz =
2916 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2917 				DDR_FREQ_MASK;
2918 		else
2919 			freq_mhz = 0;
2920 	}
2921 
2922 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2923 		if (sdram_configs[i].base.ddr_freq == 0 ||
2924 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2925 			break;
2926 	}
2927 	offset = i == 0 ? 0 : i - 1;
2928 
2929 	return &sdram_configs[offset];
2930 }
2931 
2932 static const u16 pctl_need_update_reg[] = {
2933 	DDR_PCTL2_RFSHTMG,
2934 	DDR_PCTL2_INIT3,
2935 	DDR_PCTL2_INIT4,
2936 	DDR_PCTL2_INIT6,
2937 	DDR_PCTL2_INIT7,
2938 	DDR_PCTL2_DRAMTMG0,
2939 	DDR_PCTL2_DRAMTMG1,
2940 	DDR_PCTL2_DRAMTMG2,
2941 	DDR_PCTL2_DRAMTMG3,
2942 	DDR_PCTL2_DRAMTMG4,
2943 	DDR_PCTL2_DRAMTMG5,
2944 	DDR_PCTL2_DRAMTMG6,
2945 	DDR_PCTL2_DRAMTMG7,
2946 	DDR_PCTL2_DRAMTMG8,
2947 	DDR_PCTL2_DRAMTMG9,
2948 	DDR_PCTL2_DRAMTMG12,
2949 	DDR_PCTL2_DRAMTMG13,
2950 	DDR_PCTL2_DRAMTMG14,
2951 	DDR_PCTL2_ZQCTL0,
2952 	DDR_PCTL2_DFITMG0,
2953 	DDR_PCTL2_ODTCFG
2954 };
2955 
2956 static const u16 phy_need_update_reg[] = {
2957 	0x14,
2958 	0x18,
2959 	0x1c
2960 };
2961 
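/*
 * Stage the target frequency set-point: copy the timing registers listed
 * in pctl_need_update_reg/phy_need_update_reg into the dst_fsp shadow
 * bank, refresh drive/ODT settings, and (for LPDDR4/X) pre-write the
 * mode registers and mirror them into the PHY MR shadow regs.
 */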
2962 static void pre_set_rate(struct dram_info *dram,
2963 			 struct rv1126_sdram_params *sdram_params,
2964 			 u32 dst_fsp, u32 dst_fsp_lp4)
2965 {
2966 	u32 i, j, find;
2967 	void __iomem *pctl_base = dram->pctl;
2968 	void __iomem *phy_base = dram->phy;
2969 	u32 phy_offset;
2970 	u32 mr_tmp;
2971 	u32 dramtype = sdram_params->base.dramtype;
2972 
2973 	sw_set_req(dram);
2974 	/* pctl timing update */
2975 	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2976 		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2977 		     j++) {
2978 			if (sdram_params->pctl_regs.pctl[j][0] ==
2979 			    pctl_need_update_reg[i]) {
2980 				writel(sdram_params->pctl_regs.pctl[j][1],
2981 				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2982 				       pctl_need_update_reg[i]);
2983 				find = j;
2984 				break;
2985 			}
2986 		}
2987 	}
2988 
2989 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2990 	u32 tmp, trefi;
2991 
2992 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2993 	trefi = (tmp >> 16) & 0xfff;
2994 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2995 	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2996 #endif
2997 
2998 	sw_set_ack(dram);
2999 
3000 	/* phy timing update */
3001 	if (dst_fsp == 0)
3002 		phy_offset = 0;
3003 	else
3004 		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
3005 	/* cl cwl al update */
3006 	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
3007 		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
3008 		     j++) {
3009 			if (sdram_params->phy_regs.phy[j][0] ==
3010 			    phy_need_update_reg[i]) {
3011 				writel(sdram_params->phy_regs.phy[j][1],
3012 				       phy_base + phy_offset +
3013 				       phy_need_update_reg[i]);
3014 				find = j;
3015 				break;
3016 			}
3017 		}
3018 	}
3019 
3020 	set_ds_odt(dram, sdram_params, dst_fsp);
3021 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
3022 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3023 			       DDR_PCTL2_INIT4);
3024 		/* MR13 */
3025 		pctl_write_mr(dram->pctl, 3, 13,
3026 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3027 			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
3028 			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
3029 		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3030 				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
3031 				      ((0x2 << 6) >> dst_fsp_lp4),
3032 				       PHY_REG(phy_base, 0x1b));
3033 		/* MR3 */
3034 		pctl_write_mr(dram->pctl, 3, 3,
3035 			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
3036 			      PCTL2_MR_MASK,
3037 			      dramtype);
3038 		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
3039 		       PHY_REG(phy_base, 0x19));
3040 
3041 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3042 			       DDR_PCTL2_INIT3);
3043 		/* MR1 */
3044 		pctl_write_mr(dram->pctl, 3, 1,
3045 			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
3046 			      PCTL2_MR_MASK,
3047 			      dramtype);
3048 		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
3049 		       PHY_REG(phy_base, 0x17));
3050 		/* MR2 */
3051 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
3052 			      dramtype);
3053 		writel(mr_tmp & PCTL2_MR_MASK,
3054 		       PHY_REG(phy_base, 0x18));
3055 
3056 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3057 			       DDR_PCTL2_INIT6);
3058 		/* MR11 */
3059 		pctl_write_mr(dram->pctl, 3, 11,
3060 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3061 			      dramtype);
3062 		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3063 		       PHY_REG(phy_base, 0x1a));
3064 		/* MR12 */
3065 		pctl_write_mr(dram->pctl, 3, 12,
3066 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
3067 			      dramtype);
3068 
3069 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3070 			       DDR_PCTL2_INIT7);
3071 		/* MR22 */
3072 		pctl_write_mr(dram->pctl, 3, 22,
3073 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3074 			      dramtype);
3075 		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3076 		       PHY_REG(phy_base, 0x1d));
3077 		/* MR14 */
3078 		pctl_write_mr(dram->pctl, 3, 14,
3079 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3080 			      dramtype);
3081 		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3082 		       PHY_REG(phy_base, 0x1c));
3083 	}
3084 
3085 	update_noc_timing(dram, sdram_params);
3086 }
3087 
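/*
 * Snapshot everything DFS needs to restore this frequency set-point
 * later: ODT/drive/vref values from the PHY, dram-type specific MR
 * fields from the pctl shadow regs, and the noc timings. For LPDDR4/X
 * the CA vref is taken as the midpoint of the trained max/min windows.
 */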
3088 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
3089 			   struct rv1126_sdram_params *sdram_params)
3090 {
3091 	void __iomem *pctl_base = dram->pctl;
3092 	void __iomem *phy_base = dram->phy;
3093 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
3094 	u32 temp, temp1;
3095 	struct ddr2_3_4_lp2_3_info *ddr_info;
3096 
3097 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
3098 
3099 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
3100 
3101 	if (sdram_params->base.dramtype == LPDDR4 ||
3102 	    sdram_params->base.dramtype == LPDDR4X) {
3103 		p_fsp_param->rd_odt_up_en = 0;
3104 		p_fsp_param->rd_odt_down_en = 1;
3105 	} else {
3106 		p_fsp_param->rd_odt_up_en =
3107 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
3108 		p_fsp_param->rd_odt_down_en =
3109 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
3110 	}
3111 
3112 	if (p_fsp_param->rd_odt_up_en)
3113 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
3114 	else if (p_fsp_param->rd_odt_down_en)
3115 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
3116 	else
3117 		p_fsp_param->rd_odt = 0;
3118 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
3119 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
3120 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
3121 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
3122 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
3123 
3124 	if (sdram_params->base.dramtype == DDR3) {
3125 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3126 			     DDR_PCTL2_INIT3);
3127 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3128 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
3129 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
3130 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3131 	} else if (sdram_params->base.dramtype == DDR4) {
3132 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3133 			     DDR_PCTL2_INIT3);
3134 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3135 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
3136 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
3137 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3138 	} else if (sdram_params->base.dramtype == LPDDR3) {
3139 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3140 			     DDR_PCTL2_INIT4);
3141 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3142 		p_fsp_param->ds_pdds = temp & 0xf;
3143 
3144 		p_fsp_param->dq_odt = lp3_odt_value;
3145 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3146 	} else if (sdram_params->base.dramtype == LPDDR4 ||
3147 		   sdram_params->base.dramtype == LPDDR4X) {
3148 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3149 			     DDR_PCTL2_INIT4);
3150 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3151 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
3152 
3153 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3154 			     DDR_PCTL2_INIT6);
3155 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
3156 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
3157 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
3158 
3159 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
3160 			   readl(PHY_REG(phy_base, 0x3ce)));
3161 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3162 			    readl(PHY_REG(phy_base, 0x3de)));
3163 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3164 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3165 			   readl(PHY_REG(phy_base, 0x3cf)));
3166 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3167 			    readl(PHY_REG(phy_base, 0x3df)));
3168 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3169 		p_fsp_param->vref_ca[0] |=
3170 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3171 		p_fsp_param->vref_ca[1] |=
3172 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3173 
3174 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3175 					      3) & 0x1;
3176 	}
3177 
3178 	p_fsp_param->noc_timings.ddrtiminga0 =
3179 		sdram_params->ch.noc_timings.ddrtiminga0;
3180 	p_fsp_param->noc_timings.ddrtimingb0 =
3181 		sdram_params->ch.noc_timings.ddrtimingb0;
3182 	p_fsp_param->noc_timings.ddrtimingc0 =
3183 		sdram_params->ch.noc_timings.ddrtimingc0;
3184 	p_fsp_param->noc_timings.devtodev0 =
3185 		sdram_params->ch.noc_timings.devtodev0;
3186 	p_fsp_param->noc_timings.ddrmode =
3187 		sdram_params->ch.noc_timings.ddrmode;
3188 	p_fsp_param->noc_timings.ddr4timing =
3189 		sdram_params->ch.noc_timings.ddr4timing;
3190 	p_fsp_param->noc_timings.agingx0 =
3191 		sdram_params->ch.noc_timings.agingx0;
3192 	p_fsp_param->noc_timings.aging0 =
3193 		sdram_params->ch.noc_timings.aging0;
3194 	p_fsp_param->noc_timings.aging1 =
3195 		sdram_params->ch.noc_timings.aging1;
3196 	p_fsp_param->noc_timings.aging2 =
3197 		sdram_params->ch.noc_timings.aging2;
3198 	p_fsp_param->noc_timings.aging3 =
3199 		sdram_params->ch.noc_timings.aging3;
3200 
3201 	p_fsp_param->flag = FSP_FLAG;
3202 }
3203 
3204 #ifndef CONFIG_SPL_KERNEL_BOOT
3205 static void copy_fsp_param_to_ddr(void)
3206 {
3207 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3208 	       sizeof(fsp_param));
3209 }
3210 #endif
3211 
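/*
 * Patch the tRFC-derived fields for the target frequency. Cycle counts
 * are ceil(ns * freq_mhz / 1000): e.g. a 2Gbit DDR3 die (tRFC = 160ns)
 * at 784MHz needs ceil(160 * 784 / 1000) = 126 cycles, and RFSHTMG
 * stores (126 + 1) / 2 = 63, apparently because t_rfc_min is kept in
 * units of two clocks here.
 */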
3212 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3213 			     struct sdram_cap_info *cap_info, u32 dram_type,
3214 			     u32 freq)
3215 {
3216 	u64 cs0_cap;
3217 	u32 die_cap;
3218 	u32 trfc_ns, trfc4_ns;
3219 	u32 trfc, txsnr;
3220 	u32 txs_abort_fast = 0;
3221 	u32 tmp;
3222 
3223 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3224 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3225 
3226 	switch (dram_type) {
3227 	case DDR3:
3228 		if (die_cap <= DIE_CAP_512MBIT)
3229 			trfc_ns = 90;
3230 		else if (die_cap <= DIE_CAP_1GBIT)
3231 			trfc_ns = 110;
3232 		else if (die_cap <= DIE_CAP_2GBIT)
3233 			trfc_ns = 160;
3234 		else if (die_cap <= DIE_CAP_4GBIT)
3235 			trfc_ns = 260;
3236 		else
3237 			trfc_ns = 350;
3238 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3239 		break;
3240 
3241 	case DDR4:
3242 		if (die_cap <= DIE_CAP_2GBIT) {
3243 			trfc_ns = 160;
3244 			trfc4_ns = 90;
3245 		} else if (die_cap <= DIE_CAP_4GBIT) {
3246 			trfc_ns = 260;
3247 			trfc4_ns = 110;
3248 		} else if (die_cap <= DIE_CAP_8GBIT) {
3249 			trfc_ns = 350;
3250 			trfc4_ns = 160;
3251 		} else {
3252 			trfc_ns = 550;
3253 			trfc4_ns = 260;
3254 		}
3255 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3256 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3257 		break;
3258 
3259 	case LPDDR3:
3260 		if (die_cap <= DIE_CAP_4GBIT)
3261 			trfc_ns = 130;
3262 		else
3263 			trfc_ns = 210;
3264 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3265 		break;
3266 
3267 	case LPDDR4:
3268 	case LPDDR4X:
3269 		if (die_cap <= DIE_CAP_2GBIT)
3270 			trfc_ns = 130;
3271 		else if (die_cap <= DIE_CAP_4GBIT)
3272 			trfc_ns = 180;
3273 		else if (die_cap <= DIE_CAP_8GBIT)
3274 			trfc_ns = 280;
3275 		else
3276 			trfc_ns = 380;
3277 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3278 		break;
3279 
3280 	default:
3281 		return;
3282 	}
3283 	trfc = (trfc_ns * freq + 999) / 1000;
3284 
3285 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3286 		switch (pctl_regs->pctl[i][0]) {
3287 		case DDR_PCTL2_RFSHTMG:
3288 			tmp = pctl_regs->pctl[i][1];
3289 			/* t_rfc_min */
3290 			tmp &= ~((u32)0x3ff);
3291 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3292 			pctl_regs->pctl[i][1] = tmp;
3293 			break;
3294 
3295 		case DDR_PCTL2_DRAMTMG8:
3296 			if (dram_type == DDR3 || dram_type == DDR4) {
3297 				tmp = pctl_regs->pctl[i][1];
3298 				/* t_xs_x32 */
3299 				tmp &= ~((u32)0x7f);
3300 				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;
3301 
3302 				if (dram_type == DDR4) {
3303 					/* t_xs_abort_x32 */
3304 					tmp &= ~((u32)(0x7f << 16));
3305 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
3306 					/* t_xs_fast_x32 */
3307 					tmp &= ~((u32)(0x7f << 24));
3308 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
3309 				}
3310 
3311 				pctl_regs->pctl[i][1] = tmp;
3312 			}
3313 			break;
3314 
3315 		case DDR_PCTL2_DRAMTMG14:
3316 			if (dram_type == LPDDR3 ||
3317 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3318 				tmp = pctl_regs->pctl[i][1];
3319 				/* t_xsr */
3320 				tmp &= ~((u32)0xfff);
3321 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3322 				pctl_regs->pctl[i][1] = tmp;
3323 			}
3324 			break;
3325 
3326 		default:
3327 			break;
3328 		}
3329 	}
3330 }
3331 
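/*
 * Frequency switch sequence: stage the new set-point (pre_set_rate),
 * handle DDR3/DDR4 DLL on/off mode crossings, enter self-refresh, gate
 * the msch/upctl clocks while the DPLL and PHY PLL retune, wait for
 * dfi_init_complete, switch the active fsp, exit self-refresh, and
 * rewrite the mode registers for the new frequency.
 */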
3332 void ddr_set_rate(struct dram_info *dram,
3333 		  struct rv1126_sdram_params *sdram_params,
3334 		  u32 freq, u32 cur_freq, u32 dst_fsp,
3335 		  u32 dst_fsp_lp4, u32 training_en)
3336 {
3337 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3338 	u32 mr_tmp;
3339 	u32 lp_stat;
3340 	u32 dramtype = sdram_params->base.dramtype;
3341 	struct rv1126_sdram_params *sdram_params_new;
3342 	void __iomem *pctl_base = dram->pctl;
3343 	void __iomem *phy_base = dram->phy;
3344 	int delay = 1000;
3345 
3346 	lp_stat = low_power_update(dram, 0);
3347 	sdram_params_new = get_default_sdram_config(freq);
3348 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3349 	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3350 
3351 	pctl_modify_trfc(&sdram_params_new->pctl_regs,
3352 			 &sdram_params->ch.cap_info, dramtype, freq);
3353 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3354 
3355 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
3356 			 PCTL2_OPERATING_MODE_MASK) ==
3357 			 PCTL2_OPERATING_MODE_SR)
3358 		continue;
3359 
3360 	dest_dll_off = 0;
3361 	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3362 			  DDR_PCTL2_INIT3);
3363 	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3364 	    (dramtype == DDR4 && !(dst_init3 & 1)))
3365 		dest_dll_off = 1;
3366 
3367 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3368 	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3369 			  DDR_PCTL2_INIT3);
3370 	cur_init3 &= PCTL2_MR_MASK;
3371 	cur_dll_off = 1;
3372 	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3373 	    (dramtype == DDR4 && (cur_init3 & 1)))
3374 		cur_dll_off = 0;
3375 
3376 	if (!cur_dll_off) {
3377 		if (dramtype == DDR3)
3378 			cur_init3 |= 1;
3379 		else
3380 			cur_init3 &= ~1;
3381 		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3382 	}
3383 
3384 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3385 		     PCTL2_DIS_AUTO_REFRESH);
3386 	update_refresh_reg(dram);
3387 
3388 	enter_sr(dram, 1);
3389 
3390 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3391 	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3392 	       &dram->pmugrf->soc_con[0]);
3393 	sw_set_req(dram);
3394 	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3395 		     PCTL2_DFI_INIT_COMPLETE_EN);
3396 	sw_set_ack(dram);
3397 
3398 	sw_set_req(dram);
3399 	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3400 		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3401 	else
3402 		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3403 
3404 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3405 		     PCTL2_DIS_SRX_ZQCL);
3406 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3407 		     PCTL2_DIS_SRX_ZQCL);
3408 	sw_set_ack(dram);
3409 
3410 	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3411 	       &dram->cru->clkgate_con[21]);
3412 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3413 					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3414 					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3415 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3416 
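	/*
	 * Hold the PHY in reset while the DPLL (set to freq / 2, i.e.
	 * half the DRAM clock) and the PHY PLL are reprogrammed, then
	 * release the reset.
	 */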
3417 	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3418 	rkclk_set_dpll(dram, freq * MHz / 2);
3419 	phy_pll_set(dram, freq * MHz, 0);
3420 	phy_pll_set(dram, freq * MHz, 1);
3421 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3422 
3423 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3424 			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3425 			&dram->pmugrf->soc_con[0]);
3426 	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3427 	       &dram->cru->clkgate_con[21]);
3428 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3429 					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3430 					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3431 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3432 	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3433 	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) {
3434 		udelay(1);
3435 		if (delay-- <= 0) {
3436 			printascii("ERROR: timeout waiting for DFI_INIT_COMPLETE\n");
3437 			while (1)
3438 				;
3439 		}
3440 	}
3441 
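	/*
	 * Switch the controller to the target frequency set point: set
	 * MSTR bit 29 (assumed to be the frequency-mode enable), select
	 * the FSP number in MSTR2, and mirror it into PHY register 0xc.
	 */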
3442 	sw_set_req(dram);
3443 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3444 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3445 	sw_set_ack(dram);
3446 	update_refresh_reg(dram);
3447 	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3448 
3449 	enter_sr(dram, 0);
3450 
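	/*
	 * Pulse PHY register 0x71 bit 5 (presumably latching the new
	 * frequency-set-point settings into the PHY).
	 */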
3451 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3452 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3453 
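	/*
	 * Re-issue the mode registers for the target frequency from this
	 * FSP's INIT3/INIT4 (and, for DDR4, INIT6/INIT7) copies.
	 */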
3454 	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3455 	if (dramtype == LPDDR3) {
3456 		pctl_write_mr(dram->pctl, 3, 1,
3457 			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3458 			      PCTL2_MR_MASK,
3459 			      dramtype);
3460 		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3461 			      dramtype);
3462 		pctl_write_mr(dram->pctl, 3, 3,
3463 			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3464 			      PCTL2_MR_MASK,
3465 			      dramtype);
3466 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3467 	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3468 		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3469 			      dramtype);
3470 		if (!dest_dll_off) {
3471 			pctl_write_mr(dram->pctl, 3, 0,
3472 				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3473 				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
3474 				      dramtype);
3475 			udelay(2);
3476 		}
3477 		pctl_write_mr(dram->pctl, 3, 0,
3478 			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
3479 			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
3480 			      dramtype);
3481 		pctl_write_mr(dram->pctl, 3, 2,
3482 			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3483 			       PCTL2_MR_MASK), dramtype);
3484 		if (dramtype == DDR4) {
3485 			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3486 				      dramtype);
3487 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3488 				       DDR_PCTL2_INIT6);
3489 			pctl_write_mr(dram->pctl, 3, 4,
3490 				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3491 				       PCTL2_MR_MASK,
3492 				      dramtype);
3493 			pctl_write_mr(dram->pctl, 3, 5,
3494 				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
3495 				      PCTL2_MR_MASK,
3496 				      dramtype);
3497 
3498 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3499 				       DDR_PCTL2_INIT7);
3500 				/* update DDR4 VrefDQ: MR6 writes with A7 set enter training mode, final A7=0 write exits and latches the value */
3501 			pctl_write_mr(dram->pctl, 3, 6,
3502 				      (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
3503 				      PCTL2_MR_MASK, dramtype);
3504 			pctl_write_mr(dram->pctl, 3, 6,
3505 				      (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
3506 				      PCTL2_MR_MASK, dramtype);
3507 			pctl_write_mr(dram->pctl, 3, 6,
3508 				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
3509 				      PCTL2_MR_MASK,
3510 				      dramtype);
3511 		}
3512 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
3513 		pctl_write_mr(dram->pctl, 3, 13,
3514 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3515 			       PCTL2_MR_MASK) & (~(BIT(7)))) |
3516 			      dst_fsp_lp4 << 7, dramtype);
3517 	}
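	/* re-enable auto-refresh now that the mode registers are updated */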
3518 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3519 		     PCTL2_DIS_AUTO_REFRESH);
3520 	update_refresh_reg(dram);
3521 
3522 	/* retrain at the new frequency, then restore the low-power state */
3523 	high_freq_training(dram, sdram_params_new, dst_fsp);
3524 	low_power_update(dram, lp_stat);
3525 
3526 	save_fsp_param(dram, dst_fsp, sdram_params_new);
3527 }
3528 
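/*
 * Walk the controller through every frequency set point: f1..f3 are
 * trained into FSP1..FSP3 first, then the system settles at the boot
 * frequency f0.  With CONFIG_SPL_KERNEL_BOOT only f0 is programmed.
 */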
3529 static void ddr_set_rate_for_fsp(struct dram_info *dram,
3530 				 struct rv1126_sdram_params *sdram_params)
3531 {
3532 	struct ddr2_3_4_lp2_3_info *ddr_info;
3533 	u32 f0;
3534 	u32 dramtype = sdram_params->base.dramtype;
3535 #ifndef CONFIG_SPL_KERNEL_BOOT
3536 	u32 f1, f2, f3;
3537 #endif
3538 
3539 	ddr_info = get_ddr_drv_odt_info(dramtype);
3540 	if (!ddr_info)
3541 		return;
3542 
3543 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3544 	     DDR_FREQ_MASK;
3545 
3546 #ifndef CONFIG_SPL_KERNEL_BOOT
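	/* clear the saved per-FSP parameters before retraining */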
3547 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3548 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
3549 
3550 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3551 	     DDR_FREQ_MASK;
3552 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3553 	     DDR_FREQ_MASK;
3554 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3555 	     DDR_FREQ_MASK;
3556 #endif
3557 
3558 	if (get_wrlvl_val(dram, sdram_params))
3559 		printascii("get wrlvl value failed\n");
3560 
3561 #ifndef CONFIG_SPL_KERNEL_BOOT
3562 	printascii("change to: ");
3563 	printdec(f1);
3564 	printascii("MHz\n");
3565 	ddr_set_rate(&dram_info, sdram_params, f1,
3566 		     sdram_params->base.ddr_freq, 1, 1, 1);
3567 	printascii("change to: ");
3568 	printdec(f2);
3569 	printascii("MHz\n");
3570 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3571 	printascii("change to: ");
3572 	printdec(f3);
3573 	printascii("MHz\n");
3574 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3575 #endif
3576 	printascii("change to: ");
3577 	printdec(f0);
3578 	printascii("MHz(final freq)\n");
3579 #ifndef CONFIG_SPL_KERNEL_BOOT
3580 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3581 #else
3582 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
3583 #endif
3584 }
3585 
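/* read the UART settings from the global_info block inside common_info */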
3586 int get_uart_config(void)
3587 {
3588 	struct sdram_head_info_index_v2 *index =
3589 		(struct sdram_head_info_index_v2 *)common_info;
3590 	struct global_info *gbl_info;
3591 
3592 	gbl_info = (struct global_info *)((void *)common_info +
3593 		index->global_index.offset * 4);
3594 
3595 	return gbl_info->uart_info;
3596 }
3597 
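/*
 * TPL DRAM entry point: validate the common_info parameter blob,
 * detect the attached DRAM, then train every frequency set point.
 */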
3598 /* return: 0 = success, other = fail */
3599 int sdram_init(void)
3600 {
3601 	struct rv1126_sdram_params *sdram_params;
3602 	int ret = 0;
3603 	struct sdram_head_info_index_v2 *index =
3604 		(struct sdram_head_info_index_v2 *)common_info;
3605 	struct global_info *gbl_info;
3606 
3607 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3608 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3609 	dram_info.grf = (void *)GRF_BASE_ADDR;
3610 	dram_info.cru = (void *)CRU_BASE_ADDR;
3611 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3612 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3613 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3614 
3615 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3616 	printascii("extended temp support\n");
3617 #endif
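	/*
	 * Sanity-check the v2 parameter header: version, per-type table
	 * sizes and all table offsets must be valid.
	 */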
3618 	if (index->version_info != 2 ||
3619 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3620 	    (index->ddr3_index.size !=
3621 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3622 	    (index->ddr4_index.size !=
3623 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3624 	    (index->lp3_index.size !=
3625 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3626 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3627 	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3628 	    index->global_index.offset == 0 ||
3629 	    index->ddr3_index.offset == 0 ||
3630 	    index->ddr4_index.offset == 0 ||
3631 	    index->lp3_index.offset == 0 ||
3632 	    index->lp4_index.offset == 0 ||
3633 	    index->lp4x_index.offset == 0) {
3634 		printascii("common info error\n");
3635 		goto error;
3636 	}
3637 
3638 	gbl_info = (struct global_info *)((void *)common_info +
3639 		index->global_index.offset * 4);
3640 
3641 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3642 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3643 
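	/*
	 * Start detection from the first (lowest frequency) config; for
	 * LPDDR4X builds the shared LPDDR4 configs are re-tagged first.
	 */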
3644 	sdram_params = &sdram_configs[0];
3645 	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
3646 	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
3647 		sdram_configs[j].base.dramtype = LPDDR4X;
3648 	#endif
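	/*
	 * Apply the 2T timing option from global info for DDR3/DDR4:
	 * bit 10 of the first pctl register value is assumed to be
	 * MSTR.en_2t_timing_mode.
	 */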
3649 	if (sdram_params->base.dramtype == DDR3 ||
3650 	    sdram_params->base.dramtype == DDR4) {
3651 		if (DDR_2T_INFO(gbl_info->info_2t))
3652 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3653 		else
3654 			sdram_params->pctl_regs.pctl[0][1] &=
3655 				~(0x1 << 10);
3656 	}
3657 	ret = sdram_init_detect(&dram_info, sdram_params);
3658 	if (ret) {
3659 		sdram_print_dram_type(sdram_params->base.dramtype);
3660 		printascii(", ");
3661 		printdec(sdram_params->base.ddr_freq);
3662 		printascii("MHz\n");
3663 		goto error;
3664 	}
3665 	print_ddr_info(sdram_params);
3666 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3667 	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3668 				  (u8)sdram_params->ch.cap_info.rank);
3669 #endif
3670 
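	/*
	 * Train every frequency set point, then stash the per-FSP
	 * parameters at FSP_PARAM_STORE_ADDR for later use.
	 */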
3671 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3672 #ifndef CONFIG_SPL_KERNEL_BOOT
3673 	copy_fsp_param_to_ddr();
3674 #endif
3675 
3676 	ddr_set_atags(&dram_info, sdram_params);
3677 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3678 	save_rw_trn_result_to_ddr(&rw_trn_result);
3679 #endif
3680 
3681 	printascii("out\n");
3682 
3683 	return ret;
3684 error:
3685 	printascii("error\n");
3686 	return (-1);
3687 }
3688 #endif /* CONFIG_TPL_BUILD */
3689