xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 2f6c020d95ebda22b28d3a31f574ec547a9281fb)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
/* define training flag */
/* bit flags selecting which PHY training steps to run (FULL = all) */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* #define DDR4_READ_GATE_PREAMBLE_MODE */
#ifndef DDR4_READ_GATE_PREAMBLE_MODE
/* DDR4 read gate normal mode conflicts with 1nCK preamble */
#define DDR4_READ_GATE_2NCK_PREAMBLE
#endif

/* signal-class selectors for the deskew helpers */
#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

/* deskew value interpretation: absolute setting vs delta to apply */
#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

/* this driver is TPL-only and requires the tiny framework build */
#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif
46 
47 #ifdef CONFIG_TPL_BUILD
48 
/* Aggregated driver state: register bases plus idle-timeout settings */
struct dram_info {
	void __iomem *pctl;		/* DDR controller (UPCTL2) base */
	void __iomem *phy;		/* DDR PHY base */
	struct rv1126_cru *cru;		/* clock/reset unit */
	struct msch_regs *msch;		/* server MSCH (memory scheduler) */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system GRF */
	struct ram_info info;		/* RAM base/size for the ram uclass */
	struct rv1126_pmugrf *pmugrf;	/* PMU GRF */
	u32 sr_idle;	/* self-refresh idle count (units per PCTL - confirm) */
	u32 pd_idle;	/* power-down idle count (units per PCTL - confirm) */
};
61 
/* fixed physical base addresses used before any device model is up */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* secure GRF offsets; SOC_CON13 carries the controller reset requests */
#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* single global driver state for the TPL DRAM init path */
struct dram_info dram_info;
76 
/*
 * Candidate parameter sets, one per supported frequency (ascending);
 * the init code selects one at run time. Which table gets built is
 * fixed at compile time by CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE
 * (3 = DDR3, 0 = DDR4, 6 = LPDDR3, 7/8 = presumably LPDDR4/LPDDR4X,
 * matching the .inc files pulled in below).
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
118 
/* loader parameter blob; laid out as sdram_head_info_index_v2 followed
 * by per-DRAM-type info blocks addressed via *_index.offset (in words)
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training results exported to the ddr test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* saved per-frequency-set-point parameters (fsp = frequency set point) */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* cached LPDDR3 ODT value; written by code outside this excerpt */
static u8 lp3_odt_value;

/* write-leveling results, [rank][byte lane] */
static s8 wrlvl_result[2][4];
132 
133 /* DDR configuration 0-9 */
/* DDR configuration 0-9 */
/*
 * Key encoding (see the non-DDR4 path of calculate_ddrconfig()):
 *   bit 8    : rank - 1 (1 = two chip-selects)
 *   bits 7:5 : cs0 row bits - 13
 *   bit 4    : purpose not derivable here (set only in cfgs 5-7)
 *   bit 3    : 1 = 8 banks (bk == 3)
 *   bits 2:0 : bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
146 
147 /* DDR configuration 10-21 */
/* DDR configuration 10-21 */
/*
 * Key encoding (see the DDR4 path of calculate_ddrconfig()):
 *   bit 7    : rank - 1
 *   bits 6:4 : cs0 row bits - 13
 *   bit 3    : 1 = symmetric dual-rank variant (cs1 row == cs0 row)
 *   bits 2:1 : bus width code
 *   bit 0    : die width code
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
162 
163 /* DDR configuration 22-28 */
/* DDR configuration 22-28 */
/* second-pass table; same key encoding as ddr_cfg_2_rbc[] above */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
173 
/*
 * {DDR4 ddrconfig, DDR3-style ddrconfig} pairs.
 * calculate_ddrconfig() maps a DDR4 match to its DDR3-style index;
 * set_ctl_address_map() applies the inverse to pick the DDR4 addrmap row.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
188 
/*
 * Per-ddrconfig ADDRMAP register images: the 9 words of each row are
 * copied verbatim into DDR_PCTL2_ADDRMAP0.. by set_ctl_address_map().
 * Rows 0-9 / 10-21 / 22-28 correspond to the three key tables above.
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
252 
/*
 * DQ selection lookup, 3 values per entry; consumers are outside this
 * excerpt. NOTE(review): presumably {dq id, rx sel, tx sel} for deskew
 * register addressing - confirm against the users later in this file.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
277 
/* address-group bases per {CS0,CS1} x {A,B}; constants from the SoC
 * sdram header (used by deskew code outside this excerpt)
 */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
284 
/* PHY register offsets holding write-leveling results, [rank][lane];
 * NOTE(review): 0xa0/0xd0 look like per-channel register banks - confirm
 * against the inno PHY register map
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
289 
/* PHY skew-update register offsets, indexed by the SKEW_UPDATE_* id
 * named in each entry's comment (RX first, then TX; CS0 then CS1)
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
308 
/*
 * Drive the DDR controller and PHY reset request lines.
 * Each argument is a request value passed through the *_REQ() write-mask
 * macros (polarity per those macros). The controller core/APB resets -
 * and its AXI reset, tied to ctl_srstn - go through the secure GRF;
 * the PHY resets go through CRU softrst_con[12].
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
320 
/*
 * Program the DPLL to output @hz and switch the DDR clock over to it.
 *
 * postdiv1/postdiv2 are chosen per frequency band, then fbdiv is derived
 * from hz = 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2).
 * Spread spectrum (SSMOD) is enabled when the loader params request it,
 * which requires fractional mode (dsmpd = 0).
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock-wait budget in ~1 us steps */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* integer mode unless SSMOD is enabled */

	/* SSMOD settings live in the global_info block of the loader params */
	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the 24 MHz crystal while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	/* Rockchip masked write (hi-16 = write-enable); NOTE(review):
	 * presumably zeroes a DDR clk divider field in con64 - confirm
	 * against the TRM
	 */
	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* fractional mode is required for SSMOD */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait for PLL lock, up to ~1 ms */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}
	if (delay <= 0)
		printascii("ERROR: DPLL lock timeout!\n");

	/* switch the DDR clock mux onto the (now locked) DPLL */
	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
388 
/*
 * Set the DPLL for the requested DRAM data rate.
 * base.ddr_freq is in MHz; the inno PHY internally doubles the clock,
 * so the DPLL is programmed at half the target frequency.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
395 
/*
 * Map the detected DRAM geometry (rank, width, col/row/bank counts) to
 * one of the 29 supported address-mapping layouts ("ddrconfig", an
 * index into addrmap[]).
 *
 * A geometry key is built with the same bit layout as the matching
 * table (ddr4_cfg_2_rbc[] for DDR4, ddr_cfg_2_rbc[]/_p2[] otherwise)
 * and compared field-wise; the row field may match a larger table value
 * (<=) so over-provisioned row bits still find a layout. DDR4 matches
 * are finally translated to their DDR3-style index via d4_rbc_2_d3_rbc[]
 * (set_ctl_address_map() performs the inverse).
 *
 * Returns the ddrconfig index; prints an error if nothing matched
 * (ddrconf stays at (u32)-1 and trips the > 28 check).
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* sentinel: wraps huge, caught by >28 below */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank (equal rows, no 3/4-row die):
		 * try the dedicated configs 17-20 first
		 */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* dual rank with equal rows and 8 banks: try 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* second-pass table maps to configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last-ditch fallback for single-rank 8-bank parts */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* translate a DDR4 config to its DDR3-style equivalent index */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
500 
501 static void sw_set_req(struct dram_info *dram)
502 {
503 	void __iomem *pctl_base = dram->pctl;
504 
505 	/* clear sw_done=0 */
506 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
507 }
508 
509 static void sw_set_ack(struct dram_info *dram)
510 {
511 	void __iomem *pctl_base = dram->pctl;
512 
513 	/* set sw_done=1 */
514 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
515 	while (1) {
516 		/* wait programming done */
517 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
518 				PCTL2_SW_DONE_ACK)
519 			break;
520 	}
521 }
522 
/*
 * Program the controller ADDRMAP registers for the detected geometry.
 * For DDR4 the stored ddrconfig is the DDR3-style index produced by
 * calculate_ddrconfig(); translate it back first so the DDR4 rows of
 * addrmap[] are used.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	/* ADDRMAP0.. are consecutive; copy the whole 9-word image */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	/* row bits 12..17 occupy one byte each starting at ADDRMAP6 */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	/* 3/4-row-size LPDDR3 die: flag it via ADDRMAP6 bit 31 */
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	/* NOTE(review): PCCFG bit 8 set only for 16-bit-bus DDR4 -
	 * confirm field meaning against the uMCTL2 register reference
	 */
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: park the CS address bit (0x1f = unused) */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
563 
/*
 * Configure or wait on the DDR PHY PLL.
 * @wait = 1: power the PLL up (clear PD) and busy-wait for lock; hangs
 *            forever after printing an error if lock never comes.
 * @wait = 0: program fbdiv/prediv/postdiv for @freq (Hz); the postdiv
 *            stage is bypassed above 456 MHz. Divider choices assume a
 *            fixed PLL reference - TODO confirm rate against PHY doc.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;
	int delay = 1000;	/* lock-wait budget in ~1 us steps */

	if (wait) {
		/* release power-down, then poll the lock bit in reg 0x90 */
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK)) {
			udelay(1);
			if (delay-- <= 0) {
				printascii("ERROR: phy pll lock timeout!\n");
				while (1)
					;
			}
		}
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;	/* bypass the post divider */
		}
		/* fbdiv is 9 bits: low 8 in reg 0x50, bit 9 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
609 
/* DDR3 PHY drive strength: {register code, ohms}, descending by ohms;
 * set_ds_odt() scans from the last entry upward for the first value
 * >= the requested resistance
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
635 
/* DDR3 PHY ODT: {register code, ohms}, descending; 0 = ODT disabled */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};
662 
/* DDR4/LPDDR3 PHY drive strength: {register code, ohms}, descending */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};
688 
689 static u16 d4lp3_phy_odt_2_ohm[][2] = {
690 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
691 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
692 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
693 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
694 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
695 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
696 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
697 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
698 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
699 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
700 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
701 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
702 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
703 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
704 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
705 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
706 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
707 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
708 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
709 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
710 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
711 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
712 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
713 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
714 };
715 
/* LPDDR4(X) PHY drive strength: {register code, ohms}, descending */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};
741 
/* LPDDR4(X) PHY ODT: {register code, ohms}, descending; 0 = disabled */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm,	87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};
768 
769 static u32 lp4_odt_calc(u32 odt_ohm)
770 {
771 	u32 odt;
772 
773 	if (odt_ohm == 0)
774 		odt = LPDDR4_DQODT_DIS;
775 	else if (odt_ohm <= 40)
776 		odt = LPDDR4_DQODT_40;
777 	else if (odt_ohm <= 48)
778 		odt = LPDDR4_DQODT_48;
779 	else if (odt_ohm <= 60)
780 		odt = LPDDR4_DQODT_60;
781 	else if (odt_ohm <= 80)
782 		odt = LPDDR4_DQODT_80;
783 	else if (odt_ohm <= 120)
784 		odt = LPDDR4_DQODT_120;
785 	else
786 		odt = LPDDR4_DQODT_240;
787 
788 	return odt;
789 }
790 
791 static void *get_ddr_drv_odt_info(u32 dramtype)
792 {
793 	struct sdram_head_info_index_v2 *index =
794 		(struct sdram_head_info_index_v2 *)common_info;
795 	void *ddr_info = 0;
796 
797 	if (dramtype == DDR4)
798 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
799 	else if (dramtype == DDR3)
800 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
801 	else if (dramtype == LPDDR3)
802 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
803 	else if (dramtype == LPDDR4)
804 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
805 	else if (dramtype == LPDDR4X)
806 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
807 	else
808 		printascii("unsupported dram type\n");
809 	return ddr_info;
810 }
811 
/*
 * Program LPDDR4/LPDDR4X CA (MR12) and DQ (MR14) Vref for @dst_fsp.
 *
 * The raw targets come from the loader params; which set is used
 * depends on whether DRAM ODT is enabled at @freq_mhz. The value is
 * then converted to the mode-register encoding: bit 6 selects the
 * Vref range, the low bits the step within it. For LPDDR4X the target
 * is first scaled by 11/6 and different range limits apply.
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	/* pick odt-on/odt-off targets based on the ODT enable frequency */
	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* clamp to 100..420; range 0 covers <=300, range 1 the rest,
		 * both in steps of 4 (units as stored in the loader params)
		 */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		/* LPDDR4X: scale by 11/6, clamp to 150..629 */
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		/* NOTE(review): CA uses a /4 step but DQ uses /6 here,
		 * and dq_vref is not scaled by 11/6 like ca_vref -
		 * confirm both against the LPDDR4X MR14 encoding
		 */
		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* write MR12/MR14 images into INIT6/INIT7 for the target FSP,
	 * inside a quasi-dynamic programming window
	 */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
882 
883 static void set_ds_odt(struct dram_info *dram,
884 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
885 {
886 	void __iomem *phy_base = dram->phy;
887 	void __iomem *pctl_base = dram->pctl;
888 	u32 dramtype = sdram_params->base.dramtype;
889 	struct ddr2_3_4_lp2_3_info *ddr_info;
890 	struct lp4_info *lp4_info;
891 	u32 i, j, tmp;
892 	const u16 (*p_drv)[2];
893 	const u16 (*p_odt)[2];
894 	u32 drv_info, sr_info;
895 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
896 	u32 phy_odt_ohm, dram_odt_ohm;
897 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
898 	u32 phy_odt_up_en, phy_odt_dn_en;
899 	u32 sr_dq, sr_clk;
900 	u32 freq = sdram_params->base.ddr_freq;
901 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
902 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
903 	u32 phy_dq_drv = 0;
904 	u32 phy_odt_up = 0, phy_odt_dn = 0;
905 
906 	ddr_info = get_ddr_drv_odt_info(dramtype);
907 	lp4_info = (void *)ddr_info;
908 
909 	if (!ddr_info)
910 		return;
911 
912 	/* dram odt en freq control phy drv, dram odt and phy sr */
913 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
914 		drv_info = ddr_info->drv_when_odtoff;
915 		dram_odt_ohm = 0;
916 		sr_info = ddr_info->sr_when_odtoff;
917 		phy_lp4_drv_pd_en =
918 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
919 	} else {
920 		drv_info = ddr_info->drv_when_odten;
921 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
922 		sr_info = ddr_info->sr_when_odten;
923 		phy_lp4_drv_pd_en =
924 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
925 	}
926 	phy_dq_drv_ohm =
927 		DRV_INFO_PHY_DQ_DRV(drv_info);
928 	phy_clk_drv_ohm =
929 		DRV_INFO_PHY_CLK_DRV(drv_info);
930 	phy_ca_drv_ohm =
931 		DRV_INFO_PHY_CA_DRV(drv_info);
932 
933 	sr_dq = DQ_SR_INFO(sr_info);
934 	sr_clk = CLK_SR_INFO(sr_info);
935 
936 	/* phy odt en freq control dram drv and phy odt */
937 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
938 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
939 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
940 		phy_odt_ohm = 0;
941 		phy_odt_up_en = 0;
942 		phy_odt_dn_en = 0;
943 	} else {
944 		dram_drv_ohm =
945 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
946 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
947 		phy_odt_up_en =
948 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
949 		phy_odt_dn_en =
950 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
951 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
952 	}
953 
954 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 		if (phy_odt_ohm) {
956 			phy_odt_up_en = 0;
957 			phy_odt_dn_en = 1;
958 		}
959 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
960 			dram_caodt_ohm = 0;
961 		else
962 			dram_caodt_ohm =
963 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
964 	}
965 
966 	if (dramtype == DDR3) {
967 		p_drv = d3_phy_drv_2_ohm;
968 		p_odt = d3_phy_odt_2_ohm;
969 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
970 		p_drv = lp4_phy_drv_2_ohm;
971 		p_odt = lp4_phy_odt_2_ohm;
972 	} else {
973 		p_drv = d4lp3_phy_drv_2_ohm;
974 		p_odt = d4lp3_phy_odt_2_ohm;
975 	}
976 
977 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
978 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
979 			phy_dq_drv = **(p_drv + i);
980 			break;
981 		}
982 		if (i == 0)
983 			break;
984 	}
985 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
986 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
987 			phy_clk_drv = **(p_drv + i);
988 			break;
989 		}
990 		if (i == 0)
991 			break;
992 	}
993 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
994 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
995 			phy_ca_drv = **(p_drv + i);
996 			break;
997 		}
998 		if (i == 0)
999 			break;
1000 	}
1001 	if (!phy_odt_ohm)
1002 		phy_odt = 0;
1003 	else
1004 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
1005 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
1006 				phy_odt = **(p_odt + i);
1007 				break;
1008 			}
1009 			if (i == 0)
1010 				break;
1011 		}
1012 
1013 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
1014 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1015 			vref_inner = 0x80;
1016 		else if (phy_odt_up_en)
1017 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1018 				     (dram_drv_ohm + phy_odt_ohm);
1019 		else
1020 			vref_inner = phy_odt_ohm * 128 /
1021 				(phy_odt_ohm + dram_drv_ohm);
1022 
1023 		if (dramtype != DDR3 && dram_odt_ohm)
1024 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1025 				   (phy_dq_drv_ohm + dram_odt_ohm);
1026 		else
1027 			vref_out = 0x80;
1028 	} else {
1029 		/* for lp4 and lp4x*/
1030 		if (phy_odt_ohm)
1031 			vref_inner =
1032 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1033 				 256) / 1000;
1034 		else
1035 			vref_inner =
1036 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1037 				 256) / 1000;
1038 
1039 		vref_out = 0x80;
1040 	}
1041 
1042 	/* default ZQCALIB bypass mode */
1043 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1044 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1045 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1046 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1047 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1048 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1049 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1050 	} else {
1051 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1052 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1053 	}
1054 	/* clk / cmd slew rate */
1055 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1056 
1057 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1058 	if (phy_odt_up_en)
1059 		phy_odt_up = phy_odt;
1060 	if (phy_odt_dn_en)
1061 		phy_odt_dn = phy_odt;
1062 
1063 	for (i = 0; i < 4; i++) {
1064 		j = 0x110 + i * 0x10;
1065 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1066 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1067 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1068 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1069 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1070 
1071 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1072 				1 << 3, phy_lp4_drv_pd_en << 3);
1073 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1074 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1075 		/* dq slew rate */
1076 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1077 				0x1f, sr_dq);
1078 	}
1079 
1080 	/* reg_rx_vref_value_update */
1081 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1082 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1083 
1084 	/* RAM VREF */
1085 	writel(vref_out, PHY_REG(phy_base, 0x105));
1086 	if (dramtype == LPDDR3)
1087 		udelay(100);
1088 
1089 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1090 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1091 
1092 	if (dramtype == DDR3 || dramtype == DDR4) {
1093 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1094 				DDR_PCTL2_INIT3);
1095 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1096 	} else {
1097 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1098 				DDR_PCTL2_INIT4);
1099 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1100 	}
1101 
1102 	if (dramtype == DDR3) {
1103 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1104 		if (dram_drv_ohm == 34)
1105 			mr1_mr3 |= DDR3_DS_34;
1106 
1107 		if (dram_odt_ohm == 0)
1108 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1109 		else if (dram_odt_ohm <= 40)
1110 			mr1_mr3 |= DDR3_RTT_NOM_40;
1111 		else if (dram_odt_ohm <= 60)
1112 			mr1_mr3 |= DDR3_RTT_NOM_60;
1113 		else
1114 			mr1_mr3 |= DDR3_RTT_NOM_120;
1115 
1116 	} else if (dramtype == DDR4) {
1117 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1118 		if (dram_drv_ohm == 48)
1119 			mr1_mr3 |= DDR4_DS_48;
1120 
1121 		if (dram_odt_ohm == 0)
1122 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1123 		else if (dram_odt_ohm <= 34)
1124 			mr1_mr3 |= DDR4_RTT_NOM_34;
1125 		else if (dram_odt_ohm <= 40)
1126 			mr1_mr3 |= DDR4_RTT_NOM_40;
1127 		else if (dram_odt_ohm <= 48)
1128 			mr1_mr3 |= DDR4_RTT_NOM_48;
1129 		else if (dram_odt_ohm <= 60)
1130 			mr1_mr3 |= DDR4_RTT_NOM_60;
1131 		else
1132 			mr1_mr3 |= DDR4_RTT_NOM_120;
1133 
1134 	} else if (dramtype == LPDDR3) {
1135 		if (dram_drv_ohm <= 34)
1136 			mr1_mr3 |= LPDDR3_DS_34;
1137 		else if (dram_drv_ohm <= 40)
1138 			mr1_mr3 |= LPDDR3_DS_40;
1139 		else if (dram_drv_ohm <= 48)
1140 			mr1_mr3 |= LPDDR3_DS_48;
1141 		else if (dram_drv_ohm <= 60)
1142 			mr1_mr3 |= LPDDR3_DS_60;
1143 		else if (dram_drv_ohm <= 80)
1144 			mr1_mr3 |= LPDDR3_DS_80;
1145 
1146 		if (dram_odt_ohm == 0)
1147 			lp3_odt_value = LPDDR3_ODT_DIS;
1148 		else if (dram_odt_ohm <= 60)
1149 			lp3_odt_value = LPDDR3_ODT_60;
1150 		else if (dram_odt_ohm <= 120)
1151 			lp3_odt_value = LPDDR3_ODT_120;
1152 		else
1153 			lp3_odt_value = LPDDR3_ODT_240;
1154 	} else {/* for lpddr4 and lpddr4x */
1155 		/* MR3 for lp4 PU-CAL and PDDS */
1156 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1157 		mr1_mr3 |= lp4_pu_cal;
1158 
1159 		tmp = lp4_odt_calc(dram_drv_ohm);
1160 		if (!tmp)
1161 			tmp = LPDDR4_PDDS_240;
1162 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1163 
1164 		/* MR11 for lp4 ca odt, dq odt set */
1165 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1166 			     DDR_PCTL2_INIT6);
1167 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1168 
1169 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1170 
1171 		tmp = lp4_odt_calc(dram_odt_ohm);
1172 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1173 
1174 		tmp = lp4_odt_calc(dram_caodt_ohm);
1175 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1176 		sw_set_req(dram);
1177 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1178 				DDR_PCTL2_INIT6,
1179 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1180 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1181 		sw_set_ack(dram);
1182 
1183 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1184 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1185 			     DDR_PCTL2_INIT7);
1186 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1187 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1188 
1189 		tmp = lp4_odt_calc(phy_odt_ohm);
1190 		mr22 |= tmp;
1191 		mr22 = mr22 |
1192 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1193 			LPDDR4_ODTE_CK_SHIFT) |
1194 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1195 			LPDDR4_ODTE_CS_SHIFT) |
1196 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1197 			LPDDR4_ODTD_CA_SHIFT);
1198 
1199 		sw_set_req(dram);
1200 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1201 				DDR_PCTL2_INIT7,
1202 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1203 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1204 		sw_set_ack(dram);
1205 	}
1206 
1207 	if (dramtype == DDR4 || dramtype == DDR3) {
1208 		sw_set_req(dram);
1209 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1210 				DDR_PCTL2_INIT3,
1211 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1212 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1213 		sw_set_ack(dram);
1214 	} else {
1215 		sw_set_req(dram);
1216 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1217 				DDR_PCTL2_INIT4,
1218 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1219 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1220 		sw_set_ack(dram);
1221 	}
1222 }
1223 
1224 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1225 				   struct rv1126_sdram_params *sdram_params)
1226 {
1227 	void __iomem *phy_base = dram->phy;
1228 	u32 dramtype = sdram_params->base.dramtype;
1229 	struct sdram_head_info_index_v2 *index =
1230 		(struct sdram_head_info_index_v2 *)common_info;
1231 	struct dq_map_info *map_info;
1232 
1233 	map_info = (struct dq_map_info *)((void *)common_info +
1234 		index->dq_map_index.offset * 4);
1235 
1236 	if (dramtype == LPDDR4X)
1237 		dramtype = LPDDR4;
1238 
1239 	if (dramtype <= LPDDR4)
1240 		writel((map_info->byte_map[dramtype / 4] >>
1241 			((dramtype % 4) * 8)) & 0xff,
1242 		       PHY_REG(phy_base, 0x4f));
1243 
1244 	return 0;
1245 }
1246 
/*
 * Basic PHY configuration for the current SDRAM parameters: remap the
 * CMD/DQ byte paths, set the PHY PLL to the target frequency, replay
 * the board-specific PHY register table, then program the PHY_0xf
 * active-byte mask from the detected bus width and DQ byte map.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	/* lock the PLL before replaying the register table */
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* board PHY register table; 0xFFFFFFFF terminates it */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical byte lanes carry logical bytes 0 and 1 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0) {
			byte0 = i;
			break;
		}
	}
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 1) {
			byte1 = i;
			break;
		}
	}

	/* enable only the lanes in use: bw 2 = all four bytes, 1 = two, else one */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1299 
1300 static int update_refresh_reg(struct dram_info *dram)
1301 {
1302 	void __iomem *pctl_base = dram->pctl;
1303 	u32 ret;
1304 
1305 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1306 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1307 
1308 	return 0;
1309 }
1310 
1311 /*
1312  * rank = 1: cs0
1313  * rank = 2: cs1
1314  */
1315 u32 read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1316 {
1317 	u32 ret;
1318 	u32 i, temp;
1319 	void __iomem *pctl_base = dram->pctl;
1320 	struct sdram_head_info_index_v2 *index =
1321 		(struct sdram_head_info_index_v2 *)common_info;
1322 	struct dq_map_info *map_info;
1323 
1324 	map_info = (struct dq_map_info *)((void *)common_info +
1325 		index->dq_map_index.offset * 4);
1326 
1327 	pctl_read_mr(pctl_base, rank, mr_num);
1328 
1329 	if (dramtype == LPDDR3) {
1330 		temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1331 		ret = 0;
1332 		for (i = 0; i < 8; i++)
1333 			ret |= ((temp >> i) & 0x1) << ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
1334 	} else {
1335 		ret = readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff;
1336 	}
1337 
1338 	return ret;
1339 }
1340 
/*
 * Software-controlled self-refresh entry/exit.
 * @en: 1 - request software self-refresh and busy-wait until STAT
 *	reports self-refresh type "not auto" and operating mode SR;
 *	0 - release the request and busy-wait until SR mode is left.
 * NOTE(review): the waits are unbounded - presumably the controller
 * always converges; confirm against the uMCTL2 documentation.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}
1363 
1364 void record_dq_prebit(struct dram_info *dram)
1365 {
1366 	u32 group, i, tmp;
1367 	void __iomem *phy_base = dram->phy;
1368 
1369 	for (group = 0; group < 4; group++) {
1370 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1371 			/* l_loop_invdelaysel */
1372 			writel(dq_sel[i][0], PHY_REG(phy_base,
1373 						     grp_addr[group] + 0x2c));
1374 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1375 			writel(tmp, PHY_REG(phy_base,
1376 					    grp_addr[group] + dq_sel[i][1]));
1377 
1378 			/* r_loop_invdelaysel */
1379 			writel(dq_sel[i][0], PHY_REG(phy_base,
1380 						     grp_addr[group] + 0x2d));
1381 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1382 			writel(tmp, PHY_REG(phy_base,
1383 					    grp_addr[group] + dq_sel[i][2]));
1384 		}
1385 	}
1386 }
1387 
/*
 * Latch the RX DQ pre-bit de-skew values into the PHY by pulsing
 * PHY_0x70 bit 4 (with bits 1 and 6 cleared at the same time).
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* raise the update strobe with bits 1/6 forced low */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	/* drop the strobe to complete the update */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1397 
/*
 * Latch the TX DQ pre-bit de-skew values into the PHY: clear
 * PHY_0x7a bit 1, set PHY_0x2 bit 3, then pulse PHY_0xc bit 6.
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	/* pulse the update strobe */
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1408 
/*
 * Latch the CA pre-bit de-skew values into the PHY: clear PHY_0x25
 * bit 2, then pulse PHY_0x22 bit 6.
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	/* pulse the update strobe */
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1418 
1419 /*
1420  * dir: 0: de-skew = delta_*
1421  *	1: de-skew = reg val - delta_*
1422  * delta_dir: value for differential signal: clk/
1423  * delta_sig: value for single signal: ca/cmd
1424  */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	/* cs: 0 -> rank 0 only, 2 -> rank 1 only, anything else -> both */
	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/* on LPDDR4/4X, force PHY_0x60[5] high for the duration; remember
	 * the original state in dfi_lp_stat so it can be restored below
	 */
	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	/* rewrite the de-skew registers while the DRAM is in self-refresh */
	enter_sr(dram, 1);

	/* single-ended CA/CMD de-skew: registers 0x150..0x16f */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/*
	 * Differential clock registers 0x167/0x168: in diff mode, undo
	 * the delta_sig applied by the loop above and apply delta_dif.
	 */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* mirror the clock value to 0x154/0x15a as well */
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	/* restore PHY_0x60[5] if we set it above */
	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));

}
1475 
1476 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1477 {
1478 	u32 i, j, offset = 0;
1479 	u32 min = 0x3f;
1480 	void __iomem *phy_base = dram->phy;
1481 	u32 byte_en;
1482 
1483 	if (signal == SKEW_TX_SIGNAL)
1484 		offset = 8;
1485 
1486 	if (signal == SKEW_CA_SIGNAL) {
1487 		for (i = 0; i < 0x20; i++)
1488 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1489 	} else {
1490 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1491 		for (j = offset; j < offset + rank * 4; j++) {
1492 			if (!((byte_en >> (j % 4)) & 1))
1493 				continue;
1494 			for (i = 0; i < 11; i++)
1495 				min = MIN(min,
1496 					  readl(PHY_REG(phy_base,
1497 							dqs_dq_skew_adr[j] +
1498 							i)));
1499 		}
1500 	}
1501 
1502 	return min;
1503 }
1504 
1505 static u32 low_power_update(struct dram_info *dram, u32 en)
1506 {
1507 	void __iomem *pctl_base = dram->pctl;
1508 	u32 lp_stat = 0;
1509 
1510 	if (en) {
1511 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1512 	} else {
1513 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1514 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1515 	}
1516 
1517 	return lp_stat;
1518 }
1519 
1520 /*
1521  * signal:
1522  * dir: 0: de-skew = delta_*
1523  *	1: de-skew = reg val - delta_*
1524  * delta_dir: value for differential signal: dqs
1525  * delta_sig: value for single signal: dq/dm
1526  */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	/* PHY_0xf[3:0] is the per-byte-lane enable mask */
	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	/* dqs_dq_skew_adr[]: indices 0..7 are RX bytes, 8..15 TX bytes */
	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		/* skip byte lanes disabled in PHY_0xf */
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		/* registers +0..+8: single-ended dq/dm de-skew */
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		/* registers +9/+0xa: the differential dqs pair shares one value */
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	/* latch the new values into the PHY */
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}
1566 
1567 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1568 {
1569 	void __iomem *phy_base = dram->phy;
1570 	u32 ret;
1571 	u32 dis_auto_zq = 0;
1572 	u32 odt_val_up, odt_val_dn;
1573 	u32 i, j;
1574 #if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1575 	void __iomem *pctl_base = dram->pctl;
1576 	u32 mr4_d4 = 0;
1577 #endif
1578 
1579 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1580 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1581 
1582 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1583 		for (i = 0; i < 4; i++) {
1584 			j = 0x110 + i * 0x10;
1585 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1586 			       PHY_REG(phy_base, j));
1587 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1588 			       PHY_REG(phy_base, j + 0x1));
1589 		}
1590 	}
1591 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1592 	/* use normal read mode for data training */
1593 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1594 
1595 	if (dramtype == DDR4) {
1596 #if defined(DDR4_READ_GATE_PREAMBLE_MODE)
1597 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1598 #elif defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1599 		mr4_d4 = readl(pctl_base + DDR_PCTL2_INIT6) >> PCTL2_DDR4_MR4_SHIFT & PCTL2_MR_MASK;
1600 		/* 2nCK Read Preamble */
1601 		pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4 | BIT(11), DDR4);
1602 #endif
1603 	}
1604 
1605 	/* choose training cs */
1606 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1607 	/* enable gate training */
1608 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1609 	udelay(50);
1610 	ret = readl(PHY_REG(phy_base, 0x91));
1611 	/* disable gate training */
1612 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1613 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1614 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1615 
1616 #if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1617 	if (dramtype == DDR4) {
1618 		pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4, DDR4);
1619 	}
1620 #endif
1621 
1622 	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1623 
1624 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1625 		for (i = 0; i < 4; i++) {
1626 			j = 0x110 + i * 0x10;
1627 			writel(odt_val_dn, PHY_REG(phy_base, j));
1628 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1629 		}
1630 	}
1631 	return ret;
1632 }
1633 
/*
 * Write leveling for chip-select @cs.
 * Loads the current frequency set point's MR1 value (from INIT3) into
 * the PHY, silences the other rank's output via MR1 bit 12 on
 * two-rank DDR3/DDR4 systems, then runs the PHY write-leveling state
 * machine and waits (up to ~1000us) for every enabled byte to report
 * done in PHY_0x92.  Hangs forever after printing an error on
 * timeout.  Returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* MR1 of the currently active frequency set point */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	/* upper MR1 bits plus a per-type flag into PHY_0x4 */
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until the done mask in PHY_0x92 covers all enabled bytes */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1696 
/*
 * Alternating 0xaa/0x55 test data pattern.
 * NOTE(review): not referenced within this chunk - presumably used by
 * training code elsewhere in the file; confirm before changing.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1703 
/*
 * Read training for chip-select @cs (0 or 1; anything else returns -1).
 * Temporarily lowers the RX vref for DDR3 when it is at the 0x80
 * default, programs the PHY refresh timing from the controller's
 * RFSHTMG, loads the DDR4 DQ map if needed, then runs the PHY's
 * automatic read-train and waits (up to ~1000us) for completion.
 * Returns 0 on success, -1 on timeout or a reported training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* for DDR3 at the 0x80 default, drop RX vref by 0xa during training */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original DDR3 RX vref lowered above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1820 
/*
 * Write training for chip-select @cs at frequency @mhz for frequency
 * set point @dst_fsp.
 * For low-speed LPDDR3 (<= 400MHz) the PHY CL/CWL are temporarily
 * forced to 8/4 and MR2 set to 0x6, restored at the end.  Programs the
 * training address and refresh timing, runs the PHY automatic write
 * train (kicked by a manual refresh) and waits (up to ~1000us); hangs
 * after printing an error on timeout.  On success, saves the trained
 * LPDDR4/LPDDR4X write vref (and its range bit) into
 * fsp_param[dst_fsp] for later DFS use.  Returns 0 on success, -1 on
 * a reported training error.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: force CL=8/CWL=4 and MR2=0x6 during training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* train at bank 0, row 0, column 0 */
	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	/* manual refresh kicks off the training sequence */
	send_a_refresh(dram->pctl, 0x3);

	/* wait for the done flag in PHY_0x92[7] */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the LPDDR3 CL/CWL/MR2 values overridden above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1933 
1934 static int data_training(struct dram_info *dram, u32 cs,
1935 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1936 			 u32 training_flag)
1937 {
1938 	u32 ret = 0;
1939 
1940 	if (training_flag == FULL_TRAINING)
1941 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1942 				WRITE_TRAINING | READ_TRAINING;
1943 
1944 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1945 		ret = data_training_wl(dram, cs,
1946 				       sdram_params->base.dramtype,
1947 				       sdram_params->ch.cap_info.rank);
1948 		if (ret != 0)
1949 			goto out;
1950 	}
1951 
1952 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1953 		ret = data_training_rg(dram, cs,
1954 				       sdram_params->base.dramtype);
1955 		if (ret != 0)
1956 			goto out;
1957 	}
1958 
1959 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1960 		ret = data_training_rd(dram, cs,
1961 				       sdram_params->base.dramtype,
1962 				       sdram_params->base.ddr_freq);
1963 		if (ret != 0)
1964 			goto out;
1965 	}
1966 
1967 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1968 		ret = data_training_wr(dram, cs,
1969 				       sdram_params->base.dramtype,
1970 				       sdram_params->base.ddr_freq, dst_fsp);
1971 		if (ret != 0)
1972 			goto out;
1973 	}
1974 
1975 out:
1976 	return ret;
1977 }
1978 
/*
 * Run write leveling with a fixed CA/clock deskew applied, then record
 * the per-byte write-leveling results (minus that deskew) into the
 * global wrlvl_result[rank][byte] table for later use.
 *
 * Return: 0 on success, non-zero if write leveling failed on any rank.
 */
static int get_wrlvl_val(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params)
{
	int i, j, clk_skew;
	void __iomem *phy_base = dram->phy;
	u32 lp_stat;
	int ret;

	/* disable low-power features while training; remember prior state */
	lp_stat = low_power_update(dram, 0);

	/* apply a known positive skew so negative results stay observable */
	clk_skew = 0x1f;
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
			 sdram_params->base.dramtype);

	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);

	/* result registers hold (skew + wrlvl); subtract the skew back out */
	for (j = 0; j < 2; j++)
		for (i = 0; i < 4; i++)
			wrlvl_result[j][i] =
				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
				clk_skew;

	/* restore the previous low-power state */
	low_power_update(dram, lp_stat);

	return ret;
}
2007 
2008 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2009 static void init_rw_trn_result_struct(struct rw_trn_result *result,
2010 				      void __iomem *phy_base, u8 cs_num)
2011 {
2012 	int i;
2013 
2014 	result->cs_num = cs_num;
2015 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
2016 			  PHY_DQ_WIDTH_MASK;
2017 	for (i = 0; i < FSP_NUM; i++)
2018 		result->fsp_mhz[i] = 0;
2019 }
2020 
2021 static void save_rw_trn_min_max(void __iomem *phy_base,
2022 				struct cs_rw_trn_result *rd_result,
2023 				struct cs_rw_trn_result *wr_result,
2024 				u8 byte_en)
2025 {
2026 	u16 phy_ofs;
2027 	u8 dqs;
2028 	u8 dq;
2029 
2030 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2031 		if ((byte_en & BIT(dqs)) == 0)
2032 			continue;
2033 
2034 		/* Channel A or B (low or high 16 bit) */
2035 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2036 		/* low or high 8 bit */
2037 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2038 		for (dq = 0; dq < 8; dq++) {
2039 			rd_result->dqs[dqs].dq_min[dq] =
2040 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2041 			rd_result->dqs[dqs].dq_max[dq] =
2042 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2043 			wr_result->dqs[dqs].dq_min[dq] =
2044 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2045 			wr_result->dqs[dqs].dq_max[dq] =
2046 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2047 		}
2048 	}
2049 }
2050 
2051 static void save_rw_trn_deskew(void __iomem *phy_base,
2052 			       struct fsp_rw_trn_result *result, u8 cs_num,
2053 			       int min_val, bool rw)
2054 {
2055 	u16 phy_ofs;
2056 	u8 cs;
2057 	u8 dq;
2058 
2059 	result->min_val = min_val;
2060 
2061 	for (cs = 0; cs < cs_num; cs++) {
2062 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2063 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2064 		for (dq = 0; dq < 8; dq++) {
2065 			result->cs[cs].dqs[0].dq_deskew[dq] =
2066 				readb(PHY_REG(phy_base, phy_ofs + dq));
2067 			result->cs[cs].dqs[1].dq_deskew[dq] =
2068 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2069 			result->cs[cs].dqs[2].dq_deskew[dq] =
2070 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2071 			result->cs[cs].dqs[3].dq_deskew[dq] =
2072 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2073 		}
2074 
2075 		result->cs[cs].dqs[0].dqs_deskew =
2076 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2077 		result->cs[cs].dqs[1].dqs_deskew =
2078 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2079 		result->cs[cs].dqs[2].dqs_deskew =
2080 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2081 		result->cs[cs].dqs[3].dqs_deskew =
2082 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2083 	}
2084 }
2085 
/*
 * Tag the training result with the magic flag and copy it to the fixed
 * DDR address where the ddr_test_tool expects to find it.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2091 #endif
2092 
/*
 * Full training sequence for a high-frequency set point:
 *  1. derive clock/CA skew from the saved write-leveling results,
 *  2. run read-gate + read + write training per rank,
 *  3. normalize RX/TX/CA deskew so the smallest value becomes zero,
 *  4. redo read-gate training with the final deskew applied.
 *
 * Relies on wrlvl_result[] having been filled by get_wrlvl_val().
 *
 * Return: 0 on success, non-zero if any training step fails.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	/* average the write-leveling result over all enabled bytes/ranks */
	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew /
		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));

	/* center clk skew so the average DQS skew lands at 0x20 */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* LPDDR4/X: only compensate if some byte went negative */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
				if ((byte_en & BIT(i)) != 0)
					min_val = MIN(wrlvl_result[j][i], min_val);
			}

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* preload rank-0 write DQS defaults (used when 0x7a[4] bypass set) */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same procedure for rank 1 with its own wrlvl results */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift RX deskew so the minimum value becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* shift TX and CA deskew together by the common minimum */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* redo gate training now that the deskew baseline moved */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2207 
2208 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2209 {
2210 	writel(ddrconfig, &dram->msch->deviceconf);
2211 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2212 }
2213 
/*
 * Derive the NOC (memory scheduler) timing fields that depend on the
 * detected bus width and the controller's burst length, then write the
 * whole timing set to the scheduler registers.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* bw in bits (8/16/32); bl from MSTR burst_rdwr field, in beats */
	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		/* masked-write size and write-to-masked-write gap (LP4 only) */
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
2256 
/*
 * Configure the DDR "split" feature, used when the high 16 bits of the
 * bus back less capacity than the low 16 bits (asymmetric widths):
 * compute the boundary where the high half stops contributing and
 * program the split size/mode in the DDR GRF. If the capacity is
 * symmetric, leave the split bypassed (the goto out path).
 *
 * Return: always 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* cs0 high half is smaller: scale cs0 cap by the row delta */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		/* cs1 high half is smaller (or absent entirely) */
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
				cap_info->cs1_high16bit_row));
	} else {
		goto out;
	}
	/* split boundary expressed in 16MiB (1 << 24) units */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	/* enable split (bypass = 0) with the computed mode and size */
	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route all AXI traffic through the scheduler (no bypass) */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2302 
/*
 * Put the DDR split logic into bypass (split size 0) if it is not
 * already bypassed; used before capacity detection so the whole bus
 * is visible.
 */
static void split_bypass(struct dram_info *dram)
{
	/* already bypassed: nothing to do */
	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
		return;

	/* bypass split */
	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (0x1 << SPLIT_BYPASS_OFFSET) |
		     (0x0 << SPLIT_SIZE_OFFSET));
}
2316 
/*
 * Final system-level DRAM configuration: program the scheduler address
 * map, encode the detected geometry into the pmugrf os_reg[2]/[3]
 * (read back later by the OS/loader), set the per-CS device sizes in
 * the scheduler, and refresh the NOC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* cs_pst: bit position of the CS address bit (ADDRMAP0) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		/* if CS splits above 256MB, cs0 region is padded to 2^cs_pst */
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize: per-CS capacity in 64MB units, cs1 in bits [15:8] */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2349 
/*
 * Enable DRAM low-power features: hardware low-power request routing in
 * the DDR GRF, automatic self-refresh / power-down in the controller
 * (gated on the configured sr_idle / pd_idle), and clock-stop.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	/* magic low-power routing value for ddr_grf_con[1] */
	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* pick the per-dramtype low-power enable bit (upper 16 = write mask) */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	/* PWRCTL[3]: enable clock stop when idle */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2380 
/*
 * Publish boot information as Rockchip ATAGs for later loader stages:
 * debug-serial settings, the DDR memory bank layout (accounting for
 * 3/4-row parts and split-mode capacity reduction), and SoC info.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;
	u32 split, split_size;
	u64 reduce_cap = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* start a fresh tag list and add the serial tag first */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL, &t_serial);

	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	if (cap_info->row_3_4) {
		/* 3/4-row devices expose only 75% of the nominal capacity */
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
		/* split active: capacity above split_size is half-width */
		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* cs_pst: bit position of the CS address bit (ADDRMAP0) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	if (cs_cap[1] && cs_pst > 27) {
		/* two discontiguous banks: cs1 starts at 1 << cs_pst */
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
	} else {
		/* single contiguous bank covering both chip-selects */
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
	}

	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);

	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0x1;
	t_socinfo.name = 0x1126;
	t_socinfo.flags = SOC_FLAGS_TDBT;
	atags_set_tag(ATAG_SOC_INFO, &t_socinfo);
}
2444 
/*
 * Print the detected DDR configuration, including the effective split
 * size (0 when the split logic is bypassed).
 */
static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
{
	u32 split;

	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
		split = 0;
	else
		/* NOTE(review): mask applied without SPLIT_SIZE_OFFSET shift;
		 * presumably the size field starts at bit 0 — confirm */
		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
			SPLIT_SIZE_MASK;

	sdram_print_ddr_info(&sdram_params->ch.cap_info,
			     &sdram_params->base, split);
}
2459 
/*
 * For DDR3/DDR4: derive the actual bus width and byte-lane mapping from
 * the read-gate training result (@rg_result bit i set = byte lane i
 * failed), update the shared dq_map_info and the controller register
 * set if the mapping changed.
 *
 * Return: 1 if the byte map changed (caller must re-init), 0 if it
 * matched, -1 on unsupported dramtype or an implausible lane count.
 */
static int modify_ddr34_bw_byte_map(u8 rg_result, struct rv1126_sdram_params *sdram_params)
{
	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info = (struct dq_map_info *)
				       ((void *)common_info + index->dq_map_index.offset * 4);
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 byte_map = 0;
	u32 byte = 0;
	u32 byte_map_shift;
	int i;

	/* byte map for DDR3 lives in the top byte, DDR4 in the bottom */
	if (dramtype == DDR3)
		byte_map_shift = 24;
	else if (dramtype == DDR4)
		byte_map_shift = 0;
	else
		return -1;

	/* assign logical byte numbers to the lanes that passed training */
	for (i = 0; i < 4; i++) {
		if ((rg_result & BIT(i)) == 0) {
			byte_map |= byte << (i * 2);
			byte++;
		}
	}
	/* only 8/16/32-bit widths are plausible */
	if (byte != 1 && byte != 2 && byte != 4) {
		printascii("DTT result is abnormal: ");
		printdec(byte);
		printascii("byte\n");
		return -1;
	}
	cap_info->bw = byte / 2;
	/* fill the remaining (failed) lanes with the leftover byte numbers */
	for (i = 0; i < 4; i++) {
		if ((rg_result & BIT(i)) != 0) {
			byte_map |= byte << (i * 2);
			byte++;
		}
	}

	/* if the mapping changed, persist it and regenerate pctl params */
	if ((u8)byte_map != (u8)(map_info->byte_map[0] >> byte_map_shift)) {
		clrsetbits_le32(&map_info->byte_map[0],
				0xff << byte_map_shift, byte_map << byte_map_shift);
		pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, dramtype);
		return 1;
	}

	return 0;
}
2508 
/*
 * Core SDRAM bring-up: reset/clock sequencing, PHY and controller
 * configuration, mode-register programming per dramtype, and initial
 * read-gate training.
 *
 * @post_init: 0 for the first (detection) pass — DTT failures on DDR3
 * are tolerated so the bus width can be narrowed; non-zero for the
 * final pass where any training failure is fatal.
 *
 * Return: 0 on success, -1 on training/ID failure.
 */
int sdram_init_(struct dram_info *dram, struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp, tmp;
	int delay = 1000;

	rkclk_configure_ddr(dram, sdram_params);

	/* full reset, then release blocks one by one while configuring */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 trefi;

	/* halve tREFI for extended-temperature operation */
	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	/* dfi_init_complete_en + dfi_init_start */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait for the controller to leave the Init state */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) {
		udelay(1);
		if (delay-- <= 0) {
			printascii("ERROR: Cannot wait dfi_init_done!\n");
			while (1)
				;
		}
	}

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	} else if (sdram_params->base.dramtype == DDR4) {
		/* DDR4 MR6: pulse VrefDQ training-enable (BIT(7)) twice, then clear */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7) >> PCTL2_DDR4_MR6_SHIFT & PCTL2_MR_MASK;
		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
		pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp, DDR4);
	}

	/* detection pass on DDR3: enable all byte lanes before training */
	if (sdram_params->base.dramtype == DDR3 && post_init == 0)
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
	tmp = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) & 0xf;

	if (tmp != 0) {
		if (post_init != 0) {
			printascii("DTT cs0 error\n");
			return -1;
		}
		/* DDR3 with only some lanes failing: bus may be narrower */
		if (sdram_params->base.dramtype != DDR3 || tmp == 0xf)
			return -1;
	}

	if (sdram_params->base.dramtype == DDR3 && post_init == 0) {
		if (modify_ddr34_bw_byte_map((u8)tmp, sdram_params) != 0)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4) {
		/* verify LPDDR4 by reading the fixed MR14 default */
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);

		if (mr_tmp != 0x4d)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* convert PHY vref code to the controller's VrefDQ scale */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2650 
/*
 * Detect the DRAM geometry (columns, banks, rows, rank, bus width) and
 * fill in cap_info. For LPDDR4/X the density is read directly from MR8;
 * other types are probed by write/read-back via the sdram_detect_*
 * helpers. Rank and bus width are probed with read-gate training.
 *
 * Return: 0 on success, -1 (as u64) on detection failure.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;
	u32 i, dq_map;
	u32 byte1 = 0, byte0 = 0;

	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe col, bank, then dbw */
			if (dram_type == DDR3)
				coltmp = 11;
			else
				coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, pctl_base, coltmp, bktmp);
			if (dram_type != LPDDR3)
				sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank layout, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, pctl_base, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4/X: geometry is encoded in mode register 8 */
		cap_info->col = 10;
		cap_info->bk = 3;
		mr8 = read_mr(dram, 1, 8, dram_type);
		/* MR8 I/O width field: 0 means x16 (dbw = 1) — per JESD209-4 */
		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
		/* density field → row count */
		mr8 = (mr8 >> 2) & 0xf;
		if (mr8 >= 0 && mr8 <= 6) {
			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		} else if (mr8 == 0xc) {
			cap_info->cs0_row = 13;
		} else {
			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
			goto cap_err;
		}
		if (cap_info->dbw == 0)
			cap_info->cs0_row++;
		/* odd density codes are the 3/4-row (x1.5) variants */
		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
		if (cap_info->cs0_row >= 17) {
			printascii("Cap ERR: ");
			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
			goto cap_err;
			// cap_info->cs0_row = 16;
			// cap_info->row_3_4 = 0;
		}
	}

	/* disable low-power features while probing rank/width */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank 2 exists iff read-gate training passes on cs1 */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	/* enable all byte lanes before probing the bus width */
	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

	if (dram_type != DDR3) {
		if ((data_training_rg(dram, 0, dram_type) & 0xf) == 0) {
			cap_info->bw = 2;
		} else {
			/* 32-bit failed: find the two lanes mapped to DQ0/DQ1
			 * and retry as a 16-bit interface
			 */
			dq_map = readl(PHY_REG(phy_base, 0x4f));
			for (i = 0; i < 4; i++) {
				if (((dq_map >> (i * 2)) & 0x3) == 0)
					byte0 = i;
				if (((dq_map >> (i * 2)) & 0x3) == 1)
					byte1 = i;
			}
			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
					BIT(byte0) | BIT(byte1));
			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
				cap_info->bw = 1;
			else
				cap_info->bw = 0;
		}
	}

	/* restore the saved low-power configuration */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	if (dram_type == LPDDR3)
		sdram_detect_dbw(cap_info, dram_type);

	return 0;
cap_err:
	return -1;
}
2776 
/*
 * Probe the actual row count of chip-select 1 by writing a pattern at
 * candidate row boundaries above the cs0 region and checking for
 * aliasing back to the cs1 base address.
 *
 * Return: detected cs1 row count, or 0 if rank != 2 / nothing matched.
 */
static int dram_detect_cs1_row(struct dram_info *dram,
			       struct rv1126_sdram_params *sdram_params,
			       unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;
	u32 cs_pst;
	u32 cs_add = 0;
	u32 max_row;

	if (cap_info->rank == 2) {
		/* cs_pst: bit position of the CS address bit (ADDRMAP0) */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst < 28)
			cs_add = 1;

		cs0_cap = 1 << cs_pst;

		if (sdram_params->base.dramtype == DDR4) {
			/* fold bank groups into the effective bank bits */
			if (cap_info->dbw == 0)
				bktmp = cap_info->bk + 2;
			else
				bktmp = cap_info->bk + 1;
		} else {
			bktmp = cap_info->bk;
		}
		bw = cap_info->bw;
		coltmp = cap_info->col;

		/* compare only the bytes actually wired to the bus */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* highest address bit testable within the 32-bit window */
		max_row = (cs_pst == 31) ? 30 : 31;

		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;

		row = (cap_info->cs0_row > max_row) ? max_row :
			cap_info->cs0_row;

		/* walk candidate row counts downward until pattern sticks */
		for (; row > 12; row--) {
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    (u32)cs0_cap +
				    (1ul << (row + bktmp + coltmp +
					     cs_add + bw - 1ul)));

			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
			writel(PATTERN, test_addr);

			/* pattern readable and base untouched => row valid */
			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
2844 
2845 /* return: 0 = success, other = fail */
2846 static int sdram_init_detect(struct dram_info *dram,
2847 			     struct rv1126_sdram_params *sdram_params)
2848 {
2849 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2850 	u32 ret;
2851 	u32 sys_reg = 0;
2852 	u32 sys_reg3 = 0;
2853 
2854 	if (sdram_init_(dram, sdram_params, 0)) {
2855 		if (sdram_params->base.dramtype == DDR3) {
2856 			if (sdram_init_(dram, sdram_params, 0))
2857 				return -1;
2858 		} else {
2859 			return -1;
2860 		}
2861 	}
2862 
2863 	if (sdram_params->base.dramtype == DDR3) {
2864 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2865 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2866 			return -1;
2867 	}
2868 
2869 	split_bypass(dram);
2870 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2871 		return -1;
2872 
2873 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2874 				   sdram_params->base.dramtype);
2875 	ret = sdram_init_(dram, sdram_params, 1);
2876 	if (ret != 0)
2877 		goto out;
2878 
2879 	cap_info->cs1_row =
2880 		dram_detect_cs1_row(dram, sdram_params, 0);
2881 	if (cap_info->cs1_row) {
2882 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2883 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2884 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2885 				    sys_reg, sys_reg3, 0);
2886 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2887 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2888 	}
2889 
2890 	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2891 	split_setup(dram, sdram_params);
2892 out:
2893 	return ret;
2894 }
2895 
2896 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2897 {
2898 	u32 i;
2899 	u32 offset = 0;
2900 	struct ddr2_3_4_lp2_3_info *ddr_info;
2901 
2902 	if (!freq_mhz) {
2903 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2904 		if (ddr_info)
2905 			freq_mhz =
2906 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2907 				DDR_FREQ_MASK;
2908 		else
2909 			freq_mhz = 0;
2910 	}
2911 
2912 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2913 		if (sdram_configs[i].base.ddr_freq == 0 ||
2914 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2915 			break;
2916 	}
2917 	offset = i == 0 ? 0 : i - 1;
2918 
2919 	return &sdram_configs[offset];
2920 }
2921 
/*
 * pctl timing registers that depend on the DRAM frequency and therefore
 * must be re-written into the target frequency set-point's shadow register
 * copy before a rate switch (consumed by pre_set_rate()).
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2945 
/*
 * Per-frequency PHY timing registers updated alongside the pctl shadow
 * registers — these carry the CL/CWL/AL related fields (see the
 * "cl cwl al update" loop in pre_set_rate()).
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2951 
/*
 * Program the controller/PHY settings for the target frequency set-point
 * @dst_fsp ahead of the actual clock switch in ddr_set_rate():
 *
 * - copies the frequency-dependent pctl registers into the dst_fsp shadow
 *   register set (under a software quasi-dynamic request/ack)
 * - copies the CL/CWL/AL related PHY timing registers to the per-fsp PHY
 *   register copy
 * - updates drive strength/ODT and, for LPDDR4/4X, pre-writes the mode
 *   registers, mirroring several of them into PHY regs 0x17..0x1d
 * - refreshes the NoC timing registers
 *
 * @dst_fsp_lp4 selects how the LPDDR4 FSP bits in MR13 are encoded.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/*
	 * pctl timing update: for each register of interest, scan the
	 * 0xFFFFFFFF-terminated table; 'find' remembers the last hit so the
	 * next search resumes there (the table is in register order).
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI (RFSHTMG bits [27:16]) for extended-temperature parts */
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/*
	 * phy timing update: fsp 0 lives at the base register addresses,
	 * fsp 1..3 in per-fsp blocks starting near reg 0x382.
	 */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13: clear bits 7:6, then set the FSP bits per dst_fsp_lp4 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		/* mirror the MR13 value into PHY reg 0x1b */
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3, mirrored into PHY reg 0x19 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1, mirrored into PHY reg 0x17 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2, mirrored into PHY reg 0x18 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 (ODT), mirrored into PHY reg 0x1a */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 (no PHY mirror register for this one) */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22, mirrored into PHY reg 0x1d */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14, mirrored into PHY reg 0x1c */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
3077 
/*
 * Record the settings active for frequency set-point @dst_fsp into
 * fsp_param[dst_fsp]: read-ODT/drive-strength/vref values read back from
 * the PHY, ODT fields decoded from the controller's mode-register images,
 * and the NoC timings.  The entry is marked valid with FSP_FLAG; the table
 * is later copied to DRAM (copy_fsp_param_to_ddr()) for the OS driver.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4/4X use pull-down-only read ODT; others follow odt_info */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* pick the read-ODT value register matching the enabled direction */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* decode drive strength / ODT from the per-fsp mode register images */
	if (sdram_params->base.dramtype == DDR3) {
		/* DDR3 MR1 (from INIT3): output drive strength and RTT_NOM */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		/* DDR4 MR1 (from INIT3): drive strength and RTT_NOM */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		/* LPDDR3 MR3 (from INIT4): drive strength; ODT set elsewhere */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		/* LPDDR4 MR3 (PDDS) and MR11 (DQ/CA ODT) */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * Derive vref_ca per channel as the midpoint of the trained
		 * window read back from the PHY status registers, then OR in
		 * bit6 of reg 0x1e (presumably the vref range select bit —
		 * NOTE(review): confirm against the PHY register map).
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* snapshot the NoC timings computed for this set-point */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	p_fsp_param->flag = FSP_FLAG;
}
3193 
3194 #ifndef CONFIG_SPL_KERNEL_BOOT
3195 static void copy_fsp_param_to_ddr(void)
3196 {
3197 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3198 	       sizeof(fsp_param));
3199 }
3200 #endif
3201 
3202 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3203 			     struct sdram_cap_info *cap_info, u32 dram_type,
3204 			     u32 freq)
3205 {
3206 	u64 cs0_cap;
3207 	u32 die_cap;
3208 	u32 trfc_ns, trfc4_ns;
3209 	u32 trfc, txsnr;
3210 	u32 txs_abort_fast = 0;
3211 	u32 tmp;
3212 
3213 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3214 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3215 
3216 	switch (dram_type) {
3217 	case DDR3:
3218 		if (die_cap <= DIE_CAP_512MBIT)
3219 			trfc_ns = 90;
3220 		else if (die_cap <= DIE_CAP_1GBIT)
3221 			trfc_ns = 110;
3222 		else if (die_cap <= DIE_CAP_2GBIT)
3223 			trfc_ns = 160;
3224 		else if (die_cap <= DIE_CAP_4GBIT)
3225 			trfc_ns = 260;
3226 		else
3227 			trfc_ns = 350;
3228 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3229 		break;
3230 
3231 	case DDR4:
3232 		if (die_cap <= DIE_CAP_2GBIT) {
3233 			trfc_ns = 160;
3234 			trfc4_ns = 90;
3235 		} else if (die_cap <= DIE_CAP_4GBIT) {
3236 			trfc_ns = 260;
3237 			trfc4_ns = 110;
3238 		} else if (die_cap <= DIE_CAP_8GBIT) {
3239 			trfc_ns = 350;
3240 			trfc4_ns = 160;
3241 		} else {
3242 			trfc_ns = 550;
3243 			trfc4_ns = 260;
3244 		}
3245 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3246 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3247 		break;
3248 
3249 	case LPDDR3:
3250 		if (die_cap <= DIE_CAP_4GBIT)
3251 			trfc_ns = 130;
3252 		else
3253 			trfc_ns = 210;
3254 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3255 		break;
3256 
3257 	case LPDDR4:
3258 	case LPDDR4X:
3259 		if (die_cap <= DIE_CAP_2GBIT)
3260 			trfc_ns = 130;
3261 		else if (die_cap <= DIE_CAP_4GBIT)
3262 			trfc_ns = 180;
3263 		else if (die_cap <= DIE_CAP_8GBIT)
3264 			trfc_ns = 280;
3265 		else
3266 			trfc_ns = 380;
3267 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3268 		break;
3269 
3270 	default:
3271 		return;
3272 	}
3273 	trfc = (trfc_ns * freq + 999) / 1000;
3274 
3275 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3276 		switch (pctl_regs->pctl[i][0]) {
3277 		case DDR_PCTL2_RFSHTMG:
3278 			tmp = pctl_regs->pctl[i][1];
3279 			/* t_rfc_min */
3280 			tmp &= ~((u32)0x3ff);
3281 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3282 			pctl_regs->pctl[i][1] = tmp;
3283 			break;
3284 
3285 		case DDR_PCTL2_DRAMTMG8:
3286 			if (dram_type == DDR3 || dram_type == DDR4) {
3287 				tmp = pctl_regs->pctl[i][1];
3288 				/* t_xs_x32 */
3289 				tmp &= ~((u32)0x7f);
3290 				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;
3291 
3292 				if (dram_type == DDR4) {
3293 					/* t_xs_abort_x32 */
3294 					tmp &= ~((u32)(0x7f << 16));
3295 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
3296 					/* t_xs_fast_x32 */
3297 					tmp &= ~((u32)(0x7f << 24));
3298 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
3299 				}
3300 
3301 				pctl_regs->pctl[i][1] = tmp;
3302 			}
3303 			break;
3304 
3305 		case DDR_PCTL2_DRAMTMG14:
3306 			if (dram_type == LPDDR3 ||
3307 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3308 				tmp = pctl_regs->pctl[i][1];
3309 				/* t_xsr */
3310 				tmp &= ~((u32)0xfff);
3311 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3312 				pctl_regs->pctl[i][1] = tmp;
3313 			}
3314 			break;
3315 
3316 		default:
3317 			break;
3318 		}
3319 	}
3320 }
3321 
/*
 * Switch the DRAM to @freq MHz using frequency set-point @dst_fsp.
 *
 * Sequence: quiesce (disable low power and auto-refresh, enter
 * self-refresh), pre-program controller/PHY timings for the new rate,
 * re-lock the DPLL and PHY PLL with the PHY held in reset, switch the
 * active set-point, exit self-refresh, rewrite the mode registers and
 * retrain at the new frequency.
 *
 * @cur_freq:    previous frequency in MHz (not referenced in this body)
 * @dst_fsp:     target controller/PHY frequency set-point (0..3)
 * @dst_fsp_lp4: LPDDR4 FSP selector used for MR13
 * @training_en: not referenced here; training always runs via
 *               high_freq_training()
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	int delay = 1000;

	/* pick the config for the new rate, keep the detected rank/width */
	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* spin until the controller has left self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * MR1 bit0 encodes the DLL state with opposite polarity on DDR3
	 * (1 = DLL disable) and DDR4 (1 = DLL enable); derive the target
	 * DLL-off mode from the destination fsp's INIT3 image.
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	/* same decode for the currently-active fsp */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* if the DLL is currently on, turn it off via MR1 before the switch */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* enable the ddrphy buffer (pmugrf soc_con0) across the clock switch */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both the old and the new fsp */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate the scheduler and controller clocks while the PLLs change */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* hold the PHY in reset, reprogram DPLL and PHY PLL, then release */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* restore the buffer setting and ungate the clocks */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* wait up to ~1ms for the DFI init handshake, hang hard otherwise */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) {
		udelay(1);
		if (delay-- <= 0) {
			printascii("ERROR: Cannot wait DFI_INIT_COMPLETE\n");
			while (1)
				;
		}
	}

	/* switch the controller (MSTR2) and PHY (reg 0xc[3:2]) set-point */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit5 (presumably an update strobe — confirm) */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* re-issue the mode registers for the new set-point */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			/* DLL stays on: issue a DLL reset via MR0 first */
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			/*
			 * update DDR4 VrefDQ: MR6 written twice with bit7 set
			 * (VrefDQ training enable), then once with it clear
			 */
			pctl_write_mr(dram->pctl, 3, 6,
				      (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK, dramtype);
			pctl_write_mr(dram->pctl, 3, 6,
				      (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK, dramtype);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* MR13: select the operating FSP via bit7 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3518 
/*
 * Cycle through the frequencies defined in the drive/ODT info, training
 * at each set-point and snapshotting its parameters (via ddr_set_rate()),
 * then settle on f0 as the final boot frequency.  With
 * CONFIG_SPL_KERNEL_BOOT only the final f0 switch is performed.
 *
 * Note: the @dram argument is unused; the global dram_info is passed to
 * ddr_set_rate() instead.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	/* f0 is the final operating frequency */
	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* clear both the DRAM copy and the local fsp parameter table */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* train/save fsp 1..3 at f1..f3, then finish at f0 on fsp 0 */
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3575 
3576 int get_uart_config(void)
3577 {
3578 	struct sdram_head_info_index_v2 *index =
3579 		(struct sdram_head_info_index_v2 *)common_info;
3580 	struct global_info *gbl_info;
3581 
3582 	gbl_info = (struct global_info *)((void *)common_info +
3583 		index->global_index.offset * 4);
3584 
3585 	return gbl_info->uart_info;
3586 }
3587 
/* return: 0 = success, other = fail */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* fixed peripheral base addresses: no DM/DT available this early */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Validate the common_info blob: version 2 layout, section sizes
	 * matching the structs compiled here (sizes/offsets are in 32-bit
	 * words), and no missing sections.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0 ||
	    index->lp4x_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	/* self-refresh / power-down idle thresholds from the blob */
	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];
	/* Kconfig dram type 8 means LPDDR4X: patch every config entry */
	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
		sdram_configs[j].base.dramtype = LPDDR4X;
	#endif
	/*
	 * Apply the 2T timing choice to bit 10 of the first pctl table
	 * entry (presumably MSTR.en_2t_timing_mode — confirm against the
	 * pctl register table layout).
	 */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	/* train every frequency set-point, settle on the final frequency */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	/* publish DDR layout/state to atags for later boot stages */
	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3678 #endif /* CONFIG_TPL_BUILD */
3679