xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 26663c2dc5b80cce67e4f8ad77e5efbb7ae05b88)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/* Per-instance state for the RV1126 DRAM init path. */
struct dram_info {
	void __iomem *pctl;		/* uMCTL2 DDR controller base */
	void __iomem *phy;		/* DDR PHY base */
	struct rv1126_cru *cru;		/* clock & reset unit registers */
	struct msch_regs *msch;		/* memory scheduler registers */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* general register file */
	struct ram_info info;		/* RAM base/size reported upward */
	struct rv1126_pmugrf *pmugrf;	/* PMU general register file */
	u32 sr_idle;	/* self-refresh idle count - presumably in ctrl cycles; confirm */
	u32 pd_idle;	/* power-down idle count - presumably in ctrl cycles; confirm */
};
55 
56 #define GRF_BASE_ADDR			0xfe000000
57 #define PMU_GRF_BASE_ADDR		0xfe020000
58 #define DDR_GRF_BASE_ADDR		0xfe030000
59 #define BUS_SGRF_BASE_ADDR		0xfe0a0000
60 #define SERVER_MSCH_BASE_ADDR		0xfe800000
61 #define CRU_BASE_ADDR			0xff490000
62 #define DDR_PHY_BASE_ADDR		0xff4a0000
63 #define UPCTL2_BASE_ADDR		0xffa50000
64 
65 #define SGRF_SOC_CON2			0x8
66 #define SGRF_SOC_CON12			0x30
67 #define SGRF_SOC_CON13			0x34
68 
69 struct dram_info dram_info;
70 
71 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
72 struct rv1126_sdram_params sdram_configs[] = {
73 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
74 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
75 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
76 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
77 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
78 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
79 	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
80 };
81 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
82 struct rv1126_sdram_params sdram_configs[] = {
83 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
84 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
85 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
86 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
87 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
88 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
89 	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
90 };
91 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
92 struct rv1126_sdram_params sdram_configs[] = {
93 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
94 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
95 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
96 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
97 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
98 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
99 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
100 };
101 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
102 struct rv1126_sdram_params sdram_configs[] = {
103 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
104 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
105 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
106 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
107 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
108 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
109 	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
110 };
111 #endif
112 
113 u32 common_info[] = {
114 	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
115 };
116 
117 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
118 static struct rw_trn_result rw_trn_result;
119 #endif
120 
121 static struct rv1126_fsp_param fsp_param[MAX_IDX];
122 
123 static u8 lp3_odt_value;
124 
125 static s8 wrlvl_result[2][4];
126 
/*
 * DDR configuration 0-9 (DDR3/LPDDR3/LPDDR4 geometries).
 * Encoding matches the key built in calculate_ddrconfig():
 *   [8]   rank - 1
 *   [7:5] cs0_row - 13
 *   [4]   NOTE(review): never set in the lookup key (masked by 0x1f);
 *         meaning not derivable from this file - confirm
 *   [3]   1 when the part has 3 bank-address bits
 *   [2:0] bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
140 
/*
 * DDR configuration 10-21 (DDR4 geometries).
 * Encoding matches the key built in calculate_ddrconfig():
 *   [7]   rank - 1
 *   [6:4] cs0_row - 13
 *   [3]   set for the dual-rank equal-size configs preferred by the
 *         cs==2 && row==cs1_row fast path (17..20) - exact meaning
 *         not derivable from this file; confirm
 *   [2:1] bw
 *   [0]   die_bw
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
156 
/*
 * DDR configuration 22-28, second-pass table scanned after
 * ddr_cfg_2_rbc[] in calculate_ddrconfig().  Same bit encoding as
 * ddr_cfg_2_rbc[]: [8] rank-1, [7:5] cs0_row-13, [3] 3 bank bits,
 * [2:0] bw+col-10.
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
167 
/*
 * Pairs of {DDR4 ddrconfig, equivalent non-DDR4 ddrconfig}.
 * calculate_ddrconfig() maps a matched DDR4 config (10-21) to its
 * equivalent (right column); set_ctl_address_map() does the reverse
 * lookup to recover the DDR4 index before programming ADDRMAP.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
182 
/*
 * Per-ddrconfig image of the uMCTL2 address-map registers.  Row N is
 * copied verbatim onto DDR_PCTL2_ADDRMAP0..ADDRMAP8 (9 consecutive
 * u32 registers) by set_ctl_address_map().  Each initializer lists
 * only 8 values, so the 9th element (ADDRMAP8) defaults to 0.
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
246 
/*
 * DQ-line selection triples used by the training/deskew code.
 * NOTE(review): the meaning of the three columns (presumably a DQ
 * index and two alternate register/bit positions) is not derivable
 * from this chunk - confirm against the PHY training code that
 * consumes this table.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
271 
/* Skew-group base addresses, indexed CS0-A/CS0-B/CS1-A/CS1-B. */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/*
 * PHY register offsets holding write-leveling results,
 * [rank][byte lane]; paired with the wrlvl_result[][] cache above.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/* PHY skew-register offsets for each rank/direction/DQS lane. */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
302 
/*
 * Drive the DDR controller and PHY reset request lines.
 *
 * @ctl_srstn:  controller core (and AXI) reset request
 * @ctl_psrstn: controller APB reset request
 * @phy_srstn:  PHY core reset request
 * @phy_psrstn: PHY APB reset request
 *
 * NOTE(review): polarity (assert vs release) is encoded inside the
 * UPCTL2_*_REQ/DDRPHY_*_REQ macros - confirm in sdram_rv1126.h.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	/* controller resets are routed through the secure GRF, not the CRU */
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	/* PHY resets live in CRU softrst_con[12] */
	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
314 
/*
 * Program the DPLL to @hz.
 *
 * Sequence: park the DPLL mux on the 24 MHz oscillator, write the
 * divider set, optionally enable spread spectrum (SSMOD), wait for
 * lock (bounded poll, ~1 ms), then switch the mux back to the PLL.
 * NOTE(review): a lock timeout is silently ignored - the caller gets
 * no error indication.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock-poll budget in 1 us steps */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* 1 = integer mode; cleared if SSMOD is used */

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	/* NOTE(review): SSMOD parameters are packed into info_2t - confirm */
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* pick post-dividers so the VCO stays in range for a 24 MHz ref */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* out = 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the oscillator while the PLL is reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* spread spectrum needs fractional mode */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* wait for PLL lock, at most ~1 ms */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
380 
381 static void rkclk_configure_ddr(struct dram_info *dram,
382 				struct rv1126_sdram_params *sdram_params)
383 {
384 	/* for inno ddr phy need freq / 2 */
385 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
386 }
387 
/*
 * Select a "ddrconfig" - an index into addrmap[] - matching the
 * detected DRAM geometry (rank, row, col, bank, bus/die width).
 *
 * A candidate matches when its col/bank bits are equal and its row
 * and rank fields are >= the detected values (unused high bits are
 * disabled later by set_ctl_address_map()).  DDR4 hits are finally
 * translated through d4_rbc_2_d3_rbc[].
 *
 * Returns the matched index (0-28); on failure a warning is printed
 * and the (u32)-1 sentinel falls through to the caller.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* sentinel: > 28 means "no match" */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* prefer configs 17-20 for two equal-size full ranks */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* generic DDR4 match: exact width bits, row/rank fit */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* prefer configs 5-7 for two equal-size 8-bank ranks */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/* first pass: configs 0-8 */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* second pass: configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last resort for a single large 8-bank rank */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* (u32)-1 sentinel also trips this check */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* fold DDR4 configs onto their shared addrmap entries */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
492 
/*
 * Open a quasi-dynamic programming window on the uMCTL2: clear
 * sw_done so protected registers may be written.  Must be paired
 * with sw_set_ack().
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
500 
501 static void sw_set_ack(struct dram_info *dram)
502 {
503 	void __iomem *pctl_base = dram->pctl;
504 
505 	/* set sw_done=1 */
506 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
507 	while (1) {
508 		/* wait programming done */
509 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
510 				PCTL2_SW_DONE_ACK)
511 			break;
512 	}
513 }
514 
/*
 * Program the uMCTL2 ADDRMAP0..8 registers for the chosen ddrconfig,
 * then disable the row-address bits the detected part does not have.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	/* DDR4 configs were folded by calculate_ddrconfig(); unfold them */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/*
	 * unused row set to 0xf (disabled): ADDRMAP6/7 hold row bits
	 * 12..17 as 8-bit fields, four per 32-bit register
	 */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	/* LPDDR3 3/4-row parts flag the partial top row in ADDRMAP6[31] */
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	/* half-width DDR4 needs PCCFG[8] - presumably bl_exp_mode; confirm */
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: disable the CS-bit mapping in ADDRMAP0 */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
555 
/*
 * Configure the DDR PHY PLL for @freq (Hz).
 *
 * Called twice: first with @wait=0 to program the dividers, then
 * with @wait=1 to power the PLL up and spin (unbounded) for lock.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		/* release power-down and wait for the lock flag */
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* divider ladder: lower target freq -> larger post-divide */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is 9 bits: low byte in reg 0x50, bit 8 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
594 
/*
 * {PHY encoding, drive strength in ohm} for DDR3, sorted weakest
 * (highest ohm) first.  set_ds_odt() scans these tables from the
 * bottom and picks the first entry whose ohm value is >= the request.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

/* {PHY encoding, ODT in ohm} for DDR3; entry 0 disables termination */
static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

/* {PHY encoding, drive strength in ohm} shared by DDR4 and LPDDR3 */
static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};
673 
674 static u16 d4lp3_phy_odt_2_ohm[][2] = {
675 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
676 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
677 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
678 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
679 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
680 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
681 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
682 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
683 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
684 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
685 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
686 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
687 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
688 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
689 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
690 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
691 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
692 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
693 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
694 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
695 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
696 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
697 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
698 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
699 };
700 
/* {PHY encoding, drive strength in ohm} for LPDDR4/LPDDR4X */
static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

/* {PHY encoding, ODT in ohm} for LPDDR4/LPDDR4X; entry 0 disables */
static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm,	87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};
753 
754 static u32 lp4_odt_calc(u32 odt_ohm)
755 {
756 	u32 odt;
757 
758 	if (odt_ohm == 0)
759 		odt = LPDDR4_DQODT_DIS;
760 	else if (odt_ohm <= 40)
761 		odt = LPDDR4_DQODT_40;
762 	else if (odt_ohm <= 48)
763 		odt = LPDDR4_DQODT_48;
764 	else if (odt_ohm <= 60)
765 		odt = LPDDR4_DQODT_60;
766 	else if (odt_ohm <= 80)
767 		odt = LPDDR4_DQODT_80;
768 	else if (odt_ohm <= 120)
769 		odt = LPDDR4_DQODT_120;
770 	else
771 		odt = LPDDR4_DQODT_240;
772 
773 	return odt;
774 }
775 
776 static void *get_ddr_drv_odt_info(u32 dramtype)
777 {
778 	struct sdram_head_info_index_v2 *index =
779 		(struct sdram_head_info_index_v2 *)common_info;
780 	void *ddr_info = 0;
781 
782 	if (dramtype == DDR4)
783 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
784 	else if (dramtype == DDR3)
785 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
786 	else if (dramtype == LPDDR3)
787 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
788 	else if (dramtype == LPDDR4)
789 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
790 	else if (dramtype == LPDDR4X)
791 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
792 	else
793 		printascii("unsupported dram type\n");
794 	return ddr_info;
795 }
796 
/*
 * Compute and program the LPDDR4/LPDDR4X CA (MR12) and DQ (MR14)
 * Vref mode-register values for frequency set point @dst_fsp.
 *
 * The raw vref values from the loader blob (per-mille of the supply,
 * odt-on vs odt-off variants chosen by @freq_mhz) are clamped and
 * converted to the JEDEC two-range encoding: bit 6 selects the range,
 * bits [5:0] the step within it.
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* clamp to the encodable window, then encode in 4-unit steps */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		/* LPDDR4X: CA vref is VDD2-referenced, rescale by 1.1/0.6 */
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		/*
		 * NOTE(review): dq uses /6 steps while ca uses /4 above,
		 * and dq is not rescaled by 11/6 - looks deliberate for
		 * the VDDQ-referenced DQ vref, but confirm against the
		 * LPDDR4X MR14 encoding.
		 */
		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* MR12/MR14 live in quasi-dynamic INIT6/INIT7; bracket with sw req/ack */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
867 
868 static void set_ds_odt(struct dram_info *dram,
869 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
870 {
871 	void __iomem *phy_base = dram->phy;
872 	void __iomem *pctl_base = dram->pctl;
873 	u32 dramtype = sdram_params->base.dramtype;
874 	struct ddr2_3_4_lp2_3_info *ddr_info;
875 	struct lp4_info *lp4_info;
876 	u32 i, j, tmp;
877 	const u16 (*p_drv)[2];
878 	const u16 (*p_odt)[2];
879 	u32 drv_info, sr_info;
880 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
881 	u32 phy_odt_ohm, dram_odt_ohm;
882 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
883 	u32 phy_odt_up_en, phy_odt_dn_en;
884 	u32 sr_dq, sr_clk;
885 	u32 freq = sdram_params->base.ddr_freq;
886 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
887 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
888 	u32 phy_dq_drv = 0;
889 	u32 phy_odt_up = 0, phy_odt_dn = 0;
890 
891 	ddr_info = get_ddr_drv_odt_info(dramtype);
892 	lp4_info = (void *)ddr_info;
893 
894 	if (!ddr_info)
895 		return;
896 
897 	/* dram odt en freq control phy drv, dram odt and phy sr */
898 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
899 		drv_info = ddr_info->drv_when_odtoff;
900 		dram_odt_ohm = 0;
901 		sr_info = ddr_info->sr_when_odtoff;
902 		phy_lp4_drv_pd_en =
903 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
904 	} else {
905 		drv_info = ddr_info->drv_when_odten;
906 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
907 		sr_info = ddr_info->sr_when_odten;
908 		phy_lp4_drv_pd_en =
909 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
910 	}
911 	phy_dq_drv_ohm =
912 		DRV_INFO_PHY_DQ_DRV(drv_info);
913 	phy_clk_drv_ohm =
914 		DRV_INFO_PHY_CLK_DRV(drv_info);
915 	phy_ca_drv_ohm =
916 		DRV_INFO_PHY_CA_DRV(drv_info);
917 
918 	sr_dq = DQ_SR_INFO(sr_info);
919 	sr_clk = CLK_SR_INFO(sr_info);
920 
921 	/* phy odt en freq control dram drv and phy odt */
922 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
923 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
924 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
925 		phy_odt_ohm = 0;
926 		phy_odt_up_en = 0;
927 		phy_odt_dn_en = 0;
928 	} else {
929 		dram_drv_ohm =
930 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
931 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
932 		phy_odt_up_en =
933 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
934 		phy_odt_dn_en =
935 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
936 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
937 	}
938 
939 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
940 		if (phy_odt_ohm) {
941 			phy_odt_up_en = 0;
942 			phy_odt_dn_en = 1;
943 		}
944 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
945 			dram_caodt_ohm = 0;
946 		else
947 			dram_caodt_ohm =
948 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
949 	}
950 
951 	if (dramtype == DDR3) {
952 		p_drv = d3_phy_drv_2_ohm;
953 		p_odt = d3_phy_odt_2_ohm;
954 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 		p_drv = lp4_phy_drv_2_ohm;
956 		p_odt = lp4_phy_odt_2_ohm;
957 	} else {
958 		p_drv = d4lp3_phy_drv_2_ohm;
959 		p_odt = d4lp3_phy_odt_2_ohm;
960 	}
961 
962 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
963 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
964 			phy_dq_drv = **(p_drv + i);
965 			break;
966 		}
967 		if (i == 0)
968 			break;
969 	}
970 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
971 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
972 			phy_clk_drv = **(p_drv + i);
973 			break;
974 		}
975 		if (i == 0)
976 			break;
977 	}
978 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
979 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
980 			phy_ca_drv = **(p_drv + i);
981 			break;
982 		}
983 		if (i == 0)
984 			break;
985 	}
986 	if (!phy_odt_ohm)
987 		phy_odt = 0;
988 	else
989 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
990 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
991 				phy_odt = **(p_odt + i);
992 				break;
993 			}
994 			if (i == 0)
995 				break;
996 		}
997 
998 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
999 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1000 			vref_inner = 0x80;
1001 		else if (phy_odt_up_en)
1002 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1003 				     (dram_drv_ohm + phy_odt_ohm);
1004 		else
1005 			vref_inner = phy_odt_ohm * 128 /
1006 				(phy_odt_ohm + dram_drv_ohm);
1007 
1008 		if (dramtype != DDR3 && dram_odt_ohm)
1009 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1010 				   (phy_dq_drv_ohm + dram_odt_ohm);
1011 		else
1012 			vref_out = 0x80;
1013 	} else {
1014 		/* for lp4 and lp4x*/
1015 		if (phy_odt_ohm)
1016 			vref_inner =
1017 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1018 				 256) / 1000;
1019 		else
1020 			vref_inner =
1021 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1022 				 256) / 1000;
1023 
1024 		vref_out = 0x80;
1025 	}
1026 
1027 	/* default ZQCALIB bypass mode */
1028 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1029 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1030 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1031 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1032 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1033 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1034 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1035 	} else {
1036 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1037 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1038 	}
1039 	/* clk / cmd slew rate */
1040 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1041 
1042 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1043 	if (phy_odt_up_en)
1044 		phy_odt_up = phy_odt;
1045 	if (phy_odt_dn_en)
1046 		phy_odt_dn = phy_odt;
1047 
1048 	for (i = 0; i < 4; i++) {
1049 		j = 0x110 + i * 0x10;
1050 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1051 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1052 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1053 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1054 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1055 
1056 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1057 				1 << 3, phy_lp4_drv_pd_en << 3);
1058 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1059 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1060 		/* dq slew rate */
1061 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1062 				0x1f, sr_dq);
1063 	}
1064 
1065 	/* reg_rx_vref_value_update */
1066 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1067 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1068 
1069 	/* RAM VREF */
1070 	writel(vref_out, PHY_REG(phy_base, 0x105));
1071 	if (dramtype == LPDDR3)
1072 		udelay(100);
1073 
1074 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1075 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1076 
1077 	if (dramtype == DDR3 || dramtype == DDR4) {
1078 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 				DDR_PCTL2_INIT3);
1080 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1081 	} else {
1082 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1083 				DDR_PCTL2_INIT4);
1084 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1085 	}
1086 
1087 	if (dramtype == DDR3) {
1088 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1089 		if (dram_drv_ohm == 34)
1090 			mr1_mr3 |= DDR3_DS_34;
1091 
1092 		if (dram_odt_ohm == 0)
1093 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1094 		else if (dram_odt_ohm <= 40)
1095 			mr1_mr3 |= DDR3_RTT_NOM_40;
1096 		else if (dram_odt_ohm <= 60)
1097 			mr1_mr3 |= DDR3_RTT_NOM_60;
1098 		else
1099 			mr1_mr3 |= DDR3_RTT_NOM_120;
1100 
1101 	} else if (dramtype == DDR4) {
1102 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1103 		if (dram_drv_ohm == 48)
1104 			mr1_mr3 |= DDR4_DS_48;
1105 
1106 		if (dram_odt_ohm == 0)
1107 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1108 		else if (dram_odt_ohm <= 34)
1109 			mr1_mr3 |= DDR4_RTT_NOM_34;
1110 		else if (dram_odt_ohm <= 40)
1111 			mr1_mr3 |= DDR4_RTT_NOM_40;
1112 		else if (dram_odt_ohm <= 48)
1113 			mr1_mr3 |= DDR4_RTT_NOM_48;
1114 		else if (dram_odt_ohm <= 60)
1115 			mr1_mr3 |= DDR4_RTT_NOM_60;
1116 		else
1117 			mr1_mr3 |= DDR4_RTT_NOM_120;
1118 
1119 	} else if (dramtype == LPDDR3) {
1120 		if (dram_drv_ohm <= 34)
1121 			mr1_mr3 |= LPDDR3_DS_34;
1122 		else if (dram_drv_ohm <= 40)
1123 			mr1_mr3 |= LPDDR3_DS_40;
1124 		else if (dram_drv_ohm <= 48)
1125 			mr1_mr3 |= LPDDR3_DS_48;
1126 		else if (dram_drv_ohm <= 60)
1127 			mr1_mr3 |= LPDDR3_DS_60;
1128 		else if (dram_drv_ohm <= 80)
1129 			mr1_mr3 |= LPDDR3_DS_80;
1130 
1131 		if (dram_odt_ohm == 0)
1132 			lp3_odt_value = LPDDR3_ODT_DIS;
1133 		else if (dram_odt_ohm <= 60)
1134 			lp3_odt_value = LPDDR3_ODT_60;
1135 		else if (dram_odt_ohm <= 120)
1136 			lp3_odt_value = LPDDR3_ODT_120;
1137 		else
1138 			lp3_odt_value = LPDDR3_ODT_240;
1139 	} else {/* for lpddr4 and lpddr4x */
1140 		/* MR3 for lp4 PU-CAL and PDDS */
1141 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1142 		mr1_mr3 |= lp4_pu_cal;
1143 
1144 		tmp = lp4_odt_calc(dram_drv_ohm);
1145 		if (!tmp)
1146 			tmp = LPDDR4_PDDS_240;
1147 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1148 
1149 		/* MR11 for lp4 ca odt, dq odt set */
1150 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1151 			     DDR_PCTL2_INIT6);
1152 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1153 
1154 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1155 
1156 		tmp = lp4_odt_calc(dram_odt_ohm);
1157 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1158 
1159 		tmp = lp4_odt_calc(dram_caodt_ohm);
1160 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1161 		sw_set_req(dram);
1162 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1163 				DDR_PCTL2_INIT6,
1164 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1165 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1166 		sw_set_ack(dram);
1167 
1168 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1169 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1170 			     DDR_PCTL2_INIT7);
1171 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1172 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1173 
1174 		tmp = lp4_odt_calc(phy_odt_ohm);
1175 		mr22 |= tmp;
1176 		mr22 = mr22 |
1177 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1178 			LPDDR4_ODTE_CK_SHIFT) |
1179 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1180 			LPDDR4_ODTE_CS_SHIFT) |
1181 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1182 			LPDDR4_ODTD_CA_SHIFT);
1183 
1184 		sw_set_req(dram);
1185 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1186 				DDR_PCTL2_INIT7,
1187 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1188 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1189 		sw_set_ack(dram);
1190 	}
1191 
1192 	if (dramtype == DDR4 || dramtype == DDR3) {
1193 		sw_set_req(dram);
1194 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1195 				DDR_PCTL2_INIT3,
1196 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1197 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1198 		sw_set_ack(dram);
1199 	} else {
1200 		sw_set_req(dram);
1201 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1202 				DDR_PCTL2_INIT4,
1203 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1204 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1205 		sw_set_ack(dram);
1206 	}
1207 }
1208 
1209 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1210 				   struct rv1126_sdram_params *sdram_params)
1211 {
1212 	void __iomem *phy_base = dram->phy;
1213 	u32 dramtype = sdram_params->base.dramtype;
1214 	struct sdram_head_info_index_v2 *index =
1215 		(struct sdram_head_info_index_v2 *)common_info;
1216 	struct dq_map_info *map_info;
1217 
1218 	map_info = (struct dq_map_info *)((void *)common_info +
1219 		index->dq_map_index.offset * 4);
1220 
1221 	if (dramtype == LPDDR4X)
1222 		dramtype = LPDDR4;
1223 
1224 	if (dramtype <= LPDDR4)
1225 		writel((map_info->byte_map[dramtype / 4] >>
1226 			((dramtype % 4) * 8)) & 0xff,
1227 		       PHY_REG(phy_base, 0x4f));
1228 
1229 	return 0;
1230 }
1231 
/*
 * Apply the PHY init register table, remap the command/DQ paths and set
 * the PHY DQ-width enable bits according to the probed bus width.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* play back the 0xFFFFFFFF-terminated PHY offset/value table */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes carry logical bytes 0 and 1 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* enable only the byte lanes needed for the detected bus width */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1278 
1279 static int update_refresh_reg(struct dram_info *dram)
1280 {
1281 	void __iomem *pctl_base = dram->pctl;
1282 	u32 ret;
1283 
1284 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1285 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1286 
1287 	return 0;
1288 }
1289 
1290 /*
1291  * rank = 1: cs0
1292  * rank = 2: cs1
1293  */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	/* pick the board-level DQ0-7 swizzle table for the dram type */
	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	/* raw MR read-back shows up in ddr_grf_status[0] bits [7:0] */
	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	/*
	 * NOTE(review): LPDDR4X takes the swizzle path here while other
	 * call sites in this file treat LPDDR4 and LPDDR4X identically -
	 * confirm whether this check should also exclude LPDDR4X.
	 */
	if (dramtype != LPDDR4) {
		temp = 0;
		/* undo the per-bit DQ swizzling on the read-back value */
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}
1329 
1330 /* before call this function autorefresh should be disabled */
1331 void send_a_refresh(struct dram_info *dram)
1332 {
1333 	void __iomem *pctl_base = dram->pctl;
1334 
1335 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1336 		continue;
1337 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1338 }
1339 
1340 static void enter_sr(struct dram_info *dram, u32 en)
1341 {
1342 	void __iomem *pctl_base = dram->pctl;
1343 
1344 	if (en) {
1345 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1346 		while (1) {
1347 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1348 			      PCTL2_SELFREF_TYPE_MASK) ==
1349 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1350 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1351 			      PCTL2_OPERATING_MODE_MASK) ==
1352 			     PCTL2_OPERATING_MODE_SR))
1353 				break;
1354 		}
1355 	} else {
1356 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1357 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1358 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1359 			continue;
1360 	}
1361 }
1362 
1363 void record_dq_prebit(struct dram_info *dram)
1364 {
1365 	u32 group, i, tmp;
1366 	void __iomem *phy_base = dram->phy;
1367 
1368 	for (group = 0; group < 4; group++) {
1369 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1370 			/* l_loop_invdelaysel */
1371 			writel(dq_sel[i][0], PHY_REG(phy_base,
1372 						     grp_addr[group] + 0x2c));
1373 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1374 			writel(tmp, PHY_REG(phy_base,
1375 					    grp_addr[group] + dq_sel[i][1]));
1376 
1377 			/* r_loop_invdelaysel */
1378 			writel(dq_sel[i][0], PHY_REG(phy_base,
1379 						     grp_addr[group] + 0x2d));
1380 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1381 			writel(tmp, PHY_REG(phy_base,
1382 					    grp_addr[group] + dq_sel[i][2]));
1383 		}
1384 	}
1385 }
1386 
/*
 * Pulse PHY_0x70[4] (clearing bits 1 and 6 at the same time) —
 * presumably latches the updated RX per-bit de-skew values into the
 * PHY; exact register semantics are not documented here.
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1396 
/*
 * Clear PHY_0x7a[1], set PHY_0x2[3], then pulse PHY_0xc[6] —
 * presumably latches the updated TX per-bit de-skew values into the
 * PHY; exact register semantics are not documented here.
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1407 
/*
 * Clear PHY_0x25[2] then pulse PHY_0x22[6] — presumably latches the
 * updated CA per-bit de-skew values into the PHY; exact register
 * semantics are not documented here.
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1417 
1418 /*
1419  * dir: 0: de-skew = delta_*
1420  *	1: de-skew = reg val - delta_*
1421  * delta_dir: value for differential signal: clk/
1422  * delta_sig: value for single signal: ca/cmd
1423  */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	/* map the cs argument to a 2-bit cs enable mask (else: both) */
	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/* temporarily force the DFI low-power bit on for LP4/LP4X */
	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	/* CA de-skew is only changed with the dram in self-refresh */
	enter_sr(dram, 1);

	/* CA/cmd single-ended de-skew registers: 0x150..0x16f */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/*
	 * 0x167/0x168 form the differential clk pair.  0x167 was already
	 * advanced by delta_sig in the loop above, so subtract that back
	 * before applying delta_dif.
	 */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* LP4/LP4X: 0x154 and 0x15a also take the clk value */
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	/* restore the DFI low-power bit if we changed it above */
	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));

}
1474 
1475 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1476 {
1477 	u32 i, j, offset = 0;
1478 	u32 min = 0x3f;
1479 	void __iomem *phy_base = dram->phy;
1480 	u32 byte_en;
1481 
1482 	if (signal == SKEW_TX_SIGNAL)
1483 		offset = 8;
1484 
1485 	if (signal == SKEW_CA_SIGNAL) {
1486 		for (i = 0; i < 0x20; i++)
1487 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1488 	} else {
1489 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1490 		for (j = offset; j < offset + rank * 4; j++) {
1491 			if (!((byte_en >> (j % 4)) & 1))
1492 				continue;
1493 			for (i = 0; i < 11; i++)
1494 				min = MIN(min,
1495 					  readl(PHY_REG(phy_base,
1496 							dqs_dq_skew_adr[j] +
1497 							i)));
1498 		}
1499 	}
1500 
1501 	return min;
1502 }
1503 
1504 static u32 low_power_update(struct dram_info *dram, u32 en)
1505 {
1506 	void __iomem *pctl_base = dram->pctl;
1507 	u32 lp_stat = 0;
1508 
1509 	if (en) {
1510 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1511 	} else {
1512 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1513 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1514 	}
1515 
1516 	return lp_stat;
1517 }
1518 
1519 /*
1520  * signal:
1521  * dir: 0: de-skew = delta_*
1522  *	1: de-skew = reg val - delta_*
1523  * delta_dir: value for differential signal: dqs
1524  * delta_sig: value for single signal: dq/dm
1525  */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	/* RX lanes occupy dqs_dq_skew_adr[0..], TX lanes start at [8] */
	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		/* skip byte lanes that are not enabled */
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		/* entries 0..8: dq0-7 and dm single-ended de-skew */
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		/* entries 9/0xa: the differential dqs pair */
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	/* latch the new values into the PHY */
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}
1565 
1566 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1567 {
1568 	void __iomem *phy_base = dram->phy;
1569 	u32 ret;
1570 	u32 dis_auto_zq = 0;
1571 	u32 odt_val_up, odt_val_dn;
1572 	u32 i, j;
1573 
1574 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1575 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1576 
1577 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1578 		for (i = 0; i < 4; i++) {
1579 			j = 0x110 + i * 0x10;
1580 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1581 			       PHY_REG(phy_base, j));
1582 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1583 			       PHY_REG(phy_base, j + 0x1));
1584 		}
1585 	}
1586 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1587 	/* use normal read mode for data training */
1588 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1589 
1590 	if (dramtype == DDR4)
1591 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1592 
1593 	/* choose training cs */
1594 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1595 	/* enable gate training */
1596 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1597 	udelay(50);
1598 	ret = readl(PHY_REG(phy_base, 0x91));
1599 	/* disable gate training */
1600 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1601 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1602 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1603 
1604 	if (ret & 0x20)
1605 		ret = -1;
1606 	else
1607 		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1608 
1609 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1610 		for (i = 0; i < 4; i++) {
1611 			j = 0x110 + i * 0x10;
1612 			writel(odt_val_dn, PHY_REG(phy_base, j));
1613 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1614 		}
1615 	}
1616 	return ret;
1617 }
1618 
/*
 * Run write-leveling for chip-select @cs.  The current MR1 value (from
 * INIT3 of the active frequency set-point) is handed to the PHY; for
 * dual-rank DDR3/DDR4 the other rank's output is disabled via MR1
 * while leveling.  Hangs forever (after a console message) if the PHY
 * does not report done within ~1000us.  Always returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch MR1 for the active frequency set-point from INIT3 */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* poll until done bits (0x92) match the enabled byte lanes (0xf) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1681 
/* alternating 0xaa/0x55 data pattern used by the training code */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1688 
/*
 * Run read (eye) training for chip-select @cs using the PHY's
 * hardware auto-train mode.  For DDR3 with the default inner vref
 * (0x80) the vref is nudged down by 0xa during training and restored
 * afterwards.  Returns 0 on success, -1 on timeout/error or cs > 1.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* DDR3 at default vref: shift the rx vref down during training */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original DDR3 rx vref shifted above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1805 
/*
 * Run write (eye) training for chip-select @cs using the PHY's
 * hardware auto-train mode.  For low-speed LPDDR3 the CL/CWL values
 * are temporarily lowered during training and restored afterwards.
 * For LPDDR4/4X the trained write vref is saved in fsp_param[@dst_fsp]
 * for later DFS use.  Returns 0 on success, -1 on a training error;
 * hangs forever (after a console message) on timeout.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* slow LPDDR3: save CL/CWL, then force low latencies for training */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* poll 0x92[7] for train done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore CL/CWL and MR2 saved for slow LPDDR3 above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1918 
1919 static int data_training(struct dram_info *dram, u32 cs,
1920 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1921 			 u32 training_flag)
1922 {
1923 	u32 ret = 0;
1924 
1925 	if (training_flag == FULL_TRAINING)
1926 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1927 				WRITE_TRAINING | READ_TRAINING;
1928 
1929 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1930 		ret = data_training_wl(dram, cs,
1931 				       sdram_params->base.dramtype,
1932 				       sdram_params->ch.cap_info.rank);
1933 		if (ret != 0)
1934 			goto out;
1935 	}
1936 
1937 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1938 		ret = data_training_rg(dram, cs,
1939 				       sdram_params->base.dramtype);
1940 		if (ret != 0)
1941 			goto out;
1942 	}
1943 
1944 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1945 		ret = data_training_rd(dram, cs,
1946 				       sdram_params->base.dramtype,
1947 				       sdram_params->base.ddr_freq);
1948 		if (ret != 0)
1949 			goto out;
1950 	}
1951 
1952 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1953 		ret = data_training_wr(dram, cs,
1954 				       sdram_params->base.dramtype,
1955 				       sdram_params->base.ddr_freq, dst_fsp);
1956 		if (ret != 0)
1957 			goto out;
1958 	}
1959 
1960 out:
1961 	return ret;
1962 }
1963 
1964 static int get_wrlvl_val(struct dram_info *dram,
1965 			 struct rv1126_sdram_params *sdram_params)
1966 {
1967 	int i, j, clk_skew;
1968 	void __iomem *phy_base = dram->phy;
1969 	u32 lp_stat;
1970 	int ret;
1971 
1972 	lp_stat = low_power_update(dram, 0);
1973 
1974 	clk_skew = 0x1f;
1975 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1976 			 sdram_params->base.dramtype);
1977 
1978 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1979 	if (sdram_params->ch.cap_info.rank == 2)
1980 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1981 
1982 	for (j = 0; j < 2; j++)
1983 		for (i = 0; i < 4; i++)
1984 			wrlvl_result[j][i] =
1985 				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1986 				clk_skew;
1987 
1988 	low_power_update(dram, lp_stat);
1989 
1990 	return ret;
1991 }
1992 
1993 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1994 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1995 				      void __iomem *phy_base, u8 cs_num)
1996 {
1997 	int i;
1998 
1999 	result->cs_num = cs_num;
2000 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
2001 			  PHY_DQ_WIDTH_MASK;
2002 	for (i = 0; i < FSP_NUM; i++)
2003 		result->fsp_mhz[i] = 0;
2004 }
2005 
/*
 * Capture the per-DQ read/write eye boundaries measured by the PHY into
 * the DDR test-tool result structures, for every enabled byte lane.
 * @phy_base: PHY register base
 * @rd_result: destination for the read-eye min/max values
 * @wr_result: destination for the write-eye min/max values
 * @byte_en: bitmask of active byte lanes (bit n enables DQS lane n)
 *
 * Register layout (relative to the per-lane base):
 *   +0x15..+0x1c: read min,  +0x27..+0x2e: read max,
 *   +0x3d..+0x44: write min, +0x4f..+0x56: write max (one byte per DQ).
 */
static void save_rw_trn_min_max(void __iomem *phy_base,
				struct cs_rw_trn_result *rd_result,
				struct cs_rw_trn_result *wr_result,
				u8 byte_en)
{
	u16 phy_ofs;
	u8 dqs;
	u8 dq;

	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
		/* skip byte lanes not present on this bus width */
		if ((byte_en & BIT(dqs)) == 0)
			continue;

		/* Channel A or B (low or high 16 bit) */
		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
		/* low or high 8 bit */
		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
		for (dq = 0; dq < 8; dq++) {
			rd_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
			rd_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
			wr_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
			wr_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
		}
	}
}
2035 
/*
 * Record the per-DQ and per-DQS deskew values programmed into the PHY
 * after training, for the DDR test tool.
 * @phy_base: PHY register base
 * @result: destination for this frequency set point's deskew values
 * @cs_num: number of chip-selects to dump
 * @min_val: common deskew bias applied during training (stored as-is)
 * @rw: SKEW_RX_SIGNAL (read path) or SKEW_TX_SIGNAL (write path);
 *      selects the RX (+0x1) or TX (+0x17) register group
 *
 * Deskew register layout: cs0 group at 0x170, cs1 at 0x1a0; DQS lanes
 * 0/1 at +0x0/+0xb, lanes 2/3 mirrored at +0x60; the DQS strobe's own
 * deskew sits at +0x8 within each lane's 11-register group.
 */
static void save_rw_trn_deskew(void __iomem *phy_base,
			       struct fsp_rw_trn_result *result, u8 cs_num,
			       int min_val, bool rw)
{
	u16 phy_ofs;
	u8 cs;
	u8 dq;

	result->min_val = min_val;

	for (cs = 0; cs < cs_num; cs++) {
		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
		for (dq = 0; dq < 8; dq++) {
			result->cs[cs].dqs[0].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + dq));
			result->cs[cs].dqs[1].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
			result->cs[cs].dqs[2].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
			result->cs[cs].dqs[3].dq_deskew[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
		}

		/* strobe (DQS) deskew for each of the four byte lanes */
		result->cs[cs].dqs[0].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x8));
		result->cs[cs].dqs[1].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
		result->cs[cs].dqs[2].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
		result->cs[cs].dqs[3].dqs_deskew =
			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
	}
}
2070 
/*
 * Publish the collected training results to a fixed DRAM address
 * (RW_TRN_RESULT_ADDR) where the ddr_test_tool command can find them;
 * the flag field marks the data as valid.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2076 #endif
2077 
/*
 * Full training sequence for the high-frequency operating point @fsp:
 * derive clock/CA deskew from the earlier write-leveling results, run
 * read-gate/read/write training per rank, then re-center the RX/TX/CA
 * deskew around the measured minimum and redo read-gate training.
 *
 * Relies on wrlvl_result[][] having been filled by get_wrlvl_val().
 * Returns 0 on success, non-zero if any training step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	/* average the write-leveling result over all active ranks/bytes */
	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew /
		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));

	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* LPDDR4/4X: only compensate if some byte leveled negative */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
				min_val = MIN(wrlvl_result[j][i], min_val);

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/*
	 * Seed the write-training DQS defaults (0x233/0x237/0x2b3/0x2b7)
	 * from the cs0 write-leveling results, then train cs0.
	 */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same sequence for the second rank with its own seeds */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift all RX deskew so the smallest trained value becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* same idea for TX: bias by the smaller of the TX/CA minima */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* deskew moved the gate position: redo read-gate training */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2190 
2191 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2192 {
2193 	writel(ddrconfig, &dram->msch->deviceconf);
2194 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2195 }
2196 
/*
 * Recompute the bus-width/burst-length dependent fields of the NoC
 * timing parameters and write the whole set to the MSCH registers.
 * Reads the configured burst length back from the controller's MSTR
 * register so the NoC view matches what the controller will issue.
 */
static void update_noc_timing(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 bw, bl;

	/* bw in bits; MSTR[19:16] is burst length / 2 */
	bw = 8 << sdram_params->ch.cap_info.bw;
	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;

	/* update the noc timing related to data bus width */
	if ((bw / 8 * bl) <= 16)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
	else if ((bw / 8 * bl) == 32)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
	else if ((bw / 8 * bl) == 64)
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
	else
		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;

	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		/* masked-write size and write-to-masked-write gap (LP4 only) */
		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
			(bw == 16) ? 0x1 : 0x2;
		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
	}

	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
	       &dram->msch->ddrtiminga0);
	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
	       &dram->msch->ddrtimingb0);
	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
	       &dram->msch->ddrtimingc0);
	writel(sdram_params->ch.noc_timings.devtodev0.d32,
	       &dram->msch->devtodev0);
	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
	       &dram->msch->ddr4timing);
}
2239 
/*
 * Configure the DDR "split" feature, which restricts the upper part of
 * the address space to the low half of the data bus when the high-16bit
 * half of a rank has fewer rows than the low half (asymmetric dies).
 * Computes the boundary (in 16MB units) below which the full bus width
 * is usable, then programs the split mode/size in the DDR GRF and turns
 * off the MSCH AXI bypass.  Leaves the hardware untouched (goto out)
 * when both halves are symmetric.  Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* scale cs0 capacity down by the missing row bits */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
				cap_info->cs1_high16bit_row));
	} else {
		goto out;
	}
	/* split boundary expressed in 16MB (1 << 24) granules */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route all AXI traffic through the MSCH so the split applies */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2285 
2286 static void split_bypass(struct dram_info *dram)
2287 {
2288 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2289 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2290 		return;
2291 
2292 	/* bypass split */
2293 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2294 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2295 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2296 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2297 		     (0x0 << SPLIT_SIZE_OFFSET));
2298 }
2299 
/*
 * Final system-level configuration after training: program the MSCH
 * address map, encode the detected DRAM geometry into the PMU GRF
 * os_reg[2]/os_reg[3] (read later by the kernel/loader), set the MSCH
 * per-rank device size, and refresh the NoC timing registers.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* cs1 starts at bit position ADDRMAP0[4:0] + 8 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		/* cs0 region is padded up to the cs1 base when it is above 256MB */
		if (cs_pst > 28)
			cs_cap[0] = 1llu << cs_pst;
	}

	/* devicesize holds each rank's capacity in 64MB units */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2332 
/*
 * Enable the hardware low-power features: automatic clock gating in the
 * DDR GRF and self-refresh / power-down entry in the controller, gated
 * on the configured sr_idle / pd_idle timeouts.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	/*
	 * ddr_grf_con[1] clock-gating setup; vendor-provided magic value
	 * — TODO: decode against the RV1126 TRM DDR GRF description.
	 */
	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/* select the dram-type-specific low-power request bit (hi 16 = write mask) */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	/* PWRCTL[3]: enable dfi dram clock disable */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2363 
/*
 * Build the boot atags describing the debug serial port and the DDR
 * memory layout (one or two banks, accounting for the 3/4-row and
 * split-capacity reductions) for the next boot stage.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;
	u32 split, split_size;
	u64 reduce_cap = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* start a fresh atag list, beginning with the serial tag */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	if (cap_info->row_3_4) {
		/* 3/4-row dies only expose 75% of the nominal capacity */
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
		/* split active: half of the area above the boundary is lost */
		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* cs1 starts at bit position ADDRMAP0[4:0] + 8 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	if (cs_cap[1] && cs_pst > 27) {
		/* cs1 base is above 128MB: report two discontiguous banks */
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is filled in but never passed to
	 * atags_set_tag() — looks unfinished or intentionally disabled;
	 * confirm against the vendor tree before relying on it.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2425 
2426 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2427 {
2428 	u32 split;
2429 
2430 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2431 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2432 		split = 0;
2433 	else
2434 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2435 			SPLIT_SIZE_MASK;
2436 
2437 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2438 			     &sdram_params->base, split);
2439 }
2440 
/*
 * Core DDR bring-up: configure clocks, release resets in stages, program
 * the PHY and controller, issue the LPDDR mode-register writes, and run
 * read-gate training.  @post_init selects the final (verbose) pass that
 * also trains cs1 and programs the DDR4 VREF.
 *
 * Returns 0 on success, -1 on any training/identification failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* assert all resets, then release them one domain at a time */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI for operation in the extended temperature range */
	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	/* release the final reset and wait for the controller to leave init */
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		/* replay the ODT/VREF mode registers from the INIT6/INIT7 values */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4) {
		/*
		 * Sanity-read MR14 (VREF(DQ)); 0x4d is presumably the value
		 * just programmed — TODO confirm against the LP4 init table.
		 */
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);

		if (mr_tmp != 0x4d)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* convert the PHY's trained VREF code to the controller's unit */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2559 
2560 static u64 dram_detect_cap(struct dram_info *dram,
2561 			   struct rv1126_sdram_params *sdram_params,
2562 			   unsigned char channel)
2563 {
2564 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2565 	void __iomem *pctl_base = dram->pctl;
2566 	void __iomem *phy_base = dram->phy;
2567 	u32 mr8;
2568 
2569 	u32 bktmp;
2570 	u32 coltmp;
2571 	u32 rowtmp;
2572 	u32 cs;
2573 	u32 dram_type = sdram_params->base.dramtype;
2574 	u32 pwrctl;
2575 	u32 i, dq_map;
2576 	u32 byte1 = 0, byte0 = 0;
2577 
2578 	cap_info->bw = dram_type == DDR3 ? 0 : 1;
2579 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2580 		if (dram_type != DDR4) {
2581 			coltmp = 12;
2582 			bktmp = 3;
2583 			if (dram_type == LPDDR2)
2584 				rowtmp = 15;
2585 			else
2586 				rowtmp = 16;
2587 
2588 			if (sdram_detect_col(cap_info, coltmp) != 0)
2589 				goto cap_err;
2590 
2591 			sdram_detect_bank(cap_info, coltmp, bktmp);
2592 			sdram_detect_dbw(cap_info, dram_type);
2593 		} else {
2594 			coltmp = 10;
2595 			bktmp = 4;
2596 			rowtmp = 17;
2597 
2598 			cap_info->col = 10;
2599 			cap_info->bk = 2;
2600 			sdram_detect_bg(cap_info, coltmp);
2601 		}
2602 
2603 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2604 			goto cap_err;
2605 
2606 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2607 	} else {
2608 		cap_info->col = 10;
2609 		cap_info->bk = 3;
2610 		mr8 = read_mr(dram, 1, 8, dram_type);
2611 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2612 		mr8 = (mr8 >> 2) & 0xf;
2613 		if (mr8 >= 0 && mr8 <= 6) {
2614 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2615 		} else if (mr8 == 0xc) {
2616 			cap_info->cs0_row = 13;
2617 		} else {
2618 			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2619 			goto cap_err;
2620 		}
2621 		if (cap_info->dbw == 0)
2622 			cap_info->cs0_row++;
2623 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2624 		if (cap_info->cs0_row >= 17) {
2625 			printascii("Cap ERR: ");
2626 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2627 			goto cap_err;
2628 			// cap_info->cs0_row = 16;
2629 			// cap_info->row_3_4 = 0;
2630 		}
2631 	}
2632 
2633 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2634 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2635 
2636 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2637 		cs = 1;
2638 	else
2639 		cs = 0;
2640 	cap_info->rank = cs + 1;
2641 
2642 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2643 
2644 	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0) {
2645 		cap_info->bw = 2;
2646 	} else {
2647 		dq_map = readl(PHY_REG(phy_base, 0x4f));
2648 		for (i = 0; i < 4; i++) {
2649 			if (((dq_map >> (i * 2)) & 0x3) == 0)
2650 				byte0 = i;
2651 			if (((dq_map >> (i * 2)) & 0x3) == 1)
2652 				byte1 = i;
2653 		}
2654 		clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2655 				BIT(byte0) | BIT(byte1));
2656 		if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2657 			cap_info->bw = 1;
2658 		else
2659 			cap_info->bw = 0;
2660 	}
2661 	if (cap_info->bw > 0)
2662 		cap_info->dbw = 1;
2663 
2664 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2665 
2666 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2667 	if (cs) {
2668 		cap_info->cs1_row = cap_info->cs0_row;
2669 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2670 	} else {
2671 		cap_info->cs1_row = 0;
2672 		cap_info->cs1_high16bit_row = 0;
2673 	}
2674 
2675 	return 0;
2676 cap_err:
2677 	return -1;
2678 }
2679 
2680 static int dram_detect_cs1_row(struct dram_info *dram,
2681 			       struct rv1126_sdram_params *sdram_params,
2682 			       unsigned char channel)
2683 {
2684 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2685 	void __iomem *pctl_base = dram->pctl;
2686 	u32 ret = 0;
2687 	void __iomem *test_addr;
2688 	u32 row, bktmp, coltmp, bw;
2689 	u64 cs0_cap;
2690 	u32 byte_mask;
2691 	u32 cs_pst;
2692 	u32 cs_add = 0;
2693 	u32 max_row;
2694 
2695 	if (cap_info->rank == 2) {
2696 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2697 			6 + 2;
2698 		if (cs_pst < 28)
2699 			cs_add = 1;
2700 
2701 		cs0_cap = 1 << cs_pst;
2702 
2703 		if (sdram_params->base.dramtype == DDR4) {
2704 			if (cap_info->dbw == 0)
2705 				bktmp = cap_info->bk + 2;
2706 			else
2707 				bktmp = cap_info->bk + 1;
2708 		} else {
2709 			bktmp = cap_info->bk;
2710 		}
2711 		bw = cap_info->bw;
2712 		coltmp = cap_info->col;
2713 
2714 		if (bw == 2)
2715 			byte_mask = 0xFFFF;
2716 		else
2717 			byte_mask = 0xFF;
2718 
2719 		max_row = (cs_pst == 31) ? 30 : 31;
2720 
2721 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2722 
2723 		row = (cap_info->cs0_row > max_row) ? max_row :
2724 			cap_info->cs0_row;
2725 
2726 		for (; row > 12; row--) {
2727 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2728 				    (u32)cs0_cap +
2729 				    (1ul << (row + bktmp + coltmp +
2730 					     cs_add + bw - 1ul)));
2731 
2732 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2733 			writel(PATTERN, test_addr);
2734 
2735 			if (((readl(test_addr) & byte_mask) ==
2736 			     (PATTERN & byte_mask)) &&
2737 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2738 			      byte_mask) == 0)) {
2739 				ret = row;
2740 				break;
2741 			}
2742 		}
2743 	}
2744 
2745 	return ret;
2746 }
2747 
/*
 * Top-level init-with-detection flow: bring the DRAM up with the default
 * parameters, detect its real capacity/geometry, then re-initialize with
 * the corrected parameters and record the result in the PMU GRF.
 *
 * return: 0 = success, other = fail
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (sdram_init_(dram, sdram_params, 0)) {
		if (sdram_params->base.dramtype == DDR3) {
			/* retry DDR3 once with an alternate byte mapping */
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					(0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	/* basic read/write sanity check before trusting detection results */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	/* detect with split disabled so the full address space is visible */
	split_bypass(dram);
	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* re-init with the controller params patched for the detected geometry */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* fold the detected cs1 row count into the os_reg encoding */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
	split_setup(dram, sdram_params);
out:
	return ret;
}
2807 
2808 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2809 {
2810 	u32 i;
2811 	u32 offset = 0;
2812 	struct ddr2_3_4_lp2_3_info *ddr_info;
2813 
2814 	if (!freq_mhz) {
2815 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2816 		if (ddr_info)
2817 			freq_mhz =
2818 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2819 				DDR_FREQ_MASK;
2820 		else
2821 			freq_mhz = 0;
2822 	}
2823 
2824 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2825 		if (sdram_configs[i].base.ddr_freq == 0 ||
2826 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2827 			break;
2828 	}
2829 	offset = i == 0 ? 0 : i - 1;
2830 
2831 	return &sdram_configs[offset];
2832 }
2833 
/*
 * Controller (UMCTL2) registers that must be re-programmed for the
 * target frequency set point before switching rates; consumed by
 * pre_set_rate(), which copies the matching values from pctl_regs.
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2857 
/*
 * PHY timing registers (CL/CWL/AL group) re-programmed per frequency
 * set point by pre_set_rate(), mirroring pctl_need_update_reg above.
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2863 
/*
 * Pre-program the controller and PHY timing registers of the target
 * frequency set-point while the DRAM keeps running at the current rate,
 * so that ddr_set_rate() only has to switch over to the prepared set.
 *
 * @dram:         driver state (pctl/phy iomem bases)
 * @sdram_params: register tables generated for the target frequency
 * @dst_fsp:      destination frequency set-point index
 * @dst_fsp_lp4:  LPDDR4 FSP selector folded into MR13 (FSP-OP/FSP-WR bits)
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	/* quasi-dynamic registers must be written under a sw_set_req/ack pair */
	sw_set_req(dram);
	/*
	 * pctl timing update: copy each register listed in
	 * pctl_need_update_reg[] from the 0xFFFFFFFF-terminated generated
	 * table into the dst_fsp register window.  'find' resumes the inner
	 * scan at the previous match, relying on both lists sharing the same
	 * order.
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI (RFSHTMG bits 27:16) to double the refresh rate */
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/*
	 * phy timing update: FSP0 uses the PHY's default register location,
	 * while the other set-points use a shifted window of 3 registers per
	 * FSP starting at 0x382 (offset formula below; confirm against the
	 * PHY documentation).
	 */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update, same resumable scan as for the pctl table */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	/*
	 * For LPDDR4(X), write the mode registers for the new set-point now
	 * and mirror each MR value into PHY regs 0x17..0x1d (presumably the
	 * PHY's shadow copies of the DRAM mode registers — confirm).
	 */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/*
		 * MR13: clear FSP-OP/FSP-WR (bits 7/6) and select one of them
		 * via dst_fsp_lp4 (0 -> bit 7, 1 -> bit 6)
		 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 (no PHY mirror register is written for this one) */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2989 
/*
 * Record the drive-strength / ODT / Vref settings and NoC timings that are
 * in effect for @dst_fsp into fsp_param[dst_fsp].  The entry is marked
 * valid with FSP_FLAG; the whole table is later mirrored to DDR by
 * copy_fsp_param_to_ddr(), presumably for consumption by later boot stages.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4(X) always uses pull-down-only read ODT */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/*
	 * Read-ODT value comes from the PHY register matching whichever pull
	 * direction is enabled (0x111 pull-up, 0x110 pull-down — assumed from
	 * usage here; confirm against set_ds_odt())
	 */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* extract drive strength / ODT fields from the programmed MR values */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * CA Vref: midpoint of the window spanned by the max of
		 * 0x3ae/0x3ce and the min of 0x3be/0x3de per channel
		 * (assumed to be CA-training result registers — TODO confirm
		 * register semantics), with bit 6 of PHY reg 0x1e merged in
		 * (presumably a Vref range-select bit — confirm)
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* snapshot the NoC scheduler timings computed for this frequency */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark this set-point entry as populated */
	p_fsp_param->flag = FSP_FLAG;
}
3105 
3106 #ifndef CONFIG_SPL_KERNEL_BOOT
/*
 * Mirror the collected per-FSP parameter table to the fixed DDR address
 * FSP_PARAM_STORE_ADDR, presumably for consumption by later boot stages.
 */
static void copy_fsp_param_to_ddr(void)
{
	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
	       sizeof(fsp_param));
}
3112 #endif
3113 
3114 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3115 			     struct sdram_cap_info *cap_info, u32 dram_type,
3116 			     u32 freq)
3117 {
3118 	u64 cs0_cap;
3119 	u32 die_cap;
3120 	u32 trfc_ns, trfc4_ns;
3121 	u32 trfc, txsnr;
3122 	u32 txs_abort_fast = 0;
3123 	u32 tmp;
3124 
3125 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3126 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3127 
3128 	switch (dram_type) {
3129 	case DDR3:
3130 		if (die_cap <= DIE_CAP_512MBIT)
3131 			trfc_ns = 90;
3132 		else if (die_cap <= DIE_CAP_1GBIT)
3133 			trfc_ns = 110;
3134 		else if (die_cap <= DIE_CAP_2GBIT)
3135 			trfc_ns = 160;
3136 		else if (die_cap <= DIE_CAP_4GBIT)
3137 			trfc_ns = 260;
3138 		else
3139 			trfc_ns = 350;
3140 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3141 		break;
3142 
3143 	case DDR4:
3144 		if (die_cap <= DIE_CAP_2GBIT) {
3145 			trfc_ns = 160;
3146 			trfc4_ns = 90;
3147 		} else if (die_cap <= DIE_CAP_4GBIT) {
3148 			trfc_ns = 260;
3149 			trfc4_ns = 110;
3150 		} else if (die_cap <= DIE_CAP_8GBIT) {
3151 			trfc_ns = 350;
3152 			trfc4_ns = 160;
3153 		} else {
3154 			trfc_ns = 550;
3155 			trfc4_ns = 260;
3156 		}
3157 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3158 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3159 		break;
3160 
3161 	case LPDDR3:
3162 		if (die_cap <= DIE_CAP_4GBIT)
3163 			trfc_ns = 130;
3164 		else
3165 			trfc_ns = 210;
3166 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3167 		break;
3168 
3169 	case LPDDR4:
3170 	case LPDDR4X:
3171 		if (die_cap <= DIE_CAP_2GBIT)
3172 			trfc_ns = 130;
3173 		else if (die_cap <= DIE_CAP_4GBIT)
3174 			trfc_ns = 180;
3175 		else if (die_cap <= DIE_CAP_8GBIT)
3176 			trfc_ns = 280;
3177 		else
3178 			trfc_ns = 380;
3179 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3180 		break;
3181 
3182 	default:
3183 		return;
3184 	}
3185 	trfc = (trfc_ns * freq + 999) / 1000;
3186 
3187 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3188 		switch (pctl_regs->pctl[i][0]) {
3189 		case DDR_PCTL2_RFSHTMG:
3190 			tmp = pctl_regs->pctl[i][1];
3191 			/* t_rfc_min */
3192 			tmp &= ~((u32)0x3ff);
3193 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3194 			pctl_regs->pctl[i][1] = tmp;
3195 			break;
3196 
3197 		case DDR_PCTL2_DRAMTMG8:
3198 			if (dram_type == DDR3 || dram_type == DDR4) {
3199 				tmp = pctl_regs->pctl[i][1];
3200 				/* t_xs_x32 */
3201 				tmp &= ~((u32)0x7f);
3202 				tmp |= ((txsnr + 63) / 64) & 0x7f;
3203 
3204 				if (dram_type == DDR4) {
3205 					/* t_xs_abort_x32 */
3206 					tmp &= ~((u32)(0x7f << 16));
3207 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3208 					/* t_xs_fast_x32 */
3209 					tmp &= ~((u32)(0x7f << 24));
3210 					tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3211 				}
3212 
3213 				pctl_regs->pctl[i][1] = tmp;
3214 			}
3215 			break;
3216 
3217 		case DDR_PCTL2_DRAMTMG14:
3218 			if (dram_type == LPDDR3 ||
3219 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3220 				tmp = pctl_regs->pctl[i][1];
3221 				/* t_xsr */
3222 				tmp &= ~((u32)0xfff);
3223 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3224 				pctl_regs->pctl[i][1] = tmp;
3225 			}
3226 			break;
3227 
3228 		default:
3229 			break;
3230 		}
3231 	}
3232 }
3233 
/*
 * Switch the DDR to a new frequency set-point.
 *
 * Sequence: pre-program the dst_fsp timing set, disable refresh and enter
 * self-refresh, re-clock the DPLL and PHY PLL, switch the controller and
 * PHY to the dst_fsp register set, exit self-refresh, rewrite the DRAM
 * mode registers for the new speed, then re-train and restore low-power.
 *
 * @freq:        target frequency in MHz
 * @cur_freq:    previous frequency in MHz (not referenced in this body)
 * @dst_fsp:     destination controller frequency set-point index
 * @dst_fsp_lp4: LPDDR4 MR13 FSP selector for the destination
 * @training_en: not referenced in this body; training always runs via
 *               high_freq_training()
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* hold off auto low-power while switching; restored at the end */
	lp_stat = low_power_update(dram, 0);
	/* build a config for the target freq, keeping detected rank/width */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller has left self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * MR1 bit 0 encodes DLL state with opposite polarity on DDR3
	 * (1 = DLL disable) vs DDR4 (1 = DLL enable); derive whether the
	 * destination set-point runs in DLL-off mode.
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* if the DLL is currently on, disable it via MR1 before switching */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	/* quasi-dynamic update: block DFI init handshake during the switch */
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both current and target FSPs */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate the memory scheduler and upctl clocks while re-clocking */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* hold the PHY in reset while the PLLs settle at the new rate */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	/* ungate scheduler and upctl clocks again */
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/*
	 * Switch the active frequency set-point: MSTR bit 29 (presumably
	 * frequency-mode related — confirm against uMCTL2 docs), target FSP
	 * in MSTR2, and the PHY's FSP select (reg 0xc bits 3:2).
	 */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit 5 — presumably a PHY sync/reset strobe */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* rewrite the mode registers for the new frequency set-point */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* when the DLL stays on, kick a DLL reset via MR0 first */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* MR13: point FSP-OP (bit 7) at the new set-point */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3416 
/*
 * Walk the DDR frequency set-points: program and train FSP1..FSP3 at
 * frequencies f1/f2/f3 taken from the drv/odt info blob, then switch to
 * the final runtime frequency f0 (FSP0).  In the CONFIG_SPL_KERNEL_BOOT
 * configuration only the final switch to f0 is performed and the FSP
 * parameter table is not cleared or mirrored.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	/* f0 is the final runtime frequency in MHz */
	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* clear both the in-RAM FSP table and its DDR mirror */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3473 
3474 int get_uart_config(void)
3475 {
3476 	struct sdram_head_info_index_v2 *index =
3477 		(struct sdram_head_info_index_v2 *)common_info;
3478 	struct global_info *gbl_info;
3479 
3480 	gbl_info = (struct global_info *)((void *)common_info +
3481 		index->global_index.offset * 4);
3482 
3483 	return gbl_info->uart_info;
3484 }
3485 
3486 /* return: 0 = success, other = fail */
3487 int sdram_init(void)
3488 {
3489 	struct rv1126_sdram_params *sdram_params;
3490 	int ret = 0;
3491 	struct sdram_head_info_index_v2 *index =
3492 		(struct sdram_head_info_index_v2 *)common_info;
3493 	struct global_info *gbl_info;
3494 
3495 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3496 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3497 	dram_info.grf = (void *)GRF_BASE_ADDR;
3498 	dram_info.cru = (void *)CRU_BASE_ADDR;
3499 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3500 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3501 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3502 
3503 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3504 	printascii("extended temp support\n");
3505 #endif
3506 	if (index->version_info != 2 ||
3507 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3508 	    (index->ddr3_index.size !=
3509 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3510 	    (index->ddr4_index.size !=
3511 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3512 	    (index->lp3_index.size !=
3513 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3514 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3515 	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3516 	    index->global_index.offset == 0 ||
3517 	    index->ddr3_index.offset == 0 ||
3518 	    index->ddr4_index.offset == 0 ||
3519 	    index->lp3_index.offset == 0 ||
3520 	    index->lp4_index.offset == 0 ||
3521 	    index->lp4x_index.offset == 0) {
3522 		printascii("common info error\n");
3523 		goto error;
3524 	}
3525 
3526 	gbl_info = (struct global_info *)((void *)common_info +
3527 		index->global_index.offset * 4);
3528 
3529 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3530 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3531 
3532 	sdram_params = &sdram_configs[0];
3533 	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
3534 	for (j = 0; j < ARRAY_SIZE(sdram_configs); j++)
3535 		sdram_configs[j].base.dramtype = LPDDR4X;
3536 	#endif
3537 	if (sdram_params->base.dramtype == DDR3 ||
3538 	    sdram_params->base.dramtype == DDR4) {
3539 		if (DDR_2T_INFO(gbl_info->info_2t))
3540 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3541 		else
3542 			sdram_params->pctl_regs.pctl[0][1] &=
3543 				~(0x1 << 10);
3544 	}
3545 	ret = sdram_init_detect(&dram_info, sdram_params);
3546 	if (ret) {
3547 		sdram_print_dram_type(sdram_params->base.dramtype);
3548 		printascii(", ");
3549 		printdec(sdram_params->base.ddr_freq);
3550 		printascii("MHz\n");
3551 		goto error;
3552 	}
3553 	print_ddr_info(sdram_params);
3554 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3555 	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3556 				  (u8)sdram_params->ch.cap_info.rank);
3557 #endif
3558 
3559 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3560 #ifndef CONFIG_SPL_KERNEL_BOOT
3561 	copy_fsp_param_to_ddr();
3562 #endif
3563 
3564 	ddr_set_atags(&dram_info, sdram_params);
3565 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3566 	save_rw_trn_result_to_ddr(&rw_trn_result);
3567 #endif
3568 
3569 	printascii("out\n");
3570 
3571 	return ret;
3572 error:
3573 	printascii("error\n");
3574 	return (-1);
3575 }
3576 #endif /* CONFIG_TPL_BUILD */
3577