xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision 80cf78ca99a6cc7b4f99b472e0c2da510bdf528b)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/*
 * Runtime state for the DRAM init: register bases of every block the
 * init sequence touches plus a few cached policy values.
 */
struct dram_info {
	void __iomem *pctl;		/* DDR controller (uMCTL2) base */
	void __iomem *phy;		/* DDR PHY base */
	struct rv1126_cru *cru;		/* clock & reset unit */
	struct msch_regs *msch;		/* memory scheduler */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;		/* system GRF */
	struct ram_info info;		/* size/base reported to the ram uclass */
	struct rv1126_pmugrf *pmugrf;	/* PMU GRF */
	u32 sr_idle;			/* self-refresh idle timeout */
	u32 pd_idle;			/* power-down idle timeout */
};
55 
/* Fixed physical base addresses of the blocks used during TPL DRAM init */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* Bus SGRF register offsets (SOC_CON13 holds the ctl reset requests) */
#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* Single global instance: TPL runs this driver before any heap exists */
struct dram_info dram_info;
70 
/*
 * Per-frequency detection parameter sets, chosen at build time by
 * CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE: 3 selects the DDR3 tables,
 * 0 DDR4, 6 LPDDR3, and 7/8 the LPDDR4 tables (per the .inc names).
 * Each .inc contributes one struct initializer for one target frequency.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
112 
/*
 * Loader parameter blob; interpreted as a struct sdram_head_info_index_v2
 * header followed by the per-dram-type info blocks it indexes (offsets are
 * in 32-bit words, see get_ddr_drv_odt_info()).
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};
116 
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* Read/write training results exported to the ddr test tool command */
static struct rw_trn_result rw_trn_result;
#endif

/* Per frequency-set-point parameters saved for later FSP switching */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* Cached LPDDR3 ODT setting -- presumably the MR ODT value; TODO confirm */
static u8 lp3_odt_value;

/* Write-leveling results, presumably [rank][byte lane] -- see
 * wrlvl_result_offset for the matching PHY register offsets
 */
static s8 wrlvl_result[2][4];
126 
127 /* DDR configuration 0-9 */
/*
 * DDR configurations 0-9 (non-DDR4 types).
 * Encoding, matching the tmp word built in calculate_ddrconfig():
 *   bit 8   : rank - 1
 *   bits 7:5: row - 13
 *   bit 4   : only matched by the dedicated rank==2 search loop, never
 *             set in the generic compare word -- presumably a split-rank
 *             arrangement flag; TODO confirm
 *   bit 3   : 1 when 8 banks (bk == 3)
 *   bits 2:0: bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
140 
141 /* DDR configuration 10-21 */
/*
 * DDR4 configurations 10-21 (index i stores configuration i + 10).
 * Encoding, matching the tmp word built in calculate_ddrconfig():
 *   bit 7   : rank - 1
 *   bits 6:4: row - 13
 *   bit 3   : only matched by the dedicated rank==2 search loop --
 *             presumably a split-rank arrangement flag; TODO confirm
 *   bits 2:1: bus width (bw)
 *   bit 0   : die bus width (dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
156 
157 /* DDR configuration 22-28 */
/*
 * DDR configurations 22-28 (index i stores configuration i + 22);
 * second search table for non-DDR4 types, same bit encoding as
 * ddr_cfg_2_rbc above.
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};
167 
/*
 * {DDR4 ddrconfig, equivalent DDR3-style ddrconfig} pairs.  Used in both
 * directions: calculate_ddrconfig() translates a matched DDR4 config to
 * the DDR3-style number, set_ctl_address_map() translates it back to
 * pick the DDR4 addrmap row.
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
182 
/*
 * uMCTL2 address-map register values, one row per ddrconfig (0-28).
 * addrmap[cfg][0..8] are copied verbatim into DDR_PCTL2_ADDRMAP0..8 by
 * set_ctl_address_map(); unused row bits are patched to 0xf afterwards.
 */
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
246 
/*
 * DQ selection lookup used by data training.  The meaning of the three
 * columns is not derivable from this chunk -- presumably {DQ id, map A,
 * map B}; TODO confirm against the training code in the full driver.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
271 
/*
 * Address-group register offsets, one per {CS0,CS1} x {A,B} group
 * (ADD_GROUP_* come from sdram_rv1126.h).
 */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};
278 
/*
 * PHY register offsets holding the write-leveling results; indexed the
 * same way as wrlvl_result (presumably [rank][byte lane] -- TODO
 * confirm).  Lanes pair up under the 0xa0 and 0xd0 register groups.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};
283 
/*
 * PHY deskew register base offsets for each DQS group, indexed by the
 * SKEW_UPDATE_* id named in the per-entry comments (RX first, then TX;
 * CS0 before CS1).
 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
302 
303 static void rkclk_ddr_reset(struct dram_info *dram,
304 			    u32 ctl_srstn, u32 ctl_psrstn,
305 			    u32 phy_srstn, u32 phy_psrstn)
306 {
307 	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
308 	       UPCTL2_ASRSTN_REQ(ctl_srstn),
309 	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
310 
311 	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
312 	       &dram->cru->softrst_con[12]);
313 }
314 
/*
 * Program the DPLL to output @hz (Hz).  Sequence: switch the DPLL mux to
 * the 24 MHz crystal, reprogram the dividers (optionally enabling
 * spread-spectrum modulation when the loader params request it), wait up
 * to ~1 ms for lock, then switch back to the PLL output.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock poll budget, 1 us per iteration */
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;		/* integer mode unless ssmod is enabled */

	/* ssmod configuration is carried in the global loader info block */
	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	/* pick post-dividers so the VCO stays in range for each band */
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	/* 24 MHz crystal reference: fout = 24 * fbdiv / (refdiv * postdivs) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL is being reprogrammed */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	/* hi-16 write-mask register -- presumably resets the ddr clk
	 * divider field; TODO confirm against the CRU manual
	 */
	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;	/* fractional mode required for ssmod */
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* poll the lock bit; falls through silently on timeout */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
380 
381 static void rkclk_configure_ddr(struct dram_info *dram,
382 				struct rv1126_sdram_params *sdram_params)
383 {
384 	/* for inno ddr phy need freq / 2 */
385 	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
386 }
387 
/*
 * Pick the ddrconfig number (0-28) matching the detected geometry
 * (rank/width/col/row/bank from cap_info) by searching the
 * ddr_cfg_2_rbc / ddr4_cfg_2_rbc / ddr_cfg_2_rbc_p2 encoding tables.
 * DDR4 matches are translated to their DDR3-style equivalent via
 * d4_rbc_2_d3_rbc before returning.
 *
 * On no match, ddrconf stays (u32)-1: the "> 28" check prints the error
 * but the out-of-range value is still returned -- callers must treat a
 * value > 28 as failure.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* sentinel: no configuration matched */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/* symmetric dual-rank: try the split-rank configs 17-20 first */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				/* width bits must match, row bits may be <= */
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8 banks: try split configs 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/* primary table: configs 0-8 */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		/* secondary table: configs 22-28 */
		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		/* last resort for single-rank 8-bank parts */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	/* translate a DDR4 match to the DDR3-style config number */
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
492 
493 static void sw_set_req(struct dram_info *dram)
494 {
495 	void __iomem *pctl_base = dram->pctl;
496 
497 	/* clear sw_done=0 */
498 	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
499 }
500 
501 static void sw_set_ack(struct dram_info *dram)
502 {
503 	void __iomem *pctl_base = dram->pctl;
504 
505 	/* set sw_done=1 */
506 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
507 	while (1) {
508 		/* wait programming done */
509 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
510 				PCTL2_SW_DONE_ACK)
511 			break;
512 	}
513 }
514 
/*
 * Write the uMCTL2 ADDRMAP0..8 registers for the detected ddrconfig.
 * For DDR4 the stored config is the DDR3-style number, so it is first
 * translated back through d4_rbc_2_d3_rbc to pick the DDR4 addrmap row.
 * Afterwards the unused high row bits are disabled, and half-bus /
 * 3/4-row / single-rank quirks are patched in.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	/* copy the whole 9-register row into ADDRMAP0..8 */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* unused row set to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	/* LPDDR3 3/4-row parts flag bit 31 of ADDRMAP6 */
	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	/* half bus width DDR4 needs the PCCFG quirk bit */
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	/* single rank: 0x1f in ADDRMAP0 disables the CS address bit */
	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}
555 
/*
 * Configure the DDR PHY PLL.  Called twice per frequency change:
 * first with @wait == 0 to program the dividers for @freq (Hz), then
 * with @wait == 1 to power the PLL up and spin until it reports lock.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		/* power up (clear power-down disable) and poll for lock */
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		/* divider selection per frequency band */
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		/* fbdiv is 9 bits: low byte in reg 0x50, bit 8 in reg 0x51 */
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
594 
/*
 * DDR3 PHY drive-strength register setting -> output impedance (ohm),
 * strongest-resistance first.  set_ds_odt() scans from the bottom up
 * and takes the first entry whose ohm value is >= the requested one.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
620 
621 static u16 d3_phy_odt_2_ohm[][2] = {
622 	{PHY_DDR3_RTT_DISABLE, 0},
623 	{PHY_DDR3_RTT_561ohm, 561},
624 	{PHY_DDR3_RTT_282ohm, 282},
625 	{PHY_DDR3_RTT_188ohm, 188},
626 	{PHY_DDR3_RTT_141ohm, 141},
627 	{PHY_DDR3_RTT_113ohm, 113},
628 	{PHY_DDR3_RTT_94ohm, 94},
629 	{PHY_DDR3_RTT_81ohm, 81},
630 	{PHY_DDR3_RTT_72ohm, 72},
631 	{PHY_DDR3_RTT_64ohm, 64},
632 	{PHY_DDR3_RTT_58ohm, 58},
633 	{PHY_DDR3_RTT_52ohm, 52},
634 	{PHY_DDR3_RTT_48ohm, 48},
635 	{PHY_DDR3_RTT_44ohm, 44},
636 	{PHY_DDR3_RTT_41ohm, 41},
637 	{PHY_DDR3_RTT_38ohm, 38},
638 	{PHY_DDR3_RTT_37ohm, 37},
639 	{PHY_DDR3_RTT_34ohm, 34},
640 	{PHY_DDR3_RTT_32ohm, 32},
641 	{PHY_DDR3_RTT_31ohm, 31},
642 	{PHY_DDR3_RTT_29ohm, 29},
643 	{PHY_DDR3_RTT_28ohm, 28},
644 	{PHY_DDR3_RTT_27ohm, 27},
645 	{PHY_DDR3_RTT_25ohm, 25}
646 };
647 
648 static u16 d4lp3_phy_drv_2_ohm[][2] = {
649 	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
650 	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
651 	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
652 	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
653 	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
654 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
655 	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
656 	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
657 	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
658 	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
659 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
660 	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
661 	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
662 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
663 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
664 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
665 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
666 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
667 	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
668 	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
669 	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
670 	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
671 	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
672 };
673 
674 static u16 d4lp3_phy_odt_2_ohm[][2] = {
675 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
676 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
677 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
678 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
679 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
680 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
681 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
682 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
683 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
684 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
685 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
686 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
687 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
688 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
689 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
690 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
691 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
692 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
693 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
694 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
695 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
696 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
697 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
698 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
699 };
700 
701 static u16 lp4_phy_drv_2_ohm[][2] = {
702 	{PHY_LPDDR4_RON_501ohm, 501},
703 	{PHY_LPDDR4_RON_253ohm, 253},
704 	{PHY_LPDDR4_RON_168ohm, 168},
705 	{PHY_LPDDR4_RON_126ohm, 126},
706 	{PHY_LPDDR4_RON_101ohm, 101},
707 	{PHY_LPDDR4_RON_84ohm, 84},
708 	{PHY_LPDDR4_RON_72ohm, 72},
709 	{PHY_LPDDR4_RON_63ohm, 63},
710 	{PHY_LPDDR4_RON_56ohm, 56},
711 	{PHY_LPDDR4_RON_50ohm, 50},
712 	{PHY_LPDDR4_RON_46ohm, 46},
713 	{PHY_LPDDR4_RON_42ohm, 42},
714 	{PHY_LPDDR4_RON_38ohm, 38},
715 	{PHY_LPDDR4_RON_36ohm, 36},
716 	{PHY_LPDDR4_RON_33ohm, 33},
717 	{PHY_LPDDR4_RON_31ohm, 31},
718 	{PHY_LPDDR4_RON_29ohm, 29},
719 	{PHY_LPDDR4_RON_28ohm, 28},
720 	{PHY_LPDDR4_RON_26ohm, 26},
721 	{PHY_LPDDR4_RON_25ohm, 25},
722 	{PHY_LPDDR4_RON_24ohm, 24},
723 	{PHY_LPDDR4_RON_23ohm, 23},
724 	{PHY_LPDDR4_RON_22ohm, 22}
725 };
726 
727 static u16 lp4_phy_odt_2_ohm[][2] = {
728 	{PHY_LPDDR4_RTT_DISABLE, 0},
729 	{PHY_LPDDR4_RTT_604ohm, 604},
730 	{PHY_LPDDR4_RTT_303ohm, 303},
731 	{PHY_LPDDR4_RTT_202ohm, 202},
732 	{PHY_LPDDR4_RTT_152ohm, 152},
733 	{PHY_LPDDR4_RTT_122ohm, 122},
734 	{PHY_LPDDR4_RTT_101ohm, 101},
735 	{PHY_LPDDR4_RTT_87ohm,	87},
736 	{PHY_LPDDR4_RTT_78ohm, 78},
737 	{PHY_LPDDR4_RTT_69ohm, 69},
738 	{PHY_LPDDR4_RTT_62ohm, 62},
739 	{PHY_LPDDR4_RTT_56ohm, 56},
740 	{PHY_LPDDR4_RTT_52ohm, 52},
741 	{PHY_LPDDR4_RTT_48ohm, 48},
742 	{PHY_LPDDR4_RTT_44ohm, 44},
743 	{PHY_LPDDR4_RTT_41ohm, 41},
744 	{PHY_LPDDR4_RTT_39ohm, 39},
745 	{PHY_LPDDR4_RTT_37ohm, 37},
746 	{PHY_LPDDR4_RTT_35ohm, 35},
747 	{PHY_LPDDR4_RTT_33ohm, 33},
748 	{PHY_LPDDR4_RTT_32ohm, 32},
749 	{PHY_LPDDR4_RTT_30ohm, 30},
750 	{PHY_LPDDR4_RTT_29ohm, 29},
751 	{PHY_LPDDR4_RTT_27ohm, 27}
752 };
753 
754 static u32 lp4_odt_calc(u32 odt_ohm)
755 {
756 	u32 odt;
757 
758 	if (odt_ohm == 0)
759 		odt = LPDDR4_DQODT_DIS;
760 	else if (odt_ohm <= 40)
761 		odt = LPDDR4_DQODT_40;
762 	else if (odt_ohm <= 48)
763 		odt = LPDDR4_DQODT_48;
764 	else if (odt_ohm <= 60)
765 		odt = LPDDR4_DQODT_60;
766 	else if (odt_ohm <= 80)
767 		odt = LPDDR4_DQODT_80;
768 	else if (odt_ohm <= 120)
769 		odt = LPDDR4_DQODT_120;
770 	else
771 		odt = LPDDR4_DQODT_240;
772 
773 	return odt;
774 }
775 
776 static void *get_ddr_drv_odt_info(u32 dramtype)
777 {
778 	struct sdram_head_info_index_v2 *index =
779 		(struct sdram_head_info_index_v2 *)common_info;
780 	void *ddr_info = 0;
781 
782 	if (dramtype == DDR4)
783 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
784 	else if (dramtype == DDR3)
785 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
786 	else if (dramtype == LPDDR3)
787 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
788 	else if (dramtype == LPDDR4)
789 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
790 	else if (dramtype == LPDDR4X)
791 		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
792 	else
793 		printascii("unsupported dram type\n");
794 	return ddr_info;
795 }
796 
/*
 * Program the LPDDR4/LPDDR4X CA and DQ reference voltages for @dst_fsp.
 * Picks the odt-on or odt-off vref from the loader params depending on
 * @freq_mhz, clamps it, converts it to the mode-register encoding
 * (bit 6 selects the vref range, low bits the step), and writes it into
 * the controller's INIT6 (MR12, VREF-CA) and INIT7 (MR14, VREF-DQ)
 * fields inside a sw_set_req()/sw_set_ack() programming window.
 */
static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	/* below the odt-enable frequency the odt-off vref applies */
	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		/* LPDDR4: usable range 100..420 (presumably mV-scaled units) */
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		/* bit 6 selects the MR vref range, low bits the 4-unit step */
		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		/* LPDDR4X: rescale by 11/6 then clamp to 150..629 */
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	/* MR12/MR14 live in INIT6/INIT7; quasi-dynamic, so wrap in sw req/ack */
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
867 
868 static void set_ds_odt(struct dram_info *dram,
869 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
870 {
871 	void __iomem *phy_base = dram->phy;
872 	void __iomem *pctl_base = dram->pctl;
873 	u32 dramtype = sdram_params->base.dramtype;
874 	struct ddr2_3_4_lp2_3_info *ddr_info;
875 	struct lp4_info *lp4_info;
876 	u32 i, j, tmp;
877 	const u16 (*p_drv)[2];
878 	const u16 (*p_odt)[2];
879 	u32 drv_info, sr_info;
880 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
881 	u32 phy_odt_ohm, dram_odt_ohm;
882 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
883 	u32 phy_odt_up_en, phy_odt_dn_en;
884 	u32 sr_dq, sr_clk;
885 	u32 freq = sdram_params->base.ddr_freq;
886 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
887 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
888 	u32 phy_dq_drv = 0;
889 	u32 phy_odt_up = 0, phy_odt_dn = 0;
890 
891 	ddr_info = get_ddr_drv_odt_info(dramtype);
892 	lp4_info = (void *)ddr_info;
893 
894 	if (!ddr_info)
895 		return;
896 
897 	/* dram odt en freq control phy drv, dram odt and phy sr */
898 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
899 		drv_info = ddr_info->drv_when_odtoff;
900 		dram_odt_ohm = 0;
901 		sr_info = ddr_info->sr_when_odtoff;
902 		phy_lp4_drv_pd_en =
903 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
904 	} else {
905 		drv_info = ddr_info->drv_when_odten;
906 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
907 		sr_info = ddr_info->sr_when_odten;
908 		phy_lp4_drv_pd_en =
909 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
910 	}
911 	phy_dq_drv_ohm =
912 		DRV_INFO_PHY_DQ_DRV(drv_info);
913 	phy_clk_drv_ohm =
914 		DRV_INFO_PHY_CLK_DRV(drv_info);
915 	phy_ca_drv_ohm =
916 		DRV_INFO_PHY_CA_DRV(drv_info);
917 
918 	sr_dq = DQ_SR_INFO(sr_info);
919 	sr_clk = CLK_SR_INFO(sr_info);
920 
921 	/* phy odt en freq control dram drv and phy odt */
922 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
923 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
924 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
925 		phy_odt_ohm = 0;
926 		phy_odt_up_en = 0;
927 		phy_odt_dn_en = 0;
928 	} else {
929 		dram_drv_ohm =
930 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
931 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
932 		phy_odt_up_en =
933 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
934 		phy_odt_dn_en =
935 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
936 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
937 	}
938 
939 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
940 		if (phy_odt_ohm) {
941 			phy_odt_up_en = 0;
942 			phy_odt_dn_en = 1;
943 		}
944 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
945 			dram_caodt_ohm = 0;
946 		else
947 			dram_caodt_ohm =
948 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
949 	}
950 
951 	if (dramtype == DDR3) {
952 		p_drv = d3_phy_drv_2_ohm;
953 		p_odt = d3_phy_odt_2_ohm;
954 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 		p_drv = lp4_phy_drv_2_ohm;
956 		p_odt = lp4_phy_odt_2_ohm;
957 	} else {
958 		p_drv = d4lp3_phy_drv_2_ohm;
959 		p_odt = d4lp3_phy_odt_2_ohm;
960 	}
961 
962 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
963 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
964 			phy_dq_drv = **(p_drv + i);
965 			break;
966 		}
967 		if (i == 0)
968 			break;
969 	}
970 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
971 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
972 			phy_clk_drv = **(p_drv + i);
973 			break;
974 		}
975 		if (i == 0)
976 			break;
977 	}
978 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
979 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
980 			phy_ca_drv = **(p_drv + i);
981 			break;
982 		}
983 		if (i == 0)
984 			break;
985 	}
986 	if (!phy_odt_ohm)
987 		phy_odt = 0;
988 	else
989 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
990 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
991 				phy_odt = **(p_odt + i);
992 				break;
993 			}
994 			if (i == 0)
995 				break;
996 		}
997 
998 	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
999 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1000 			vref_inner = 0x80;
1001 		else if (phy_odt_up_en)
1002 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1003 				     (dram_drv_ohm + phy_odt_ohm);
1004 		else
1005 			vref_inner = phy_odt_ohm * 128 /
1006 				(phy_odt_ohm + dram_drv_ohm);
1007 
1008 		if (dramtype != DDR3 && dram_odt_ohm)
1009 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1010 				   (phy_dq_drv_ohm + dram_odt_ohm);
1011 		else
1012 			vref_out = 0x80;
1013 	} else {
1014 		/* for lp4 and lp4x*/
1015 		if (phy_odt_ohm)
1016 			vref_inner =
1017 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1018 				 256) / 1000;
1019 		else
1020 			vref_inner =
1021 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1022 				 256) / 1000;
1023 
1024 		vref_out = 0x80;
1025 	}
1026 
1027 	/* default ZQCALIB bypass mode */
1028 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1029 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1030 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1031 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1032 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1033 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1034 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1035 	} else {
1036 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1037 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1038 	}
1039 	/* clk / cmd slew rate */
1040 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1041 
1042 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1043 	if (phy_odt_up_en)
1044 		phy_odt_up = phy_odt;
1045 	if (phy_odt_dn_en)
1046 		phy_odt_dn = phy_odt;
1047 
1048 	for (i = 0; i < 4; i++) {
1049 		j = 0x110 + i * 0x10;
1050 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1051 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1052 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1053 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1054 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1055 
1056 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1057 				1 << 3, phy_lp4_drv_pd_en << 3);
1058 		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1059 			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1060 		/* dq slew rate */
1061 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1062 				0x1f, sr_dq);
1063 	}
1064 
1065 	/* reg_rx_vref_value_update */
1066 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1067 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1068 
1069 	/* RAM VREF */
1070 	writel(vref_out, PHY_REG(phy_base, 0x105));
1071 	if (dramtype == LPDDR3)
1072 		udelay(100);
1073 
1074 	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1075 		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1076 
1077 	if (dramtype == DDR3 || dramtype == DDR4) {
1078 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 				DDR_PCTL2_INIT3);
1080 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1081 	} else {
1082 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1083 				DDR_PCTL2_INIT4);
1084 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1085 	}
1086 
1087 	if (dramtype == DDR3) {
1088 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1089 		if (dram_drv_ohm == 34)
1090 			mr1_mr3 |= DDR3_DS_34;
1091 
1092 		if (dram_odt_ohm == 0)
1093 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1094 		else if (dram_odt_ohm <= 40)
1095 			mr1_mr3 |= DDR3_RTT_NOM_40;
1096 		else if (dram_odt_ohm <= 60)
1097 			mr1_mr3 |= DDR3_RTT_NOM_60;
1098 		else
1099 			mr1_mr3 |= DDR3_RTT_NOM_120;
1100 
1101 	} else if (dramtype == DDR4) {
1102 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1103 		if (dram_drv_ohm == 48)
1104 			mr1_mr3 |= DDR4_DS_48;
1105 
1106 		if (dram_odt_ohm == 0)
1107 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1108 		else if (dram_odt_ohm <= 34)
1109 			mr1_mr3 |= DDR4_RTT_NOM_34;
1110 		else if (dram_odt_ohm <= 40)
1111 			mr1_mr3 |= DDR4_RTT_NOM_40;
1112 		else if (dram_odt_ohm <= 48)
1113 			mr1_mr3 |= DDR4_RTT_NOM_48;
1114 		else if (dram_odt_ohm <= 60)
1115 			mr1_mr3 |= DDR4_RTT_NOM_60;
1116 		else
1117 			mr1_mr3 |= DDR4_RTT_NOM_120;
1118 
1119 	} else if (dramtype == LPDDR3) {
1120 		if (dram_drv_ohm <= 34)
1121 			mr1_mr3 |= LPDDR3_DS_34;
1122 		else if (dram_drv_ohm <= 40)
1123 			mr1_mr3 |= LPDDR3_DS_40;
1124 		else if (dram_drv_ohm <= 48)
1125 			mr1_mr3 |= LPDDR3_DS_48;
1126 		else if (dram_drv_ohm <= 60)
1127 			mr1_mr3 |= LPDDR3_DS_60;
1128 		else if (dram_drv_ohm <= 80)
1129 			mr1_mr3 |= LPDDR3_DS_80;
1130 
1131 		if (dram_odt_ohm == 0)
1132 			lp3_odt_value = LPDDR3_ODT_DIS;
1133 		else if (dram_odt_ohm <= 60)
1134 			lp3_odt_value = LPDDR3_ODT_60;
1135 		else if (dram_odt_ohm <= 120)
1136 			lp3_odt_value = LPDDR3_ODT_120;
1137 		else
1138 			lp3_odt_value = LPDDR3_ODT_240;
1139 	} else {/* for lpddr4 and lpddr4x */
1140 		/* MR3 for lp4 PU-CAL and PDDS */
1141 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1142 		mr1_mr3 |= lp4_pu_cal;
1143 
1144 		tmp = lp4_odt_calc(dram_drv_ohm);
1145 		if (!tmp)
1146 			tmp = LPDDR4_PDDS_240;
1147 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1148 
1149 		/* MR11 for lp4 ca odt, dq odt set */
1150 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1151 			     DDR_PCTL2_INIT6);
1152 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1153 
1154 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1155 
1156 		tmp = lp4_odt_calc(dram_odt_ohm);
1157 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1158 
1159 		tmp = lp4_odt_calc(dram_caodt_ohm);
1160 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1161 		sw_set_req(dram);
1162 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1163 				DDR_PCTL2_INIT6,
1164 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1165 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1166 		sw_set_ack(dram);
1167 
1168 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1169 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1170 			     DDR_PCTL2_INIT7);
1171 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1172 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1173 
1174 		tmp = lp4_odt_calc(phy_odt_ohm);
1175 		mr22 |= tmp;
1176 		mr22 = mr22 |
1177 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1178 			LPDDR4_ODTE_CK_SHIFT) |
1179 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1180 			LPDDR4_ODTE_CS_SHIFT) |
1181 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1182 			LPDDR4_ODTD_CA_SHIFT);
1183 
1184 		sw_set_req(dram);
1185 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1186 				DDR_PCTL2_INIT7,
1187 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1188 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1189 		sw_set_ack(dram);
1190 	}
1191 
1192 	if (dramtype == DDR4 || dramtype == DDR3) {
1193 		sw_set_req(dram);
1194 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1195 				DDR_PCTL2_INIT3,
1196 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1197 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1198 		sw_set_ack(dram);
1199 	} else {
1200 		sw_set_req(dram);
1201 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1202 				DDR_PCTL2_INIT4,
1203 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1204 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1205 		sw_set_ack(dram);
1206 	}
1207 }
1208 
1209 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1210 				   struct rv1126_sdram_params *sdram_params)
1211 {
1212 	void __iomem *phy_base = dram->phy;
1213 	u32 dramtype = sdram_params->base.dramtype;
1214 	struct sdram_head_info_index_v2 *index =
1215 		(struct sdram_head_info_index_v2 *)common_info;
1216 	struct dq_map_info *map_info;
1217 
1218 	map_info = (struct dq_map_info *)((void *)common_info +
1219 		index->dq_map_index.offset * 4);
1220 
1221 	if (dramtype == LPDDR4X)
1222 		dramtype = LPDDR4;
1223 
1224 	if (dramtype <= LPDDR4)
1225 		writel((map_info->byte_map[dramtype / 4] >>
1226 			((dramtype % 4) * 8)) & 0xff,
1227 		       PHY_REG(phy_base, 0x4f));
1228 
1229 	return 0;
1230 }
1231 
1232 static void phy_cfg(struct dram_info *dram,
1233 		    struct rv1126_sdram_params *sdram_params)
1234 {
1235 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1236 	void __iomem *phy_base = dram->phy;
1237 	u32 i, dq_map, tmp;
1238 	u32 byte1 = 0, byte0 = 0;
1239 
1240 	sdram_cmd_dq_path_remap(dram, sdram_params);
1241 
1242 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
1243 	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
1244 		writel(sdram_params->phy_regs.phy[i][1],
1245 		       phy_base + sdram_params->phy_regs.phy[i][0]);
1246 	}
1247 
1248 	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
1249 	dq_map = readl(PHY_REG(phy_base, 0x4f));
1250 	for (i = 0; i < 4; i++) {
1251 		if (((dq_map >> (i * 2)) & 0x3) == 0)
1252 			byte0 = i;
1253 		if (((dq_map >> (i * 2)) & 0x3) == 1)
1254 			byte1 = i;
1255 	}
1256 
1257 	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
1258 	if (cap_info->bw == 2)
1259 		tmp |= 0xf;
1260 	else if (cap_info->bw == 1)
1261 		tmp |= ((1 << byte0) | (1 << byte1));
1262 	else
1263 		tmp |= (1 << byte0);
1264 
1265 	writel(tmp, PHY_REG(phy_base, 0xf));
1266 
1267 	/* lpddr4 odt control by phy, enable cs0 odt */
1268 	if (sdram_params->base.dramtype == LPDDR4 ||
1269 	    sdram_params->base.dramtype == LPDDR4X)
1270 		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
1271 				(1 << 6) | (1 << 4));
1272 	/* for ca training ca vref choose range1 */
1273 	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
1274 	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
1275 	/* for wr training PHY_0x7c[5], choose range0 */
1276 	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
1277 }
1278 
1279 static int update_refresh_reg(struct dram_info *dram)
1280 {
1281 	void __iomem *pctl_base = dram->pctl;
1282 	u32 ret;
1283 
1284 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1285 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1286 
1287 	return 0;
1288 }
1289 
1290 /*
1291  * rank = 1: cs0
1292  * rank = 2: cs1
1293  */
1294 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1295 {
1296 	u32 ret;
1297 	u32 i, temp;
1298 	u32 dqmap;
1299 
1300 	void __iomem *pctl_base = dram->pctl;
1301 	struct sdram_head_info_index_v2 *index =
1302 		(struct sdram_head_info_index_v2 *)common_info;
1303 	struct dq_map_info *map_info;
1304 
1305 	map_info = (struct dq_map_info *)((void *)common_info +
1306 		index->dq_map_index.offset * 4);
1307 
1308 	if (dramtype == LPDDR2)
1309 		dqmap = map_info->lp2_dq0_7_map;
1310 	else
1311 		dqmap = map_info->lp3_dq0_7_map;
1312 
1313 	pctl_read_mr(pctl_base, rank, mr_num);
1314 
1315 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1316 
1317 	if (dramtype != LPDDR4) {
1318 		temp = 0;
1319 		for (i = 0; i < 8; i++) {
1320 			temp = temp | (((ret >> i) & 0x1) <<
1321 				       ((dqmap >> (i * 4)) & 0xf));
1322 		}
1323 	} else {
1324 		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1325 	}
1326 
1327 	return temp;
1328 }
1329 
1330 /* before call this function autorefresh should be disabled */
1331 void send_a_refresh(struct dram_info *dram)
1332 {
1333 	void __iomem *pctl_base = dram->pctl;
1334 
1335 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1336 		continue;
1337 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1338 }
1339 
1340 static void enter_sr(struct dram_info *dram, u32 en)
1341 {
1342 	void __iomem *pctl_base = dram->pctl;
1343 
1344 	if (en) {
1345 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1346 		while (1) {
1347 			if (((readl(pctl_base + DDR_PCTL2_STAT) &
1348 			      PCTL2_SELFREF_TYPE_MASK) ==
1349 			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1350 			    ((readl(pctl_base + DDR_PCTL2_STAT) &
1351 			      PCTL2_OPERATING_MODE_MASK) ==
1352 			     PCTL2_OPERATING_MODE_SR))
1353 				break;
1354 		}
1355 	} else {
1356 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1357 		while ((readl(pctl_base + DDR_PCTL2_STAT) &
1358 		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1359 			continue;
1360 	}
1361 }
1362 
1363 void record_dq_prebit(struct dram_info *dram)
1364 {
1365 	u32 group, i, tmp;
1366 	void __iomem *phy_base = dram->phy;
1367 
1368 	for (group = 0; group < 4; group++) {
1369 		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1370 			/* l_loop_invdelaysel */
1371 			writel(dq_sel[i][0], PHY_REG(phy_base,
1372 						     grp_addr[group] + 0x2c));
1373 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1374 			writel(tmp, PHY_REG(phy_base,
1375 					    grp_addr[group] + dq_sel[i][1]));
1376 
1377 			/* r_loop_invdelaysel */
1378 			writel(dq_sel[i][0], PHY_REG(phy_base,
1379 						     grp_addr[group] + 0x2d));
1380 			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1381 			writel(tmp, PHY_REG(phy_base,
1382 					    grp_addr[group] + dq_sel[i][2]));
1383 		}
1384 	}
1385 }
1386 
/*
 * Trigger a DQ RX prebit/de-skew update: pulse PHY_0x70 bit 4 for 1us
 * while bits 1 and 6 are held clear.
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1396 
/*
 * Trigger a DQ TX prebit/de-skew update: with write-train disabled
 * (PHY_0x7a[1] cleared) and PHY_0x2[3] set, pulse PHY_0xc bit 6 for 1us.
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1407 
/*
 * Trigger a CA prebit/de-skew update: with PHY_0x25[2] cleared, pulse
 * PHY_0x22 bit 6 for 1us.
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1417 
1418 /*
1419  * dir: 0: de-skew = delta_*
1420  *	1: de-skew = reg val - delta_*
1421  * delta_dir: value for differential signal: clk/
1422  * delta_sig: value for single signal: ca/cmd
1423  */
/*
 * Adjust the CA/CMD and clock de-skew registers (PHY_0x150..0x16f),
 * either to an absolute value (dir == DESKEW_MDF_ABS_VAL) or by a
 * signed delta. Must run in self-refresh; on LPDDR4/LPDDR4X the DFI
 * low-power handshake is temporarily disabled around the update.
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	/* 2-bit cs mask: cs==0 -> bit0, cs==2 -> bit1, otherwise both */
	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	/* LP4/LP4X: force PHY_0x60[5] (dfi lp disable) for the update */
	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	/* de-skew registers are only safely modified in self-refresh */
	enter_sr(dram, 1);

	/* single-ended ca/cmd de-skew: 0x150..0x16f */
	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	/*
	 * Differential (clk) de-skew at 0x167/0x168. In delta mode the
	 * loop above already added delta_sig to 0x167, so subtract it
	 * back before applying delta_dif to the original value.
	 */
	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* 0x154/0x15a follow the differential value on LP4(-X) */
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));

}
1474 
1475 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1476 {
1477 	u32 i, j, offset = 0;
1478 	u32 min = 0x3f;
1479 	void __iomem *phy_base = dram->phy;
1480 	u32 byte_en;
1481 
1482 	if (signal == SKEW_TX_SIGNAL)
1483 		offset = 8;
1484 
1485 	if (signal == SKEW_CA_SIGNAL) {
1486 		for (i = 0; i < 0x20; i++)
1487 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1488 	} else {
1489 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1490 		for (j = offset; j < offset + rank * 4; j++) {
1491 			if (!((byte_en >> (j % 4)) & 1))
1492 				continue;
1493 			for (i = 0; i < 11; i++)
1494 				min = MIN(min,
1495 					  readl(PHY_REG(phy_base,
1496 							dqs_dq_skew_adr[j] +
1497 							i)));
1498 		}
1499 	}
1500 
1501 	return min;
1502 }
1503 
1504 static u32 low_power_update(struct dram_info *dram, u32 en)
1505 {
1506 	void __iomem *pctl_base = dram->pctl;
1507 	u32 lp_stat = 0;
1508 
1509 	if (en) {
1510 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1511 	} else {
1512 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1513 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1514 	}
1515 
1516 	return lp_stat;
1517 }
1518 
1519 /*
1520  * signal:
1521  * dir: 0: de-skew = delta_*
1522  *	1: de-skew = reg val - delta_*
1523  * delta_dir: value for differential signal: dqs
1524  * delta_sig: value for single signal: dq/dm
1525  */
1526 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1527 			     int delta_dif, int delta_sig, u32 rank)
1528 {
1529 	void __iomem *phy_base = dram->phy;
1530 	u32 i, j, tmp, offset;
1531 	u32 byte_en;
1532 
1533 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1534 
1535 	if (signal == SKEW_RX_SIGNAL)
1536 		offset = 0;
1537 	else
1538 		offset = 8;
1539 
1540 	for (j = offset; j < (offset + rank * 4); j++) {
1541 		if (!((byte_en >> (j % 4)) & 1))
1542 			continue;
1543 		for (i = 0; i < 0x9; i++) {
1544 			if (dir == DESKEW_MDF_ABS_VAL)
1545 				tmp = delta_sig;
1546 			else
1547 				tmp = delta_sig + readl(PHY_REG(phy_base,
1548 							dqs_dq_skew_adr[j] +
1549 							i));
1550 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1551 		}
1552 		if (dir == DESKEW_MDF_ABS_VAL)
1553 			tmp = delta_dif;
1554 		else
1555 			tmp = delta_dif + readl(PHY_REG(phy_base,
1556 						dqs_dq_skew_adr[j] + 9));
1557 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1558 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1559 	}
1560 	if (signal == SKEW_RX_SIGNAL)
1561 		update_dq_rx_prebit(dram);
1562 	else
1563 		update_dq_tx_prebit(dram);
1564 }
1565 
1566 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1567 {
1568 	void __iomem *phy_base = dram->phy;
1569 	u32 ret;
1570 	u32 dis_auto_zq = 0;
1571 	u32 odt_val_up, odt_val_dn;
1572 	u32 i, j;
1573 
1574 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1575 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1576 
1577 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1578 		for (i = 0; i < 4; i++) {
1579 			j = 0x110 + i * 0x10;
1580 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1581 			       PHY_REG(phy_base, j));
1582 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1583 			       PHY_REG(phy_base, j + 0x1));
1584 		}
1585 	}
1586 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1587 	/* use normal read mode for data training */
1588 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1589 
1590 	if (dramtype == DDR4)
1591 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1592 
1593 	/* choose training cs */
1594 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1595 	/* enable gate training */
1596 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1597 	udelay(50);
1598 	ret = readl(PHY_REG(phy_base, 0x91));
1599 	/* disable gate training */
1600 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1601 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1602 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1603 
1604 	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1605 
1606 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1607 		for (i = 0; i < 4; i++) {
1608 			j = 0x110 + i * 0x10;
1609 			writel(odt_val_dn, PHY_REG(phy_base, j));
1610 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1611 		}
1612 	}
1613 	return ret;
1614 }
1615 
/*
 * Run write-leveling on chip-select @cs.
 *
 * The MR value for the current frequency set point (INIT3) is
 * forwarded to the PHY (regs 0x3/0x4). For 2-rank DDR3/DDR4 the other
 * rank's output is disabled (MR1 bit 12) for the duration and
 * re-enabled afterwards. Hangs forever on timeout (fatal in TPL).
 *
 * Always returns 0.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch the MR value programmed for the current fsp */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	/* MR bits [13:8] plus a dram-type flag go into PHY_0x4 */
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until all enabled byte lanes (PHY_0xf) report done (PHY_0x92) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1678 
/*
 * 0xaa/0x55 bit-toggle data pattern, presumably used as training test
 * data — NOTE(review): its consumer is outside this file chunk; confirm.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1685 
/*
 * Run automatic read training on chip-select @cs (only 0 or 1 valid).
 *
 * DDR3 with the default RX vref (0x80) trains with vref lowered by
 * 0xa, restored on exit. Refresh timing from the controller is
 * mirrored into the PHY registers (presumably so the PHY can refresh
 * during training — confirm against PHY documentation). @mhz is
 * currently unused.
 *
 * Returns 0 on success, -1 on invalid cs, timeout, or training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/*
	 * Lower the RX vref for DDR3 at the default setting.
	 * NOTE(review): this runs before the cs > 1 check below, so an
	 * invalid cs would leave the lowered vref in place — confirm
	 * callers never pass cs > 1.
	 */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the DDR3 RX vref lowered at entry */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1802 
/*
 * Run automatic write training on chip-select @cs.
 *
 * LPDDR3 at <= 400MHz temporarily overrides CL/CWL in the active PHY
 * fsp register bank and writes MR2 = 0x6, restoring both afterwards.
 * For LPDDR4/LPDDR4X the trained write vref (average of PHY_0x384 and
 * PHY_0x385) plus range bit is saved into fsp_param[dst_fsp] for later
 * DFS use. Hangs forever on timeout (fatal in TPL).
 *
 * Returns 0 on success, -1 on a reported write-training error.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: save CL/CWL, force CL=8/CWL=4 and MR2=0x6 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait for train-done flag (PHY_0x92[7]) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the LPDDR3 CL/CWL and MR2 saved at entry */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1915 
1916 static int data_training(struct dram_info *dram, u32 cs,
1917 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1918 			 u32 training_flag)
1919 {
1920 	u32 ret = 0;
1921 
1922 	if (training_flag == FULL_TRAINING)
1923 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1924 				WRITE_TRAINING | READ_TRAINING;
1925 
1926 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1927 		ret = data_training_wl(dram, cs,
1928 				       sdram_params->base.dramtype,
1929 				       sdram_params->ch.cap_info.rank);
1930 		if (ret != 0)
1931 			goto out;
1932 	}
1933 
1934 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1935 		ret = data_training_rg(dram, cs,
1936 				       sdram_params->base.dramtype);
1937 		if (ret != 0)
1938 			goto out;
1939 	}
1940 
1941 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1942 		ret = data_training_rd(dram, cs,
1943 				       sdram_params->base.dramtype,
1944 				       sdram_params->base.ddr_freq);
1945 		if (ret != 0)
1946 			goto out;
1947 	}
1948 
1949 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1950 		ret = data_training_wr(dram, cs,
1951 				       sdram_params->base.dramtype,
1952 				       sdram_params->base.ddr_freq, dst_fsp);
1953 		if (ret != 0)
1954 			goto out;
1955 	}
1956 
1957 out:
1958 	return ret;
1959 }
1960 
1961 static int get_wrlvl_val(struct dram_info *dram,
1962 			 struct rv1126_sdram_params *sdram_params)
1963 {
1964 	int i, j, clk_skew;
1965 	void __iomem *phy_base = dram->phy;
1966 	u32 lp_stat;
1967 	int ret;
1968 
1969 	lp_stat = low_power_update(dram, 0);
1970 
1971 	clk_skew = 0x1f;
1972 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1973 			 sdram_params->base.dramtype);
1974 
1975 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1976 	if (sdram_params->ch.cap_info.rank == 2)
1977 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1978 
1979 	for (j = 0; j < 2; j++)
1980 		for (i = 0; i < 4; i++)
1981 			wrlvl_result[j][i] =
1982 				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1983 				clk_skew;
1984 
1985 	low_power_update(dram, lp_stat);
1986 
1987 	return ret;
1988 }
1989 
1990 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1991 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1992 				      void __iomem *phy_base, u8 cs_num)
1993 {
1994 	int i;
1995 
1996 	result->cs_num = cs_num;
1997 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1998 			  PHY_DQ_WIDTH_MASK;
1999 	for (i = 0; i < FSP_NUM; i++)
2000 		result->fsp_mhz[i] = 0;
2001 }
2002 
2003 static void save_rw_trn_min_max(void __iomem *phy_base,
2004 				struct cs_rw_trn_result *rd_result,
2005 				struct cs_rw_trn_result *wr_result,
2006 				u8 byte_en)
2007 {
2008 	u16 phy_ofs;
2009 	u8 dqs;
2010 	u8 dq;
2011 
2012 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2013 		if ((byte_en & BIT(dqs)) == 0)
2014 			continue;
2015 
2016 		/* Channel A or B (low or high 16 bit) */
2017 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2018 		/* low or high 8 bit */
2019 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2020 		for (dq = 0; dq < 8; dq++) {
2021 			rd_result->dqs[dqs].dq_min[dq] =
2022 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2023 			rd_result->dqs[dqs].dq_max[dq] =
2024 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2025 			wr_result->dqs[dqs].dq_min[dq] =
2026 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2027 			wr_result->dqs[dqs].dq_max[dq] =
2028 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2029 		}
2030 	}
2031 }
2032 
2033 static void save_rw_trn_deskew(void __iomem *phy_base,
2034 			       struct fsp_rw_trn_result *result, u8 cs_num,
2035 			       int min_val, bool rw)
2036 {
2037 	u16 phy_ofs;
2038 	u8 cs;
2039 	u8 dq;
2040 
2041 	result->min_val = min_val;
2042 
2043 	for (cs = 0; cs < cs_num; cs++) {
2044 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2045 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2046 		for (dq = 0; dq < 8; dq++) {
2047 			result->cs[cs].dqs[0].dq_deskew[dq] =
2048 				readb(PHY_REG(phy_base, phy_ofs + dq));
2049 			result->cs[cs].dqs[1].dq_deskew[dq] =
2050 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2051 			result->cs[cs].dqs[2].dq_deskew[dq] =
2052 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2053 			result->cs[cs].dqs[3].dq_deskew[dq] =
2054 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2055 		}
2056 
2057 		result->cs[cs].dqs[0].dqs_deskew =
2058 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2059 		result->cs[cs].dqs[1].dqs_deskew =
2060 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2061 		result->cs[cs].dqs[2].dqs_deskew =
2062 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2063 		result->cs[cs].dqs[3].dqs_deskew =
2064 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2065 	}
2066 }
2067 
/*
 * Publish the training result to a fixed DRAM location.
 *
 * Tags @result with DDR_DQ_EYE_FLAG and copies it to RW_TRN_RESULT_ADDR
 * so the ddr test tool (CONFIG_CMD_DDR_TEST_TOOL) can find it later.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
2073 #endif
2074 
/*
 * Training sequence used at the (high) target frequency.
 *
 * Derives DQS/clk/CA skew starting points from the write-leveling results
 * captured earlier by get_wrlvl_val(), programs them, then runs read-gate,
 * read and write training per rank.  Afterwards the per-bit de-skew values
 * are normalized (smallest value pulled back to zero for RX, TX and CA)
 * and read-gate training is re-run against the adjusted skews.
 *
 * @dram: dram info (pctl/phy bases)
 * @sdram_params: detected geometry and timing parameters
 * @fsp: destination frequency set point index
 *
 * Return: 0 on success, non-zero if any training step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	u8 byte_en;
	int ret;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
	/* average the leveling results over all enabled lanes and ranks */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
			if ((byte_en & BIT(i)) != 0)
				dqs_skew += wrlvl_result[j][i];
		}
	}
	dqs_skew = dqs_skew /
		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));

	/* center: put average DQS at 0x20, shift clk the opposite way */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* LPDDR4/X: shift clk/CA only enough to keep results >= 0 */
		min_val = 0xff;
		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
			for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
				if ((byte_en & BIT(i)) != 0)
					min_val = MIN(wrlvl_result[j][i], min_val);
			}

		if (min_val < 0) {
			clk_skew = -min_val;
			ca_skew = -min_val;
		} else {
			clk_skew = 0;
			ca_skew = 0;
		}
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* preload cs0 per-lane write DQS defaults used by write training */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	if (sdram_params->ch.cap_info.rank == 2) {
		/* same preload and training pass for the second rank */
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* normalize RX de-skew: pull the smallest value back to zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* normalize TX and CA together with the common minimum */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* re-run read gate training against the shifted de-skew values */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2189 
2190 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2191 {
2192 	writel(ddrconfig, &dram->msch->deviceconf);
2193 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2194 }
2195 
2196 static void update_noc_timing(struct dram_info *dram,
2197 			      struct rv1126_sdram_params *sdram_params)
2198 {
2199 	void __iomem *pctl_base = dram->pctl;
2200 	u32 bw, bl;
2201 
2202 	bw = 8 << sdram_params->ch.cap_info.bw;
2203 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2204 
2205 	/* update the noc timing related to data bus width */
2206 	if ((bw / 8 * bl) <= 16)
2207 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2208 	else if ((bw / 8 * bl) == 32)
2209 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2210 	else if ((bw / 8 * bl) == 64)
2211 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2212 	else
2213 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2214 
2215 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2216 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2217 
2218 	if (sdram_params->base.dramtype == LPDDR4 ||
2219 	    sdram_params->base.dramtype == LPDDR4X) {
2220 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2221 			(bw == 16) ? 0x1 : 0x2;
2222 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2223 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2224 	}
2225 
2226 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2227 	       &dram->msch->ddrtiminga0);
2228 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2229 	       &dram->msch->ddrtimingb0);
2230 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2231 	       &dram->msch->ddrtimingc0);
2232 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2233 	       &dram->msch->devtodev0);
2234 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2235 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2236 	       &dram->msch->ddr4timing);
2237 }
2238 
/*
 * Configure the DDR "split" feature for asymmetric high/low 16-bit rows.
 *
 * When the upper half of the data bus backs fewer rows than the lower
 * half (cs0_high16bit_row < cs0_row, or likewise on cs1), the accessible
 * capacity is reduced; this computes the split boundary and programs
 * grf_ddrsplit_con accordingly.  If the geometry is symmetric the
 * function leaves the split untouched and returns.
 *
 * Always returns 0.
 */
static int split_setup(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dramtype = sdram_params->base.dramtype;
	u32 split_size, split_mode;
	u64 cs_cap[2], cap;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
	/* only support the larger cap is in low 16bit */
	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
		/* cs0 upper half short by N rows -> capacity / 2^N */
		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
		cap_info->cs0_high16bit_row));
	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
		   (cap_info->rank == 2)) {
		if (!cap_info->cs1_high16bit_row)
			cap = cs_cap[0];
		else
			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
				cap_info->cs1_high16bit_row));
	} else {
		goto out;
	}
	/* split boundary is expressed in 16MB units (cap >> 24) */
	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
	if (cap_info->bw == 2)
		split_mode = SPLIT_MODE_32_L16_VALID;
	else
		split_mode = SPLIT_MODE_16_L8_VALID;

	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
		     (split_mode << SPLIT_MODE_OFFSET) |
		     (0x0 << SPLIT_BYPASS_OFFSET) |
		     (split_size << SPLIT_SIZE_OFFSET));

	/* route all AXI traffic through the msch so the split takes effect */
	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);

out:
	return 0;
}
2284 
2285 static void split_bypass(struct dram_info *dram)
2286 {
2287 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2288 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2289 		return;
2290 
2291 	/* bypass split */
2292 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2293 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2294 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2295 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2296 		     (0x0 << SPLIT_SIZE_OFFSET));
2297 }
2298 
2299 static void dram_all_config(struct dram_info *dram,
2300 			    struct rv1126_sdram_params *sdram_params)
2301 {
2302 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2303 	u32 dram_type = sdram_params->base.dramtype;
2304 	void __iomem *pctl_base = dram->pctl;
2305 	u32 sys_reg2 = 0;
2306 	u32 sys_reg3 = 0;
2307 	u64 cs_cap[2];
2308 	u32 cs_pst;
2309 
2310 	set_ddrconfig(dram, cap_info->ddrconfig);
2311 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2312 			 &sys_reg3, 0);
2313 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2314 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2315 
2316 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2317 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2318 
2319 	if (cap_info->rank == 2) {
2320 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2321 			6 + 2;
2322 		if (cs_pst > 28)
2323 			cs_cap[0] = 1llu << cs_pst;
2324 	}
2325 
2326 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2327 			(((cs_cap[0] >> 20) / 64) & 0xff),
2328 			&dram->msch->devicesize);
2329 	update_noc_timing(dram, sdram_params);
2330 }
2331 
2332 static void enable_low_power(struct dram_info *dram,
2333 			     struct rv1126_sdram_params *sdram_params)
2334 {
2335 	void __iomem *pctl_base = dram->pctl;
2336 	u32 grf_lp_con;
2337 
2338 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2339 
2340 	if (sdram_params->base.dramtype == DDR4)
2341 		grf_lp_con = (0x7 << 16) | (1 << 1);
2342 	else if (sdram_params->base.dramtype == DDR3)
2343 		grf_lp_con = (0x7 << 16) | (1 << 0);
2344 	else
2345 		grf_lp_con = (0x7 << 16) | (1 << 2);
2346 
2347 	/* en lpckdis_en */
2348 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2349 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2350 
2351 	/* enable sr, pd */
2352 	if (dram->pd_idle == 0)
2353 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2354 	else
2355 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2356 	if (dram->sr_idle == 0)
2357 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2358 	else
2359 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2360 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2361 }
2362 
/*
 * Pass boot information (serial console, DDR memory layout) to the next
 * boot stage through Rockchip ATAGs.
 *
 * The reported bank sizes account for row_3_4 parts (3/4 density) and
 * for capacity lost to the high/low 16-bit split when it is active.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;
	u32 split, split_size;
	u64 reduce_cap = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* start a fresh atag list; the serial tag goes first */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	if (cap_info->row_3_4) {
		/* 3/4-density parts expose only 3/4 of the nominal cap */
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
		/* split active: half of the space above the boundary is lost */
		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* cs address bit position from the controller address map */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	if (cs_cap[1] && cs_pst > 27) {
		/* ranks are not contiguous: report two separate banks */
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is populated but never passed to
	 * atags_set_tag() — confirm whether the SOC_INFO tag is
	 * intentionally omitted here.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2424 
2425 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2426 {
2427 	u32 split;
2428 
2429 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2430 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2431 		split = 0;
2432 	else
2433 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2434 			SPLIT_SIZE_MASK;
2435 
2436 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2437 			     &sdram_params->base, split);
2438 }
2439 
/*
 * Core DRAM bring-up sequence: clocks, staged de-reset, PHY and
 * controller configuration, mode-register programming and read-gate
 * training.
 *
 * @post_init: non-zero on the second (final) pass after capacity
 *             detection; enables cs1 training and error prints.
 *
 * Return: 0 on success, -1 on any training/verification failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* staged de-reset: release blocks one at a time between cfg steps */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2) {
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
		/* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* halve tREFI for extended temperature range operation */
	u32 tmp, trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	/* release everything and wait for controller init to complete */
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		/* program LP4/LP4X ODT/VREF mode registers from INIT6/INIT7 */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/* LPDDR4 sanity check: MR14 (VREF-DQ) must read back 0x4d */
	if (sdram_params->base.dramtype == LPDDR4) {
		mr_tmp = read_mr(dram, 1, 14, LPDDR4);

		if (mr_tmp != 0x4d)
			return -1;
	}

	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* scale PHY vref reading (x39) into the controller's units */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2558 
2559 static u64 dram_detect_cap(struct dram_info *dram,
2560 			   struct rv1126_sdram_params *sdram_params,
2561 			   unsigned char channel)
2562 {
2563 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2564 	void __iomem *pctl_base = dram->pctl;
2565 	void __iomem *phy_base = dram->phy;
2566 	u32 mr8;
2567 
2568 	u32 bktmp;
2569 	u32 coltmp;
2570 	u32 rowtmp;
2571 	u32 cs;
2572 	u32 dram_type = sdram_params->base.dramtype;
2573 	u32 pwrctl;
2574 	u32 i, dq_map;
2575 	u32 byte1 = 0, byte0 = 0;
2576 	u32 tmp, byte;
2577 	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2578 	struct dq_map_info *map_info = (struct dq_map_info *)
2579 				       ((void *)common_info + index->dq_map_index.offset * 4);
2580 
2581 	cap_info->bw = dram_type == DDR3 ? 0 : 1;
2582 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2583 		if (dram_type != DDR4) {
2584 			coltmp = 12;
2585 			bktmp = 3;
2586 			if (dram_type == LPDDR2)
2587 				rowtmp = 15;
2588 			else
2589 				rowtmp = 16;
2590 
2591 			if (sdram_detect_col(cap_info, coltmp) != 0)
2592 				goto cap_err;
2593 
2594 			sdram_detect_bank(cap_info, coltmp, bktmp);
2595 			if (dram_type != LPDDR3)
2596 				sdram_detect_dbw(cap_info, dram_type);
2597 		} else {
2598 			coltmp = 10;
2599 			bktmp = 4;
2600 			rowtmp = 17;
2601 
2602 			cap_info->col = 10;
2603 			cap_info->bk = 2;
2604 			sdram_detect_bg(cap_info, coltmp);
2605 		}
2606 
2607 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2608 			goto cap_err;
2609 
2610 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2611 	} else {
2612 		cap_info->col = 10;
2613 		cap_info->bk = 3;
2614 		mr8 = read_mr(dram, 1, 8, dram_type);
2615 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2616 		mr8 = (mr8 >> 2) & 0xf;
2617 		if (mr8 >= 0 && mr8 <= 6) {
2618 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2619 		} else if (mr8 == 0xc) {
2620 			cap_info->cs0_row = 13;
2621 		} else {
2622 			printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2623 			goto cap_err;
2624 		}
2625 		if (cap_info->dbw == 0)
2626 			cap_info->cs0_row++;
2627 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2628 		if (cap_info->cs0_row >= 17) {
2629 			printascii("Cap ERR: ");
2630 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2631 			goto cap_err;
2632 			// cap_info->cs0_row = 16;
2633 			// cap_info->row_3_4 = 0;
2634 		}
2635 	}
2636 
2637 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2638 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2639 
2640 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2641 		cs = 1;
2642 	else
2643 		cs = 0;
2644 	cap_info->rank = cs + 1;
2645 
2646 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2647 
2648 	tmp = data_training_rg(dram, 0, dram_type) & 0xf;
2649 
2650 	if (tmp == 0) {
2651 		cap_info->bw = 2;
2652 	} else {
2653 		if (dram_type == DDR3 || dram_type == DDR4) {
2654 			dq_map = 0;
2655 			byte = 0;
2656 			for (i = 0; i < 4; i++) {
2657 				if ((tmp & BIT(i)) == 0) {
2658 					dq_map |= byte << (i * 2);
2659 					byte++;
2660 				}
2661 			}
2662 			cap_info->bw = byte / 2;
2663 			for (i = 0; i < 4; i++) {
2664 				if ((tmp & BIT(i)) != 0) {
2665 					dq_map |= byte << (i * 2);
2666 					byte++;
2667 				}
2668 			}
2669 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24);
2670 		} else {
2671 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2672 			for (i = 0; i < 4; i++) {
2673 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2674 					byte0 = i;
2675 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2676 					byte1 = i;
2677 			}
2678 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2679 					BIT(byte0) | BIT(byte1));
2680 			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2681 				cap_info->bw = 1;
2682 			else
2683 				cap_info->bw = 0;
2684 		}
2685 	}
2686 	if (cap_info->bw > 0)
2687 		cap_info->dbw = 1;
2688 
2689 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2690 
2691 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2692 	if (cs) {
2693 		cap_info->cs1_row = cap_info->cs0_row;
2694 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2695 	} else {
2696 		cap_info->cs1_row = 0;
2697 		cap_info->cs1_high16bit_row = 0;
2698 	}
2699 
2700 	if (dram_type == LPDDR3)
2701 		sdram_detect_dbw(cap_info, dram_type);
2702 
2703 	return 0;
2704 cap_err:
2705 	return -1;
2706 }
2707 
2708 static int dram_detect_cs1_row(struct dram_info *dram,
2709 			       struct rv1126_sdram_params *sdram_params,
2710 			       unsigned char channel)
2711 {
2712 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2713 	void __iomem *pctl_base = dram->pctl;
2714 	u32 ret = 0;
2715 	void __iomem *test_addr;
2716 	u32 row, bktmp, coltmp, bw;
2717 	u64 cs0_cap;
2718 	u32 byte_mask;
2719 	u32 cs_pst;
2720 	u32 cs_add = 0;
2721 	u32 max_row;
2722 
2723 	if (cap_info->rank == 2) {
2724 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2725 			6 + 2;
2726 		if (cs_pst < 28)
2727 			cs_add = 1;
2728 
2729 		cs0_cap = 1 << cs_pst;
2730 
2731 		if (sdram_params->base.dramtype == DDR4) {
2732 			if (cap_info->dbw == 0)
2733 				bktmp = cap_info->bk + 2;
2734 			else
2735 				bktmp = cap_info->bk + 1;
2736 		} else {
2737 			bktmp = cap_info->bk;
2738 		}
2739 		bw = cap_info->bw;
2740 		coltmp = cap_info->col;
2741 
2742 		if (bw == 2)
2743 			byte_mask = 0xFFFF;
2744 		else
2745 			byte_mask = 0xFF;
2746 
2747 		max_row = (cs_pst == 31) ? 30 : 31;
2748 
2749 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2750 
2751 		row = (cap_info->cs0_row > max_row) ? max_row :
2752 			cap_info->cs0_row;
2753 
2754 		for (; row > 12; row--) {
2755 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2756 				    (u32)cs0_cap +
2757 				    (1ul << (row + bktmp + coltmp +
2758 					     cs_add + bw - 1ul)));
2759 
2760 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2761 			writel(PATTERN, test_addr);
2762 
2763 			if (((readl(test_addr) & byte_mask) ==
2764 			     (PATTERN & byte_mask)) &&
2765 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2766 			      byte_mask) == 0)) {
2767 				ret = row;
2768 				break;
2769 			}
2770 		}
2771 	}
2772 
2773 	return ret;
2774 }
2775 
/* return: 0 = success, other = fail */
/*
 * Full init-with-detection flow: bring the DRAM up with default
 * parameters, detect the real geometry, then re-initialize with the
 * corrected parameters and record the result in pmugrf os_reg2/3.
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (sdram_init_(dram, sdram_params, 0)) {
		if (sdram_params->base.dramtype == DDR3) {
			/* retry DDR3 with an alternate byte-lane mapping */
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					(0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	/* basic read/write sanity check before probing capacity */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	/* split must be off while address bits are being probed */
	split_bypass(dram);
	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* re-init with controller params matching the detected geometry */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		/* publish the detected cs1 row count for later stages */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
	split_setup(dram, sdram_params);
out:
	return ret;
}
2835 
2836 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2837 {
2838 	u32 i;
2839 	u32 offset = 0;
2840 	struct ddr2_3_4_lp2_3_info *ddr_info;
2841 
2842 	if (!freq_mhz) {
2843 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2844 		if (ddr_info)
2845 			freq_mhz =
2846 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2847 				DDR_FREQ_MASK;
2848 		else
2849 			freq_mhz = 0;
2850 	}
2851 
2852 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2853 		if (sdram_configs[i].base.ddr_freq == 0 ||
2854 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2855 			break;
2856 	}
2857 	offset = i == 0 ? 0 : i - 1;
2858 
2859 	return &sdram_configs[offset];
2860 }
2861 
/*
 * Controller timing registers that pre_set_rate() copies from the
 * parameter table into the target frequency set before a rate switch.
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2885 
/*
 * PHY registers (CL/CWL/AL) that pre_set_rate() copies into the target
 * frequency set's register window before a rate switch.
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2891 
/*
 * Stage controller/PHY timing registers and mode-register shadows for the
 * target frequency set @dst_fsp ahead of the actual rate switch.
 *
 * @dram:        driver state (pctl/phy base addresses etc.)
 * @sdram_params: config matching the target frequency
 * @dst_fsp:     destination controller frequency-set index
 * @dst_fsp_lp4: destination LPDDR4 FSP selector, used in MR13 (bit 6/7)
 *
 * The PCTL updates are bracketed by sw_set_req()/sw_set_ack() as required
 * for quasi-dynamic register writes.  For LPDDR4/LPDDR4X the MR values
 * staged in INIT3/4/6/7 are also written to the DRAM via pctl_write_mr()
 * and mirrored into PHY registers 0x17-0x1d.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		/*
		 * pctl_regs is 0xFFFFFFFF-terminated; resume the inner scan
		 * at the last hit ('find') since both lists share an order.
		 */
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* halve t_rfc_nom_x1_x32 (RFSHTMG[27:16]) for extended-temp parts */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		/* per-fsp shadow bank inside the PHY for fsp 1..3 */
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		/* clear FSP-OP/FSP-WR (bits 7/6), then select dst fsp */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 (no PHY mirror for this one) */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
3017 
/*
 * Capture the effective drive-strength/ODT/Vref/NOC settings of frequency
 * set @dst_fsp into the global fsp_param[] table so a later stage (e.g. the
 * OS DDR-frequency driver) can restore them without re-deriving.
 *
 * Values are read back from the live PHY and PCTL registers rather than
 * from the config tables, so any training adjustments are preserved.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4/X always terminates read ODT to ground (pull-down only) */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* pull-up setting (0x111) takes precedence over pull-down (0x110) */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	if (sdram_params->base.dramtype == DDR3) {
		/* drive strength and RTT_NOM live in MR1 (INIT3 high half) */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		/* LPDDR3 drive strength is in MR3 (INIT4) */
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4 ||
		   sdram_params->base.dramtype == LPDDR4X) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * Derive CA Vref as midpoint of the trained window
		 * (MAX of lower edges, MIN of upper edges) per channel;
		 * presumably 0x3ae..0x3de are the CA training result
		 * registers - TODO confirm against PHY TRM.
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		/* carry bit6 of PHY reg 0x1e into both vref_ca values */
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	/* snapshot the NOC timings that update_noc_timing() programmed */
	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	/* mark the slot valid for consumers scanning fsp_param[] */
	p_fsp_param->flag = FSP_FLAG;
}
3133 
3134 #ifndef CONFIG_SPL_KERNEL_BOOT
3135 static void copy_fsp_param_to_ddr(void)
3136 {
3137 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3138 	       sizeof(fsp_param));
3139 }
3140 #endif
3141 
3142 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3143 			     struct sdram_cap_info *cap_info, u32 dram_type,
3144 			     u32 freq)
3145 {
3146 	u64 cs0_cap;
3147 	u32 die_cap;
3148 	u32 trfc_ns, trfc4_ns;
3149 	u32 trfc, txsnr;
3150 	u32 txs_abort_fast = 0;
3151 	u32 tmp;
3152 
3153 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3154 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3155 
3156 	switch (dram_type) {
3157 	case DDR3:
3158 		if (die_cap <= DIE_CAP_512MBIT)
3159 			trfc_ns = 90;
3160 		else if (die_cap <= DIE_CAP_1GBIT)
3161 			trfc_ns = 110;
3162 		else if (die_cap <= DIE_CAP_2GBIT)
3163 			trfc_ns = 160;
3164 		else if (die_cap <= DIE_CAP_4GBIT)
3165 			trfc_ns = 260;
3166 		else
3167 			trfc_ns = 350;
3168 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3169 		break;
3170 
3171 	case DDR4:
3172 		if (die_cap <= DIE_CAP_2GBIT) {
3173 			trfc_ns = 160;
3174 			trfc4_ns = 90;
3175 		} else if (die_cap <= DIE_CAP_4GBIT) {
3176 			trfc_ns = 260;
3177 			trfc4_ns = 110;
3178 		} else if (die_cap <= DIE_CAP_8GBIT) {
3179 			trfc_ns = 350;
3180 			trfc4_ns = 160;
3181 		} else {
3182 			trfc_ns = 550;
3183 			trfc4_ns = 260;
3184 		}
3185 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3186 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3187 		break;
3188 
3189 	case LPDDR3:
3190 		if (die_cap <= DIE_CAP_4GBIT)
3191 			trfc_ns = 130;
3192 		else
3193 			trfc_ns = 210;
3194 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3195 		break;
3196 
3197 	case LPDDR4:
3198 	case LPDDR4X:
3199 		if (die_cap <= DIE_CAP_2GBIT)
3200 			trfc_ns = 130;
3201 		else if (die_cap <= DIE_CAP_4GBIT)
3202 			trfc_ns = 180;
3203 		else if (die_cap <= DIE_CAP_8GBIT)
3204 			trfc_ns = 280;
3205 		else
3206 			trfc_ns = 380;
3207 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3208 		break;
3209 
3210 	default:
3211 		return;
3212 	}
3213 	trfc = (trfc_ns * freq + 999) / 1000;
3214 
3215 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3216 		switch (pctl_regs->pctl[i][0]) {
3217 		case DDR_PCTL2_RFSHTMG:
3218 			tmp = pctl_regs->pctl[i][1];
3219 			/* t_rfc_min */
3220 			tmp &= ~((u32)0x3ff);
3221 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3222 			pctl_regs->pctl[i][1] = tmp;
3223 			break;
3224 
3225 		case DDR_PCTL2_DRAMTMG8:
3226 			if (dram_type == DDR3 || dram_type == DDR4) {
3227 				tmp = pctl_regs->pctl[i][1];
3228 				/* t_xs_x32 */
3229 				tmp &= ~((u32)0x7f);
3230 				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;
3231 
3232 				if (dram_type == DDR4) {
3233 					/* t_xs_abort_x32 */
3234 					tmp &= ~((u32)(0x7f << 16));
3235 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
3236 					/* t_xs_fast_x32 */
3237 					tmp &= ~((u32)(0x7f << 24));
3238 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
3239 				}
3240 
3241 				pctl_regs->pctl[i][1] = tmp;
3242 			}
3243 			break;
3244 
3245 		case DDR_PCTL2_DRAMTMG14:
3246 			if (dram_type == LPDDR3 ||
3247 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3248 				tmp = pctl_regs->pctl[i][1];
3249 				/* t_xsr */
3250 				tmp &= ~((u32)0xfff);
3251 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3252 				pctl_regs->pctl[i][1] = tmp;
3253 			}
3254 			break;
3255 
3256 		default:
3257 			break;
3258 		}
3259 	}
3260 }
3261 
/*
 * Switch the DRAM to @freq MHz using controller frequency set @dst_fsp.
 *
 * Sequence: stage the new timings (pre_set_rate), take the DRAM through
 * self-refresh, re-program the DPLL and PHY PLL with clocks gated, exit
 * self-refresh, rewrite the mode registers for the new speed, then rerun
 * training and save the resulting parameters.
 *
 * @cur_freq is accepted but not referenced in this implementation;
 * NOTE(review): @training_en is likewise never consulted here - training
 * always runs via high_freq_training().  The statement ordering below is
 * a hardware handshake sequence; do not reorder.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* suspend low-power idling during the switch; restored at the end */
	lp_stat = low_power_update(dram, 0);
	sdram_params_new = get_default_sdram_config(freq);
	/* keep the detected topology, only the timings come from the table */
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pctl_modify_trfc(&sdram_params_new->pctl_regs,
			 &sdram_params->ch.cap_info, dramtype, freq);
	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * DDR3 runs DLL-off when MR1 bit0 is set; DDR4 encodes it inverted
	 * (bit0 clear means DLL off).  Work out both current and target.
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* if the DLL is currently on, switch it off via MR1 before SR entry */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* hold the PHY data buffers while clocks are being reprogrammed */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* skip ZQCL on self-refresh exit for both frequency sets */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* gate msch and upctl clocks while the PLLs change */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* reset the PHY around the PLL retune */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* release buffers and ungate the clocks again */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* wait for the DFI init handshake to complete */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* select the target frequency set in controller and PHY */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* pulse PHY reg 0x71 bit5 (purpose not documented here) */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* re-issue mode registers staged in INIT3/4/6/7 for the new fsp */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			/* DLL stays on: reset it via MR0 and let it relock */
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		/* MR13: switch FSP-OP (bit7) to the destination set */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3444 
/*
 * Cycle the DRAM through each configured frequency (F1..F3, then F0) so
 * every controller frequency set gets trained and its parameters saved.
 *
 * In the CONFIG_SPL_KERNEL_BOOT build only the final F0 switch is done
 * (into fsp 1), skipping the intermediate sets and the fsp_param scrub.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	/* F0 is the final (normal operating) frequency */
	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* clear both the DRAM hand-off area and the local table first */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* train f1/f2/f3 into fsp 1/2/3, alternating the LP4 fsp selector */
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3501 
3502 int get_uart_config(void)
3503 {
3504 	struct sdram_head_info_index_v2 *index =
3505 		(struct sdram_head_info_index_v2 *)common_info;
3506 	struct global_info *gbl_info;
3507 
3508 	gbl_info = (struct global_info *)((void *)common_info +
3509 		index->global_index.offset * 4);
3510 
3511 	return gbl_info->uart_info;
3512 }
3513 
/*
 * TPL entry point for DRAM bring-up: validate the common_info blob,
 * detect the installed memory, train all frequency sets and publish the
 * results for later boot stages.
 *
 * Return: 0 on success, -1 on failure.
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* fixed peripheral base addresses for the RV1126 */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Sanity-check the v2 common_info header: version, per-type info
	 * sizes (in 32-bit words) and non-zero section offsets.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0 ||
	    index->lp4x_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];
	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
	/* dram type 8 forces every canned config to LPDDR4X */
	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
		sdram_configs[j].base.dramtype = LPDDR4X;
	#endif
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		/* apply the 2T-timing choice to MSTR (pctl[0], bit 10) */
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		/* report what was attempted before bailing out */
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	/* train every frequency set, then hand results to later stages */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3604 #endif /* CONFIG_TPL_BUILD */
3605