// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/hardware.h>
#include <asm/arch/rk_atags.h>
#include <asm/arch/cru_rv1126.h>
#include <asm/arch/grf_rv1126.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_rv1126.h>

/* define training flag */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif

#ifdef CONFIG_TPL_BUILD

struct dram_info {
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;
	u32 pd_idle;
};

#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
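/*
 * Each entry above encodes the geometry key matched by
 * calculate_ddrconfig(): bit[8] = rank - 1, bits[7:5] = row - 13,
 * bit[3] = (bank == 3), bits[2:0] = bw + col - 10.  Bit[4] is only set
 * in entries 5-7, which are reached solely through the dual-rank
 * equal-row special case there, so it presumably marks those mappings.
 */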

/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
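/*
 * The DDR4 entries above encode: bit[7] = rank - 1, bits[6:4] = row - 13,
 * bits[2:1] = bw, bit[0] = die_bw; bit[3] marks entries matched only by
 * the dual-rank equal-row special case.  calculate_ddrconfig() compares
 * ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw against them.
 */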

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
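/*
 * {DDR4 ddrconfig, equivalent DDR3-style ddrconfig}: calculate_ddrconfig()
 * folds its DDR4 result (10-21) into the DDR3-style index that is actually
 * programmed, and set_ctl_address_map() performs the reverse lookup.
 */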

u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
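/*
 * Each row above appears to hold the nine words that
 * set_ctl_address_map() copies into the controller's ADDRMAP0..ADDRMAP8
 * registers; words 6 and 7 (ADDRMAP6/7) are the ones it subsequently
 * patches for unused row bits.
 */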

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};
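/*
 * Each triplet above is {invdelaysel probe value, left-loop result reg
 * offset, right-loop result reg offset} within an address group: see
 * record_dq_prebit(), which writes column 0 to group offset 0x2c/0x2d and
 * copies the readback from 0x2e/0x2f to the offsets in columns 1 and 2.
 */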

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
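	/*
	 * With the 24 MHz reference, Fout = 24 MHz * fbdiv /
	 * (refdiv * postdiv1 * postdiv2).  Example: a 924 MHz DDR rate
	 * requests a 462 MHz DPLL, so postdiv1 = 4, postdiv2 = 1 and
	 * fbdiv = 462 * 4 / 24 = 77.
	 */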

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the Inno DDR PHY needs freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done=1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait for programming to finish */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
				PCTL2_SW_DONE_ACK)
			break;
	}
}

static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* set unused row bits to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

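/*
 * Round the requested termination up to the next value the LPDDR4 mode
 * registers can express and return the corresponding MR field encoding.
 */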
static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = 0;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else if (dramtype == LPDDR4X)
		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

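	/*
	 * ca_vref/dq_vref look like units of 0.1% of VDDQ: LPDDR4 MR12/MR14
	 * range 0 spans 10.0%-30.0% and range 1 22.0%-42.0% in 0.4% steps,
	 * which matches the (x - 100) / 4 and (x - 220) / 4 encodings
	 * below.  The other branch presumably rescales by 11/6 to convert
	 * LPDDR4X VDDQ-relative (0.6 V) values to VDD2-relative (1.1 V)
	 * steps before clamping and encoding.
	 */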
	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* the dram odt enable freq selects phy drv, dram odt and phy slew rate */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* the phy odt enable freq selects dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

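	/*
	 * vref_inner/vref_out are receiver reference levels on a 0..255
	 * scale where 0x80 is VDDQ / 2.  With single-sided termination the
	 * eye centre sits at half the swing set by the driver/termination
	 * divider, e.g. 128 * R_odt / (R_odt + R_drv) for pull-down-only
	 * ODT, which is what the expressions below compute.
	 */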
	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				(phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else { /* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype == LPDDR4X)
		dramtype = LPDDR4;

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
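	/*
	 * PHY_0x4f holds the byte map programmed by
	 * sdram_cmd_dq_path_remap(); find which physical lanes carry
	 * logical bytes 0 and 1 so the DQ-width mask below enables the
	 * correct lanes for 8/16-bit widths.
	 */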
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0) {
			byte0 = i;
			break;
		}
	}
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 1) {
			byte1 = i;
			break;
		}
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt is controlled by the phy; enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training, choose ca vref range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training (PHY_0x7c[5]), choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

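	/*
	 * Toggling RFSHCTL3 bit 1 (refresh_update_level) makes the
	 * controller latch the updated refresh settings.
	 */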
	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
u32 read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	pctl_read_mr(pctl_base, rank, mr_num);

	if (dramtype == LPDDR3) {
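		/*
		 * LPDDR3 returns the MR value over the (possibly swizzled)
		 * DQ lines, so map each bit back through lp3_dq0_7_map to
		 * recover the JEDEC bit order.
		 */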
		temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
		ret = 0;
		for (i = 0; i < 8; i++)
			ret |= ((temp >> i) & 0x1) << ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
	} else {
		ret = readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff;
	}

	return ret;
}

/* auto-refresh must be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: clk
 * delta_sig: value for single-ended signal: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: dqs
 * delta_sig: value for single-ended signal: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

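	/*
	 * PHY_0x91 appears to report per-byte gate-training done flags;
	 * XOR-ing with the enabled-byte mask from PHY_0xf leaves 0 when
	 * every enabled lane trained, non-zero on failure.
	 */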
	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}

static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable the other cs's output (MR1 Qoff, bit 12) */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* re-enable the other cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
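/*
 * The 32 bytes above appear to be the data-eye training pattern
 * (alternating and inverted 0xaa/0x55 bursts) used by the read/write
 * training routines below.
 */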

static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only one cs at a time; 0: cs0, 1: cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing; RFSHTMG.t_rfc_nom is in units of 32 cycles */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

1757 	/* select read-training auto mode */
1758 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1759 	/* start automatic read training */
1760 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1761 
1762 	/* wait for read training to complete */
1763 	while (1) {
1764 		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1765 			break;
1766 
1767 		udelay(1);
1768 		if (timeout_us-- == 0) {
1769 			printascii("error: read training timeout\n");
1770 			return -1;
1771 		}
1772 	}
1773 
1774 	/* check the read-training result status */
1775 	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1776 	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1777 		printascii("error: read training error\n");
1778 		return -1;
1779 	}
1780 
1781 	/* exit read training by clearing the enable bit (PHY_0x70[1]) */
1782 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1783 
1784 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1785 
1786 	if (dramtype == DDR3 && vref_inner == 0x80) {
1787 		for (i = 0; i < 4; i++)
1788 			writel(vref_inner,
1789 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1790 
1791 		/* reg_rx_vref_value_update */
1792 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1793 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1794 	}
1795 
1796 	return 0;
1797 }
1798 
1799 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1800 			    u32 mhz, u32 dst_fsp)
1801 {
1802 	void __iomem *pctl_base = dram->pctl;
1803 	void __iomem *phy_base = dram->phy;
1804 	u32 trefi_1x, trfc_1x;
1805 	u32 dis_auto_zq = 0;
1806 	u32 timeout_us = 1000;
1807 	u32 cur_fsp;
1808 	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1809 
1810 	if (dramtype == LPDDR3 && mhz <= 400) {
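	/*
	 * For LPDDR3 at or below 400 MHz, temporarily force CL = 8 / CWL = 4
	 * in the PHY's per-FSP timing registers and write MR2 = 0x6 to match;
	 * the original values are restored at the end of this function.
	 */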
1811 		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1812 		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1813 		cl = readl(PHY_REG(phy_base, offset));
1814 		cwl = readl(PHY_REG(phy_base, offset + 2));
1815 
1816 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1817 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1818 		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1819 	}
1820 
1821 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1822 
1823 	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1824 	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1825 	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1826 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1827 	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1828 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1829 	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1830 	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1831 	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1832 	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1833 
1834 	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1835 	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1836 
1837 	/* config refresh timing */
1838 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1839 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1840 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1841 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1842 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1843 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1844 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1845 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1846 	/* reg_phy_trfc */
1847 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1848 	/* reg_max_refi_cnt */
1849 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1850 
1851 	/* choose training cs */
1852 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1853 
1854 	/* PHY_0x7a[4] reg_wr_train_dqs_default_bypass: */
1855 	/* 0: use the write-leveling value */
1856 	/* 1: use regs 0x233/0x237/0x2b3/0x2b7 */
1857 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1858 
1859 	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1860 	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1861 
1862 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1863 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1864 
1865 	send_a_refresh(dram);
1866 
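	/* wait for the write-training done flag (PHY_0x92[7]) */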
1867 	while (1) {
1868 		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1869 			break;
1870 
1871 		udelay(1);
1872 		if (timeout_us-- == 0) {
1873 			printascii("error: write training timeout\n");
1874 			while (1)
1875 				;
1876 		}
1877 	}
1878 
1879 	/* check the write-training result status */
1880 	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1881 		printascii("error: write training error\n");
1882 		return -1;
1883 	}
1884 
1885 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1886 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1887 
1888 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1889 
1890 	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
1891 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1892 		fsp_param[dst_fsp].vref_dq[cs] =
1893 			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1894 			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1895 		/* add range info */
1896 		fsp_param[dst_fsp].vref_dq[cs] |=
1897 			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1898 	}
1899 
1900 	if (dramtype == LPDDR3 && mhz <= 400) {
1901 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1902 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1903 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1904 			       DDR_PCTL2_INIT3);
1905 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1906 			      dramtype);
1907 	}
1908 
1909 	return 0;
1910 }
1911 
1912 static int data_training(struct dram_info *dram, u32 cs,
1913 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1914 			 u32 training_flag)
1915 {
1916 	u32 ret = 0;
1917 
1918 	if (training_flag == FULL_TRAINING)
1919 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1920 				WRITE_TRAINING | READ_TRAINING;
1921 
1922 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1923 		ret = data_training_wl(dram, cs,
1924 				       sdram_params->base.dramtype,
1925 				       sdram_params->ch.cap_info.rank);
1926 		if (ret != 0)
1927 			goto out;
1928 	}
1929 
1930 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1931 		ret = data_training_rg(dram, cs,
1932 				       sdram_params->base.dramtype);
1933 		if (ret != 0)
1934 			goto out;
1935 	}
1936 
1937 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1938 		ret = data_training_rd(dram, cs,
1939 				       sdram_params->base.dramtype,
1940 				       sdram_params->base.ddr_freq);
1941 		if (ret != 0)
1942 			goto out;
1943 	}
1944 
1945 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1946 		ret = data_training_wr(dram, cs,
1947 				       sdram_params->base.dramtype,
1948 				       sdram_params->base.ddr_freq, dst_fsp);
1949 		if (ret != 0)
1950 			goto out;
1951 	}
1952 
1953 out:
1954 	return ret;
1955 }
1956 
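/*
 * Run write leveling on every rank with a fixed clock/CA deskew of 0x1f and
 * record the per-byte results, normalized by subtracting that clock skew.
 */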
1957 static int get_wrlvl_val(struct dram_info *dram,
1958 			 struct rv1126_sdram_params *sdram_params)
1959 {
1960 	int i, j, clk_skew;
1961 	void __iomem *phy_base = dram->phy;
1962 	u32 lp_stat;
1963 	int ret;
1964 
1965 	lp_stat = low_power_update(dram, 0);
1966 
1967 	clk_skew = 0x1f;
1968 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1969 			 sdram_params->base.dramtype);
1970 
1971 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1972 	if (sdram_params->ch.cap_info.rank == 2)
1973 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1974 
1975 	for (j = 0; j < 2; j++)
1976 		for (i = 0; i < 4; i++)
1977 			wrlvl_result[j][i] =
1978 				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1979 				clk_skew;
1980 
1981 	low_power_update(dram, lp_stat);
1982 
1983 	return ret;
1984 }
1985 
1986 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1987 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1988 				      void __iomem *phy_base, u8 cs_num)
1989 {
1990 	int i;
1991 
1992 	result->cs_num = cs_num;
1993 	result->byte_en = readb(PHY_REG(phy_base, 0xf)) &
1994 			  PHY_DQ_WIDTH_MASK;
1995 	for (i = 0; i < FSP_NUM; i++)
1996 		result->fsp_mhz[i] = 0;
1997 }
1998 
1999 static void save_rw_trn_min_max(void __iomem *phy_base,
2000 				struct cs_rw_trn_result *rd_result,
2001 				struct cs_rw_trn_result *wr_result,
2002 				u8 byte_en)
2003 {
2004 	u16 phy_ofs;
2005 	u8 dqs;
2006 	u8 dq;
2007 
2008 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2009 		if ((byte_en & BIT(dqs)) == 0)
2010 			continue;
2011 
2012 		/* Channel A or B (low or high 16 bit) */
2013 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2014 		/* low or high 8 bit */
2015 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2016 		for (dq = 0; dq < 8; dq++) {
2017 			rd_result->dqs[dqs].dq_min[dq] =
2018 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2019 			rd_result->dqs[dqs].dq_max[dq] =
2020 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2021 			wr_result->dqs[dqs].dq_min[dq] =
2022 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2023 			wr_result->dqs[dqs].dq_max[dq] =
2024 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2025 		}
2026 	}
2027 }
2028 
2029 static void save_rw_trn_deskew(void __iomem *phy_base,
2030 			       struct fsp_rw_trn_result *result, u8 cs_num,
2031 			       int min_val, bool rw)
2032 {
2033 	u16 phy_ofs;
2034 	u8 cs;
2035 	u8 dq;
2036 
2037 	result->min_val = min_val;
2038 
2039 	for (cs = 0; cs < cs_num; cs++) {
2040 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2041 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2042 		for (dq = 0; dq < 8; dq++) {
2043 			result->cs[cs].dqs[0].dq_deskew[dq] =
2044 				readb(PHY_REG(phy_base, phy_ofs + dq));
2045 			result->cs[cs].dqs[1].dq_deskew[dq] =
2046 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2047 			result->cs[cs].dqs[2].dq_deskew[dq] =
2048 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2049 			result->cs[cs].dqs[3].dq_deskew[dq] =
2050 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2051 		}
2052 
2053 		result->cs[cs].dqs[0].dqs_deskew =
2054 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2055 		result->cs[cs].dqs[1].dqs_deskew =
2056 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2057 		result->cs[cs].dqs[2].dqs_deskew =
2058 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2059 		result->cs[cs].dqs[3].dqs_deskew =
2060 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2061 	}
2062 }
2063 
2064 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2065 {
2066 	result->flag = DDR_DQ_EYE_FLAG;
2067 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2068 }
2069 #endif
2070 
2071 static int high_freq_training(struct dram_info *dram,
2072 			      struct rv1126_sdram_params *sdram_params,
2073 			      u32 fsp)
2074 {
2075 	u32 i, j;
2076 	void __iomem *phy_base = dram->phy;
2077 	u32 dramtype = sdram_params->base.dramtype;
2078 	int min_val;
2079 	int dqs_skew, clk_skew, ca_skew;
2080 	u8 byte_en;
2081 	int ret;
2082 
2083 	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2084 	dqs_skew = 0;
2085 	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2086 		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2087 			if ((byte_en & BIT(i)) != 0)
2088 				dqs_skew += wrlvl_result[j][i];
2089 		}
2090 	}
2091 	dqs_skew = dqs_skew /
2092 		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2093 
2094 	clk_skew = 0x20 - dqs_skew;
2095 	dqs_skew = 0x20;
2096 
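	/*
	 * Recenter: DQS deskew is pinned at 0x20 and clk absorbs the average
	 * write-leveling offset. For LPDDR4/4X the most negative leveling
	 * result sets the clk/CA deskew instead, so no byte goes negative.
	 */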
2097 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2098 		min_val = 0xff;
2099 		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2100 			for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2101 				if ((byte_en & BIT(i)) != 0)
2102 					min_val = MIN(wrlvl_result[j][i], min_val);
2103 			}
2104 
2105 		if (min_val < 0) {
2106 			clk_skew = -min_val;
2107 			ca_skew = -min_val;
2108 		} else {
2109 			clk_skew = 0;
2110 			ca_skew = 0;
2111 		}
2112 	} else if (dramtype == LPDDR3) {
2113 		ca_skew = clk_skew - 4;
2114 	} else {
2115 		ca_skew = clk_skew;
2116 	}
2117 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2118 			 dramtype);
2119 
2120 	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2121 	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2122 	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2123 	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2124 	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2125 			    READ_TRAINING | WRITE_TRAINING);
2126 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2127 	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2128 	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2129 			    &rw_trn_result.wr_fsp[fsp].cs[0],
2130 			    rw_trn_result.byte_en);
2131 #endif
2132 	if (sdram_params->ch.cap_info.rank == 2) {
2133 		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2134 		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2135 		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2136 		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2137 		ret |= data_training(dram, 1, sdram_params, fsp,
2138 				     READ_GATE_TRAINING | READ_TRAINING |
2139 				     WRITE_TRAINING);
2140 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2141 		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2142 				    &rw_trn_result.wr_fsp[fsp].cs[1],
2143 				    rw_trn_result.byte_en);
2144 #endif
2145 	}
2146 	if (ret)
2147 		goto out;
2148 
2149 	record_dq_prebit(dram);
2150 
2151 	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2152 				sdram_params->ch.cap_info.rank) * -1;
2153 	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2154 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2155 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2156 	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2157 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2158 			   SKEW_RX_SIGNAL);
2159 #endif
2160 
2161 	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2162 				    sdram_params->ch.cap_info.rank),
2163 		      get_min_value(dram, SKEW_CA_SIGNAL,
2164 				    sdram_params->ch.cap_info.rank)) * -1;
2165 
2166 	/* relative update: shift clk/CA and TX deskew by min_val so the smallest trained value lands at zero */
2167 	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2168 			 dramtype);
2169 
2170 	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2171 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2172 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2173 	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2174 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2175 			   SKEW_TX_SIGNAL);
2176 #endif
2177 
2178 	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2179 	if (sdram_params->ch.cap_info.rank == 2)
2180 		ret |= data_training(dram, 1, sdram_params, 0,
2181 				     READ_GATE_TRAINING);
2182 out:
2183 	return ret;
2184 }
2185 
2186 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2187 {
2188 	writel(ddrconfig, &dram->msch->deviceconf);
2189 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2190 }
2191 
2192 static void update_noc_timing(struct dram_info *dram,
2193 			      struct rv1126_sdram_params *sdram_params)
2194 {
2195 	void __iomem *pctl_base = dram->pctl;
2196 	u32 bw, bl;
2197 
2198 	bw = 8 << sdram_params->ch.cap_info.bw;
2199 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2200 
2201 	/* update the NoC timings that depend on the data bus width */
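	/* burstsize presumably encodes total burst bytes: 0 = up to 16, 1 = 32, 2 = 64, 3 = 128 */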
2202 	if ((bw / 8 * bl) <= 16)
2203 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2204 	else if ((bw / 8 * bl) == 32)
2205 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2206 	else if ((bw / 8 * bl) == 64)
2207 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2208 	else
2209 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2210 
2211 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2212 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2213 
2214 	if (sdram_params->base.dramtype == LPDDR4 ||
2215 	    sdram_params->base.dramtype == LPDDR4X) {
2216 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2217 			(bw == 16) ? 0x1 : 0x2;
2218 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2219 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2220 	}
2221 
2222 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2223 	       &dram->msch->ddrtiminga0);
2224 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2225 	       &dram->msch->ddrtimingb0);
2226 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2227 	       &dram->msch->ddrtimingc0);
2228 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2229 	       &dram->msch->devtodev0);
2230 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2231 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2232 	       &dram->msch->ddr4timing);
2233 }
2234 
2235 static int split_setup(struct dram_info *dram,
2236 		       struct rv1126_sdram_params *sdram_params)
2237 {
2238 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2239 	u32 dramtype = sdram_params->base.dramtype;
2240 	u32 split_size, split_mode;
2241 	u64 cs_cap[2], cap;
2242 
2243 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2244 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2245 	/* only supported when the larger capacity sits on the low 16 bits */
2246 	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2247 		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2248 		cap_info->cs0_high16bit_row));
2249 	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2250 		   (cap_info->rank == 2)) {
2251 		if (!cap_info->cs1_high16bit_row)
2252 			cap = cs_cap[0];
2253 		else
2254 			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2255 				cap_info->cs1_high16bit_row));
2256 	} else {
2257 		goto out;
2258 	}
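	/* the split boundary is programmed in 16 MiB units (cap >> 24) */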
2259 	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2260 	if (cap_info->bw == 2)
2261 		split_mode = SPLIT_MODE_32_L16_VALID;
2262 	else
2263 		split_mode = SPLIT_MODE_16_L8_VALID;
2264 
2265 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2266 		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2267 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2268 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2269 		     (split_mode << SPLIT_MODE_OFFSET) |
2270 		     (0x0 << SPLIT_BYPASS_OFFSET) |
2271 		     (split_size << SPLIT_SIZE_OFFSET));
2272 
2273 	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2274 		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2275 		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2276 
2277 out:
2278 	return 0;
2279 }
2280 
2281 static void split_bypass(struct dram_info *dram)
2282 {
2283 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2284 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2285 		return;
2286 
2287 	/* bypass split */
2288 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2289 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2290 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2291 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2292 		     (0x0 << SPLIT_SIZE_OFFSET));
2293 }
2294 
2295 static void dram_all_config(struct dram_info *dram,
2296 			    struct rv1126_sdram_params *sdram_params)
2297 {
2298 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2299 	u32 dram_type = sdram_params->base.dramtype;
2300 	void __iomem *pctl_base = dram->pctl;
2301 	u32 sys_reg2 = 0;
2302 	u32 sys_reg3 = 0;
2303 	u64 cs_cap[2];
2304 	u32 cs_pst;
2305 
2306 	set_ddrconfig(dram, cap_info->ddrconfig);
2307 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2308 			 &sys_reg3, 0);
2309 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2310 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2311 
2312 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2313 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2314 
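	/*
	 * cs_pst is the byte-address bit that selects the rank; when it sits
	 * above bit 28, report CS0 as the full decoded window.
	 */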
2315 	if (cap_info->rank == 2) {
2316 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2317 			6 + 2;
2318 		if (cs_pst > 28)
2319 			cs_cap[0] = 1llu << cs_pst;
2320 	}
2321 
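	/* devicesize fields are in units of 64 MiB per rank */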
2322 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2323 			(((cs_cap[0] >> 20) / 64) & 0xff),
2324 			&dram->msch->devicesize);
2325 	update_noc_timing(dram, sdram_params);
2326 }
2327 
2328 static void enable_low_power(struct dram_info *dram,
2329 			     struct rv1126_sdram_params *sdram_params)
2330 {
2331 	void __iomem *pctl_base = dram->pctl;
2332 	u32 grf_lp_con;
2333 
2334 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2335 
2336 	if (sdram_params->base.dramtype == DDR4)
2337 		grf_lp_con = (0x7 << 16) | (1 << 1);
2338 	else if (sdram_params->base.dramtype == DDR3)
2339 		grf_lp_con = (0x7 << 16) | (1 << 0);
2340 	else
2341 		grf_lp_con = (0x7 << 16) | (1 << 2);
2342 
2343 	/* enable lpckdis_en */
2344 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2345 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2346 
2347 	/* enable self-refresh and power-down per the idle settings */
2348 	if (dram->pd_idle == 0)
2349 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2350 	else
2351 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2352 	if (dram->sr_idle == 0)
2353 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2354 	else
2355 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2356 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2357 }
2358 
2359 static void ddr_set_atags(struct dram_info *dram,
2360 			  struct rv1126_sdram_params *sdram_params)
2361 {
2362 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2363 	u32 dram_type = sdram_params->base.dramtype;
2364 	void __iomem *pctl_base = dram->pctl;
2365 	struct tag_serial t_serial;
2366 	struct tag_ddr_mem t_ddrmem;
2367 	struct tag_soc_info t_socinfo;
2368 	u64 cs_cap[2];
2369 	u32 cs_pst = 0;
2370 	u32 split, split_size;
2371 	u64 reduce_cap = 0;
2372 
2373 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2374 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2375 
2376 	memset(&t_serial, 0, sizeof(struct tag_serial));
2377 
2378 	t_serial.version = 0;
2379 	t_serial.enable = 1;
2380 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2381 	t_serial.baudrate = CONFIG_BAUDRATE;
2382 	t_serial.m_mode = SERIAL_M_MODE_M0;
2383 	t_serial.id = 2;
2384 
2385 	atags_destroy();
2386 	atags_set_tag(ATAG_SERIAL, &t_serial);
2387 
2388 	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
2389 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2390 	if (cap_info->row_3_4) {
2391 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2392 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2393 	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
2394 		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
2395 		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
2396 	}
2397 	t_ddrmem.version = 0;
2398 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2399 	if (cs_cap[1]) {
2400 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2401 			6 + 2;
2402 	}
2403 
2404 	if (cs_cap[1] && cs_pst > 27) {
2405 		t_ddrmem.count = 2;
2406 		t_ddrmem.bank[1] = 1 << cs_pst;
2407 		t_ddrmem.bank[2] = cs_cap[0];
2408 		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
2409 	} else {
2410 		t_ddrmem.count = 1;
2411 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
2412 	}
2413 
2414 	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);
2415 
2416 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2417 	t_socinfo.version = 0x1;
2418 	t_socinfo.name = 0x1126;
2419 	t_socinfo.flags = SOC_FLAGS_TDBT;
2420 	atags_set_tag(ATAG_SOC_INFO, &t_socinfo);
2421 }
2422 
2423 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2424 {
2425 	u32 split;
2426 
2427 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2428 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2429 		split = 0;
2430 	else
2431 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2432 			SPLIT_SIZE_MASK;
2433 
2434 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2435 			     &sdram_params->base, split);
2436 }
2437 
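/*
 * After a partly failing read-gate training on DDR3/DDR4 (rg_result holds one
 * bit per failing byte), remap the working byte lanes to the low bits of the
 * byte map and shrink the recorded bus width to match. Returns 1 when the
 * controller parameters had to be remodified, 0 if unchanged, -1 on error.
 */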
2438 static int modify_ddr34_bw_byte_map(u8 rg_result, struct rv1126_sdram_params *sdram_params)
2439 {
2440 	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2441 	struct dq_map_info *map_info = (struct dq_map_info *)
2442 				       ((void *)common_info + index->dq_map_index.offset * 4);
2443 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2444 	u32 dramtype = sdram_params->base.dramtype;
2445 	u32 byte_map = 0;
2446 	u32 byte = 0;
2447 	u32 byte_map_shift;
2448 	int i;
2449 
2450 	if (dramtype == DDR3)
2451 		byte_map_shift = 24;
2452 	else if (dramtype == DDR4)
2453 		byte_map_shift = 0;
2454 	else
2455 		return -1;
2456 
2457 	for (i = 0; i < 4; i++) {
2458 		if ((rg_result & BIT(i)) == 0) {
2459 			byte_map |= byte << (i * 2);
2460 			byte++;
2461 		}
2462 	}
2463 	if (byte != 1 && byte != 2 && byte != 4) {
2464 		printascii("DTT result is abnormal: ");
2465 		printdec(byte);
2466 		printascii(" byte\n");
2467 		return -1;
2468 	}
2469 	cap_info->bw = byte / 2;
2470 	for (i = 0; i < 4; i++) {
2471 		if ((rg_result & BIT(i)) != 0) {
2472 			byte_map |= byte << (i * 2);
2473 			byte++;
2474 		}
2475 	}
2476 
2477 	if ((u8)byte_map != (u8)(map_info->byte_map[0] >> byte_map_shift)) {
2478 		clrsetbits_le32(&map_info->byte_map[0],
2479 				0xff << byte_map_shift, byte_map << byte_map_shift);
2480 		pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, dramtype);
2481 		return 1;
2482 	}
2483 
2484 	return 0;
2485 }
2486 
2487 static int sdram_init_(struct dram_info *dram,
2488 		       struct rv1126_sdram_params *sdram_params, u32 post_init)
2489 {
2490 	void __iomem *pctl_base = dram->pctl;
2491 	void __iomem *phy_base = dram->phy;
2492 	u32 ddr4_vref;
2493 	u32 mr_tmp, tmp;
2494 
2495 	rkclk_configure_ddr(dram, sdram_params);
2496 
2497 	rkclk_ddr_reset(dram, 1, 1, 1, 1);
2498 	udelay(10);
2499 
2500 	rkclk_ddr_reset(dram, 1, 1, 1, 0);
2501 	phy_cfg(dram, sdram_params);
2502 
2503 	rkclk_ddr_reset(dram, 1, 1, 0, 0);
2504 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2505 
2506 	rkclk_ddr_reset(dram, 1, 0, 0, 0);
2507 	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2508 		 dram->sr_idle, dram->pd_idle);
2509 
2510 	if (sdram_params->ch.cap_info.bw == 2) {
2511 		/* the 32-bit interface uses pageclose */
2512 		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2513 		/* pageclose = 1 with pageclose_timer = 0 misbehaves on LPDDR4 at 328 MHz */
2514 		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2515 	} else {
2516 		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2517 	}
2518 
2519 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2520 	u32 trefi;
2521 
2522 	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2523 	trefi = (tmp >> 16) & 0xfff;
2524 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2525 	       pctl_base + DDR_PCTL2_RFSHTMG);
2526 #endif
2527 
2528 	/* set frequency_mode */
2529 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2530 	/* set target_frequency to Frequency 0 */
2531 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2532 
2533 	set_ds_odt(dram, sdram_params, 0);
2534 	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2535 	set_ctl_address_map(dram, sdram_params);
2536 
2537 	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2538 
2539 	rkclk_ddr_reset(dram, 0, 0, 0, 0);
2540 
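	/* wait for the controller to leave the init state */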
2541 	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
2542 		continue;
2543 
2544 	if (sdram_params->base.dramtype == LPDDR3) {
2545 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2546 	} else if (sdram_params->base.dramtype == LPDDR4 ||
2547 		   sdram_params->base.dramtype == LPDDR4X) {
2548 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2549 		/* MR11 */
2550 		pctl_write_mr(dram->pctl, 3, 11,
2551 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2552 			      LPDDR4);
2553 		/* MR12 */
2554 		pctl_write_mr(dram->pctl, 3, 12,
2555 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2556 			      LPDDR4);
2557 
2558 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2559 		/* MR22 */
2560 		pctl_write_mr(dram->pctl, 3, 22,
2561 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2562 			      LPDDR4);
2563 	}
2564 
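	/*
	 * On the first DDR3 pass, enable all byte lanes so read-gate training
	 * yields a per-byte pass/fail mask for modify_ddr34_bw_byte_map().
	 */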
2565 	if (sdram_params->base.dramtype == DDR3 && post_init == 0)
2566 		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2567 	tmp = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) & 0xf;
2568 
2569 	if (tmp != 0) {
2570 		if (post_init != 0) {
2571 			printascii("DTT cs0 error\n");
2572 			return -1;
2573 		}
2574 		if (sdram_params->base.dramtype != DDR3 || tmp == 0xf)
2575 			return -1;
2576 	}
2577 
2578 	if (sdram_params->base.dramtype == DDR3 && post_init == 0) {
2579 		if (modify_ddr34_bw_byte_map((u8)tmp, sdram_params) != 0)
2580 			return -1;
2581 	}
2582 
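	/* sanity check: LPDDR4 MR14 (VrefDQ) must still read back its reset default 0x4d */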
2583 	if (sdram_params->base.dramtype == LPDDR4) {
2584 		mr_tmp = read_mr(dram, 1, 14, LPDDR4);
2585 
2586 		if (mr_tmp != 0x4d)
2587 			return -1;
2588 	}
2589 
2590 	if (sdram_params->base.dramtype == LPDDR4 ||
2591 	    sdram_params->base.dramtype == LPDDR4X) {
2592 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2593 		/* MR14 */
2594 		pctl_write_mr(dram->pctl, 3, 14,
2595 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2596 			      LPDDR4);
2597 	}
2598 	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2599 		if (data_training(dram, 1, sdram_params, 0,
2600 				  READ_GATE_TRAINING) != 0) {
2601 			printascii("DTT cs1 error\n");
2602 			return -1;
2603 		}
2604 	}
2605 
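	/* scale the PHY vref-out code by 39, presumably into the controller's VrefDQ units */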
2606 	if (sdram_params->base.dramtype == DDR4) {
2607 		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2608 		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2609 				  sdram_params->base.dramtype);
2610 	}
2611 
2612 	dram_all_config(dram, sdram_params);
2613 	enable_low_power(dram, sdram_params);
2614 
2615 	return 0;
2616 }
2617 
2618 static u64 dram_detect_cap(struct dram_info *dram,
2619 			   struct rv1126_sdram_params *sdram_params,
2620 			   unsigned char channel)
2621 {
2622 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2623 	void __iomem *pctl_base = dram->pctl;
2624 	void __iomem *phy_base = dram->phy;
2625 	u32 mr8;
2626 
2627 	u32 bktmp;
2628 	u32 coltmp;
2629 	u32 rowtmp;
2630 	u32 cs;
2631 	u32 dram_type = sdram_params->base.dramtype;
2632 	u32 pwrctl;
2633 	u32 i, dq_map;
2634 	u32 byte1 = 0, byte0 = 0;
2635 
2636 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2637 		if (dram_type != DDR4) {
2638 			if (dram_type == DDR3)
2639 				coltmp = 11;
2640 			else
2641 				coltmp = 12;
2642 			bktmp = 3;
2643 			if (dram_type == LPDDR2)
2644 				rowtmp = 15;
2645 			else
2646 				rowtmp = 16;
2647 
2648 			if (sdram_detect_col(cap_info, coltmp) != 0)
2649 				goto cap_err;
2650 
2651 			sdram_detect_bank(cap_info, coltmp, bktmp);
2652 			if (dram_type != LPDDR3)
2653 				sdram_detect_dbw(cap_info, dram_type);
2654 		} else {
2655 			coltmp = 10;
2656 			bktmp = 4;
2657 			rowtmp = 17;
2658 
2659 			cap_info->col = 10;
2660 			cap_info->bk = 2;
2661 			sdram_detect_bg(cap_info, coltmp);
2662 		}
2663 
2664 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2665 			goto cap_err;
2666 
2667 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2668 	} else {
2669 		cap_info->col = 10;
2670 		cap_info->bk = 3;
2671 		mr8 = read_mr(dram, 1, 8, dram_type);
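		/* LPDDR4 MR8: OP[7:6] = I/O width, OP[5:2] = density, OP[1:0] = type */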
2672 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2673 		mr8 = (mr8 >> 2) & 0xf;
2674 		if (mr8 <= 6) {	/* mr8 is unsigned, so only the upper bound matters */
2675 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2676 		} else if (mr8 == 0xc) {
2677 			cap_info->cs0_row = 13;
2678 		} else {
2679 			printascii("Cap ERR: failed to get LPDDR4/X capacity from MR8\n");
2680 			goto cap_err;
2681 		}
2682 		if (cap_info->dbw == 0)
2683 			cap_info->cs0_row++;
2684 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2685 		if (cap_info->cs0_row >= 17) {
2686 			printascii("Cap ERR: ");
2687 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2688 			goto cap_err;
2689 			// cap_info->cs0_row = 16;
2690 			// cap_info->row_3_4 = 0;
2691 		}
2692 	}
2693 
2694 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2695 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2696 
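	/* rank detect: a read-gate training pass on CS1 means a second rank is present */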
2697 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2698 		cs = 1;
2699 	else
2700 		cs = 0;
2701 	cap_info->rank = cs + 1;
2702 
2703 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2704 
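	/*
	 * Bus-width detect: a full-width read-gate pass means 32-bit;
	 * otherwise pick the two working byte lanes out of the PHY dq map
	 * and retry with a 16-bit byte-enable mask.
	 */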
2705 	if (dram_type != DDR3) {
2706 		if ((data_training_rg(dram, 0, dram_type) & 0xf) == 0) {
2707 			cap_info->bw = 2;
2708 		} else {
2709 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2710 			for (i = 0; i < 4; i++) {
2711 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2712 					byte0 = i;
2713 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2714 					byte1 = i;
2715 			}
2716 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2717 					BIT(byte0) | BIT(byte1));
2718 			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2719 				cap_info->bw = 1;
2720 			else
2721 				cap_info->bw = 0;
2722 		}
2723 	}
2724 	if (cap_info->bw > 0)
2725 		cap_info->dbw = 1;
2726 
2727 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2728 
2729 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2730 	if (cs) {
2731 		cap_info->cs1_row = cap_info->cs0_row;
2732 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2733 	} else {
2734 		cap_info->cs1_row = 0;
2735 		cap_info->cs1_high16bit_row = 0;
2736 	}
2737 
2738 	if (dram_type == LPDDR3)
2739 		sdram_detect_dbw(cap_info, dram_type);
2740 
2741 	return 0;
2742 cap_err:
2743 	return -1;
2744 }
2745 
2746 static int dram_detect_cs1_row(struct dram_info *dram,
2747 			       struct rv1126_sdram_params *sdram_params,
2748 			       unsigned char channel)
2749 {
2750 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2751 	void __iomem *pctl_base = dram->pctl;
2752 	u32 ret = 0;
2753 	void __iomem *test_addr;
2754 	u32 row, bktmp, coltmp, bw;
2755 	u64 cs0_cap;
2756 	u32 byte_mask;
2757 	u32 cs_pst;
2758 	u32 cs_add = 0;
2759 	u32 max_row;
2760 
2761 	if (cap_info->rank == 2) {
2762 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2763 			6 + 2;
2764 		if (cs_pst < 28)
2765 			cs_add = 1;
2766 
2767 		cs0_cap = 1llu << cs_pst;
2768 
2769 		if (sdram_params->base.dramtype == DDR4) {
2770 			if (cap_info->dbw == 0)
2771 				bktmp = cap_info->bk + 2;
2772 			else
2773 				bktmp = cap_info->bk + 1;
2774 		} else {
2775 			bktmp = cap_info->bk;
2776 		}
2777 		bw = cap_info->bw;
2778 		coltmp = cap_info->col;
2779 
2780 		if (bw == 2)
2781 			byte_mask = 0xFFFF;
2782 		else
2783 			byte_mask = 0xFF;
2784 
2785 		max_row = (cs_pst == 31) ? 30 : 31;
2786 
2787 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2788 
2789 		row = (cap_info->cs0_row > max_row) ? max_row :
2790 			cap_info->cs0_row;
2791 
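		/*
		 * Probe the CS1 row count: write a pattern at each candidate
		 * row boundary above the CS0 capacity and check that it does
		 * not alias back onto the CS1 base address.
		 */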
2792 		for (; row > 12; row--) {
2793 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2794 				    (u32)cs0_cap +
2795 				    (1ul << (row + bktmp + coltmp +
2796 					     cs_add + bw - 1ul)));
2797 
2798 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2799 			writel(PATTERN, test_addr);
2800 
2801 			if (((readl(test_addr) & byte_mask) ==
2802 			     (PATTERN & byte_mask)) &&
2803 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2804 			      byte_mask) == 0)) {
2805 				ret = row;
2806 				break;
2807 			}
2808 		}
2809 	}
2810 
2811 	return ret;
2812 }
2813 
2814 /* return: 0 = success, other = fail */
2815 static int sdram_init_detect(struct dram_info *dram,
2816 			     struct rv1126_sdram_params *sdram_params)
2817 {
2818 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2819 	u32 ret;
2820 	u32 sys_reg = 0;
2821 	u32 sys_reg3 = 0;
2822 
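	/* DDR3 gets one retry, since the first pass may only have corrected the byte map */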
2823 	if (sdram_init_(dram, sdram_params, 0)) {
2824 		if (sdram_params->base.dramtype == DDR3) {
2825 			if (sdram_init_(dram, sdram_params, 0))
2826 				return -1;
2827 		} else {
2828 			return -1;
2829 		}
2830 	}
2831 
2832 	if (sdram_params->base.dramtype == DDR3) {
2833 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2834 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2835 			return -1;
2836 	}
2837 
2838 	split_bypass(dram);
2839 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2840 		return -1;
2841 
2842 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2843 				   sdram_params->base.dramtype);
2844 	ret = sdram_init_(dram, sdram_params, 1);
2845 	if (ret != 0)
2846 		goto out;
2847 
2848 	cap_info->cs1_row =
2849 		dram_detect_cs1_row(dram, sdram_params, 0);
2850 	if (cap_info->cs1_row) {
2851 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2852 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2853 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2854 				    sys_reg, sys_reg3, 0);
2855 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2856 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2857 	}
2858 
2859 	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2860 	split_setup(dram, sdram_params);
2861 out:
2862 	return ret;
2863 }
2864 
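/*
 * Return the fastest predefined config whose ddr_freq does not exceed
 * freq_mhz; when freq_mhz is 0, fall back to the F0 frequency from the
 * drv/odt info block.
 */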
2865 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2866 {
2867 	u32 i;
2868 	u32 offset = 0;
2869 	struct ddr2_3_4_lp2_3_info *ddr_info;
2870 
2871 	if (!freq_mhz) {
2872 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2873 		if (ddr_info)
2874 			freq_mhz =
2875 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2876 				DDR_FREQ_MASK;
2877 		else
2878 			freq_mhz = 0;
2879 	}
2880 
2881 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2882 		if (sdram_configs[i].base.ddr_freq == 0 ||
2883 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2884 			break;
2885 	}
2886 	offset = i == 0 ? 0 : i - 1;
2887 
2888 	return &sdram_configs[offset];
2889 }
2890 
2891 static const u16 pctl_need_update_reg[] = {
2892 	DDR_PCTL2_RFSHTMG,
2893 	DDR_PCTL2_INIT3,
2894 	DDR_PCTL2_INIT4,
2895 	DDR_PCTL2_INIT6,
2896 	DDR_PCTL2_INIT7,
2897 	DDR_PCTL2_DRAMTMG0,
2898 	DDR_PCTL2_DRAMTMG1,
2899 	DDR_PCTL2_DRAMTMG2,
2900 	DDR_PCTL2_DRAMTMG3,
2901 	DDR_PCTL2_DRAMTMG4,
2902 	DDR_PCTL2_DRAMTMG5,
2903 	DDR_PCTL2_DRAMTMG6,
2904 	DDR_PCTL2_DRAMTMG7,
2905 	DDR_PCTL2_DRAMTMG8,
2906 	DDR_PCTL2_DRAMTMG9,
2907 	DDR_PCTL2_DRAMTMG12,
2908 	DDR_PCTL2_DRAMTMG13,
2909 	DDR_PCTL2_DRAMTMG14,
2910 	DDR_PCTL2_ZQCTL0,
2911 	DDR_PCTL2_DFITMG0,
2912 	DDR_PCTL2_ODTCFG
2913 };
2914 
2915 static const u16 phy_need_update_reg[] = {
2916 	0x14,
2917 	0x18,
2918 	0x1c
2919 };
2920 
2921 static void pre_set_rate(struct dram_info *dram,
2922 			 struct rv1126_sdram_params *sdram_params,
2923 			 u32 dst_fsp, u32 dst_fsp_lp4)
2924 {
2925 	u32 i, j, find;
2926 	void __iomem *pctl_base = dram->pctl;
2927 	void __iomem *phy_base = dram->phy;
2928 	u32 phy_offset;
2929 	u32 mr_tmp;
2930 	u32 dramtype = sdram_params->base.dramtype;
2931 
2932 	sw_set_req(dram);
2933 	/* pctl timing update */
2934 	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2935 		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2936 		     j++) {
2937 			if (sdram_params->pctl_regs.pctl[j][0] ==
2938 			    pctl_need_update_reg[i]) {
2939 				writel(sdram_params->pctl_regs.pctl[j][1],
2940 				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2941 				       pctl_need_update_reg[i]);
2942 				find = j;
2943 				break;
2944 			}
2945 		}
2946 	}
2947 
2948 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2949 	u32 tmp, trefi;
2950 
2951 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2952 	trefi = (tmp >> 16) & 0xfff;
2953 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2954 	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2955 #endif
2956 
2957 	sw_set_ack(dram);
2958 
2959 	/* phy timing update */
2960 	if (dst_fsp == 0)
2961 		phy_offset = 0;
2962 	else
2963 		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2964 	/* update CL/CWL/AL */
2965 	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2966 		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2967 		     j++) {
2968 			if (sdram_params->phy_regs.phy[j][0] ==
2969 			    phy_need_update_reg[i]) {
2970 				writel(sdram_params->phy_regs.phy[j][1],
2971 				       phy_base + phy_offset +
2972 				       phy_need_update_reg[i]);
2973 				find = j;
2974 				break;
2975 			}
2976 		}
2977 	}
2978 
2979 	set_ds_odt(dram, sdram_params, dst_fsp);
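	/*
	 * For LPDDR4/4X, write the per-FSP mode registers to the DRAM and
	 * mirror them into PHY regs 0x17-0x1d (presumably the PHY's MR
	 * shadow copies used across frequency switches).
	 */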
2980 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2981 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2982 			       DDR_PCTL2_INIT4);
2983 		/* MR13 */
2984 		pctl_write_mr(dram->pctl, 3, 13,
2985 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2986 			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2987 			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2988 		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2989 				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2990 				      ((0x2 << 6) >> dst_fsp_lp4),
2991 				       PHY_REG(phy_base, 0x1b));
2992 		/* MR3 */
2993 		pctl_write_mr(dram->pctl, 3, 3,
2994 			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2995 			      PCTL2_MR_MASK,
2996 			      dramtype);
2997 		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2998 		       PHY_REG(phy_base, 0x19));
2999 
3000 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3001 			       DDR_PCTL2_INIT3);
3002 		/* MR1 */
3003 		pctl_write_mr(dram->pctl, 3, 1,
3004 			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
3005 			      PCTL2_MR_MASK,
3006 			      dramtype);
3007 		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
3008 		       PHY_REG(phy_base, 0x17));
3009 		/* MR2 */
3010 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
3011 			      dramtype);
3012 		writel(mr_tmp & PCTL2_MR_MASK,
3013 		       PHY_REG(phy_base, 0x18));
3014 
3015 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3016 			       DDR_PCTL2_INIT6);
3017 		/* MR11 */
3018 		pctl_write_mr(dram->pctl, 3, 11,
3019 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3020 			      dramtype);
3021 		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3022 		       PHY_REG(phy_base, 0x1a));
3023 		/* MR12 */
3024 		pctl_write_mr(dram->pctl, 3, 12,
3025 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
3026 			      dramtype);
3027 
3028 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3029 			       DDR_PCTL2_INIT7);
3030 		/* MR22 */
3031 		pctl_write_mr(dram->pctl, 3, 22,
3032 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3033 			      dramtype);
3034 		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3035 		       PHY_REG(phy_base, 0x1d));
3036 		/* MR14 */
3037 		pctl_write_mr(dram->pctl, 3, 14,
3038 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3039 			      dramtype);
3040 		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3041 		       PHY_REG(phy_base, 0x1c));
3042 	}
3043 
3044 	update_noc_timing(dram, sdram_params);
3045 }
3046 
3047 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
3048 			   struct rv1126_sdram_params *sdram_params)
3049 {
3050 	void __iomem *pctl_base = dram->pctl;
3051 	void __iomem *phy_base = dram->phy;
3052 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
3053 	u32 temp, temp1;
3054 	struct ddr2_3_4_lp2_3_info *ddr_info;
3055 
3056 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
3057 
3058 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
3059 
3060 	if (sdram_params->base.dramtype == LPDDR4 ||
3061 	    sdram_params->base.dramtype == LPDDR4X) {
3062 		p_fsp_param->rd_odt_up_en = 0;
3063 		p_fsp_param->rd_odt_down_en = 1;
3064 	} else {
3065 		p_fsp_param->rd_odt_up_en =
3066 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
3067 		p_fsp_param->rd_odt_down_en =
3068 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
3069 	}
3070 
3071 	if (p_fsp_param->rd_odt_up_en)
3072 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
3073 	else if (p_fsp_param->rd_odt_down_en)
3074 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
3075 	else
3076 		p_fsp_param->rd_odt = 0;
3077 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
3078 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
3079 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
3080 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
3081 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
3082 
3083 	if (sdram_params->base.dramtype == DDR3) {
3084 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3085 			     DDR_PCTL2_INIT3);
3086 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3087 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
3088 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
3089 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3090 	} else if (sdram_params->base.dramtype == DDR4) {
3091 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3092 			     DDR_PCTL2_INIT3);
3093 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3094 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
3095 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
3096 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3097 	} else if (sdram_params->base.dramtype == LPDDR3) {
3098 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3099 			     DDR_PCTL2_INIT4);
3100 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3101 		p_fsp_param->ds_pdds = temp & 0xf;
3102 
3103 		p_fsp_param->dq_odt = lp3_odt_value;
3104 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3105 	} else if (sdram_params->base.dramtype == LPDDR4 ||
3106 		   sdram_params->base.dramtype == LPDDR4X) {
3107 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3108 			     DDR_PCTL2_INIT4);
3109 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3110 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
3111 
3112 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3113 			     DDR_PCTL2_INIT6);
3114 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
3115 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
3116 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
3117 
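		/*
		 * Take the CA vref as the midpoint of the per-channel
		 * min/max values (presumably the trained window bounds).
		 */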
3118 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
3119 			   readl(PHY_REG(phy_base, 0x3ce)));
3120 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3121 			    readl(PHY_REG(phy_base, 0x3de)));
3122 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3123 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3124 			   readl(PHY_REG(phy_base, 0x3cf)));
3125 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3126 			    readl(PHY_REG(phy_base, 0x3df)));
3127 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3128 		p_fsp_param->vref_ca[0] |=
3129 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3130 		p_fsp_param->vref_ca[1] |=
3131 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3132 
3133 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3134 					      3) & 0x1;
3135 	}
3136 
3137 	p_fsp_param->noc_timings.ddrtiminga0 =
3138 		sdram_params->ch.noc_timings.ddrtiminga0;
3139 	p_fsp_param->noc_timings.ddrtimingb0 =
3140 		sdram_params->ch.noc_timings.ddrtimingb0;
3141 	p_fsp_param->noc_timings.ddrtimingc0 =
3142 		sdram_params->ch.noc_timings.ddrtimingc0;
3143 	p_fsp_param->noc_timings.devtodev0 =
3144 		sdram_params->ch.noc_timings.devtodev0;
3145 	p_fsp_param->noc_timings.ddrmode =
3146 		sdram_params->ch.noc_timings.ddrmode;
3147 	p_fsp_param->noc_timings.ddr4timing =
3148 		sdram_params->ch.noc_timings.ddr4timing;
3149 	p_fsp_param->noc_timings.agingx0 =
3150 		sdram_params->ch.noc_timings.agingx0;
3151 	p_fsp_param->noc_timings.aging0 =
3152 		sdram_params->ch.noc_timings.aging0;
3153 	p_fsp_param->noc_timings.aging1 =
3154 		sdram_params->ch.noc_timings.aging1;
3155 	p_fsp_param->noc_timings.aging2 =
3156 		sdram_params->ch.noc_timings.aging2;
3157 	p_fsp_param->noc_timings.aging3 =
3158 		sdram_params->ch.noc_timings.aging3;
3159 
3160 	p_fsp_param->flag = FSP_FLAG;
3161 }
3162 
3163 #ifndef CONFIG_SPL_KERNEL_BOOT
3164 static void copy_fsp_param_to_ddr(void)
3165 {
3166 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3167 	       sizeof(fsp_param));
3168 }
3169 #endif
3170 
3171 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3172 			     struct sdram_cap_info *cap_info, u32 dram_type,
3173 			     u32 freq)
3174 {
3175 	u64 cs0_cap;
3176 	u32 die_cap;
3177 	u32 trfc_ns, trfc4_ns;
3178 	u32 trfc, txsnr;
3179 	u32 txs_abort_fast = 0;
3180 	u32 tmp;
3181 
3182 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3183 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3184 
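	/* tRFC(min) in ns per die density, per JEDEC; exit times add a 10 ns margin */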
3185 	switch (dram_type) {
3186 	case DDR3:
3187 		if (die_cap <= DIE_CAP_512MBIT)
3188 			trfc_ns = 90;
3189 		else if (die_cap <= DIE_CAP_1GBIT)
3190 			trfc_ns = 110;
3191 		else if (die_cap <= DIE_CAP_2GBIT)
3192 			trfc_ns = 160;
3193 		else if (die_cap <= DIE_CAP_4GBIT)
3194 			trfc_ns = 260;
3195 		else
3196 			trfc_ns = 350;
3197 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3198 		break;
3199 
3200 	case DDR4:
3201 		if (die_cap <= DIE_CAP_2GBIT) {
3202 			trfc_ns = 160;
3203 			trfc4_ns = 90;
3204 		} else if (die_cap <= DIE_CAP_4GBIT) {
3205 			trfc_ns = 260;
3206 			trfc4_ns = 110;
3207 		} else if (die_cap <= DIE_CAP_8GBIT) {
3208 			trfc_ns = 350;
3209 			trfc4_ns = 160;
3210 		} else {
3211 			trfc_ns = 550;
3212 			trfc4_ns = 260;
3213 		}
3214 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3215 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3216 		break;
3217 
3218 	case LPDDR3:
3219 		if (die_cap <= DIE_CAP_4GBIT)
3220 			trfc_ns = 130;
3221 		else
3222 			trfc_ns = 210;
3223 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3224 		break;
3225 
3226 	case LPDDR4:
3227 	case LPDDR4X:
3228 		if (die_cap <= DIE_CAP_2GBIT)
3229 			trfc_ns = 130;
3230 		else if (die_cap <= DIE_CAP_4GBIT)
3231 			trfc_ns = 180;
3232 		else if (die_cap <= DIE_CAP_8GBIT)
3233 			trfc_ns = 280;
3234 		else
3235 			trfc_ns = 380;
3236 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3237 		break;
3238 
3239 	default:
3240 		return;
3241 	}
3242 	trfc = (trfc_ns * freq + 999) / 1000;
3243 
3244 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3245 		switch (pctl_regs->pctl[i][0]) {
3246 		case DDR_PCTL2_RFSHTMG:
3247 			tmp = pctl_regs->pctl[i][1];
3248 			/* t_rfc_min */
3249 			tmp &= ~((u32)0x3ff);
3250 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3251 			pctl_regs->pctl[i][1] = tmp;
3252 			break;
3253 
3254 		case DDR_PCTL2_DRAMTMG8:
3255 			if (dram_type == DDR3 || dram_type == DDR4) {
3256 				tmp = pctl_regs->pctl[i][1];
3257 				/* t_xs_x32 */
3258 				tmp &= ~((u32)0x7f);
3259 				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;
3260 
3261 				if (dram_type == DDR4) {
3262 					/* t_xs_abort_x32 */
3263 					tmp &= ~((u32)(0x7f << 16));
3264 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
3265 					/* t_xs_fast_x32 */
3266 					tmp &= ~((u32)(0x7f << 24));
3267 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
3268 				}
3269 
3270 				pctl_regs->pctl[i][1] = tmp;
3271 			}
3272 			break;
3273 
3274 		case DDR_PCTL2_DRAMTMG14:
3275 			if (dram_type == LPDDR3 ||
3276 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3277 				tmp = pctl_regs->pctl[i][1];
3278 				/* t_xsr */
3279 				tmp &= ~((u32)0xfff);
3280 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3281 				pctl_regs->pctl[i][1] = tmp;
3282 			}
3283 			break;
3284 
3285 		default:
3286 			break;
3287 		}
3288 	}
3289 }
3290 
3291 void ddr_set_rate(struct dram_info *dram,
3292 		  struct rv1126_sdram_params *sdram_params,
3293 		  u32 freq, u32 cur_freq, u32 dst_fsp,
3294 		  u32 dst_fsp_lp4, u32 training_en)
3295 {
3296 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3297 	u32 mr_tmp;
3298 	u32 lp_stat;
3299 	u32 dramtype = sdram_params->base.dramtype;
3300 	struct rv1126_sdram_params *sdram_params_new;
3301 	void __iomem *pctl_base = dram->pctl;
3302 	void __iomem *phy_base = dram->phy;
3303 
3304 	lp_stat = low_power_update(dram, 0);
3305 	sdram_params_new = get_default_sdram_config(freq);
3306 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3307 	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3308 
3309 	pctl_modify_trfc(&sdram_params_new->pctl_regs,
3310 			 &sdram_params->ch.cap_info, dramtype, freq);
3311 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3312 
3313 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
3314 			 PCTL2_OPERATING_MODE_MASK) ==
3315 			 PCTL2_OPERATING_MODE_SR)
3316 		continue;
3317 
3318 	dest_dll_off = 0;
3319 	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3320 			  DDR_PCTL2_INIT3);
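	/* MR1 bit 0 is DLL-disable on DDR3 but DLL-enable on DDR4, hence the inverted test */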
3321 	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3322 	    (dramtype == DDR4 && !(dst_init3 & 1)))
3323 		dest_dll_off = 1;
3324 
3325 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3326 	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3327 			  DDR_PCTL2_INIT3);
3328 	cur_init3 &= PCTL2_MR_MASK;
3329 	cur_dll_off = 1;
3330 	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3331 	    (dramtype == DDR4 && (cur_init3 & 1)))
3332 		cur_dll_off = 0;
3333 
3334 	if (!cur_dll_off) {
3335 		if (dramtype == DDR3)
3336 			cur_init3 |= 1;
3337 		else
3338 			cur_init3 &= ~1;
3339 		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3340 	}
3341 
3342 	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3343 		     PCTL2_DIS_AUTO_REFRESH);
3344 	update_refresh_reg(dram);
3345 
3346 	enter_sr(dram, 1);
3347 
3348 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3349 	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3350 	       &dram->pmugrf->soc_con[0]);
3351 	sw_set_req(dram);
3352 	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3353 		     PCTL2_DFI_INIT_COMPLETE_EN);
3354 	sw_set_ack(dram);
3355 
3356 	sw_set_req(dram);
3357 	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3358 		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3359 	else
3360 		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3361 
3362 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3363 		     PCTL2_DIS_SRX_ZQCL);
3364 	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3365 		     PCTL2_DIS_SRX_ZQCL);
3366 	sw_set_ack(dram);
3367 
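	/* gate the scheduler and controller clocks while the DPLL and PHY PLL are reprogrammed */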
3368 	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3369 	       &dram->cru->clkgate_con[21]);
3370 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3371 					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3372 					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3373 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3374 
3375 	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3376 	rkclk_set_dpll(dram, freq * MHz / 2);
3377 	phy_pll_set(dram, freq * MHz, 0);
3378 	phy_pll_set(dram, freq * MHz, 1);
3379 	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3380 
3381 	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3382 			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3383 			&dram->pmugrf->soc_con[0]);
3384 	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3385 	       &dram->cru->clkgate_con[21]);
3386 	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3387 					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3388 					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3389 			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3390 	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3391 	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
3392 		continue;
3393 
3394 	sw_set_req(dram);
3395 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3396 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3397 	sw_set_ack(dram);
3398 	update_refresh_reg(dram);
3399 	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3400 
3401 	enter_sr(dram, 0);
3402 
3403 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3404 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3405 
3406 	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3407 	if (dramtype == LPDDR3) {
3408 		pctl_write_mr(dram->pctl, 3, 1,
3409 			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3410 			      PCTL2_MR_MASK,
3411 			      dramtype);
3412 		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3413 			      dramtype);
3414 		pctl_write_mr(dram->pctl, 3, 3,
3415 			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3416 			      PCTL2_MR_MASK,
3417 			      dramtype);
3418 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3419 	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3420 		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3421 			      dramtype);
3422 		if (!dest_dll_off) {
3423 			pctl_write_mr(dram->pctl, 3, 0,
3424 				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3425 				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
3426 				      dramtype);
3427 			udelay(2);
3428 		}
3429 		pctl_write_mr(dram->pctl, 3, 0,
3430 			      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3431 			       PCTL2_MR_MASK) & ~DDR3_DLL_RESET,
3432 			      dramtype);
3433 		pctl_write_mr(dram->pctl, 3, 2,
3434 			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3435 			       PCTL2_MR_MASK), dramtype);
3436 		if (dramtype == DDR4) {
3437 			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3438 				      dramtype);
3439 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3440 				       DDR_PCTL2_INIT6);
3441 			pctl_write_mr(dram->pctl, 3, 4,
3442 				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3443 				       PCTL2_MR_MASK,
3444 				      dramtype);
3445 			pctl_write_mr(dram->pctl, 3, 5,
3446 				      (mr_tmp >> PCTL2_DDR4_MR5_SHIFT) &
3447 				      PCTL2_MR_MASK,
3448 				      dramtype);
3449 
3450 			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3451 				       DDR_PCTL2_INIT7);
3452 			pctl_write_mr(dram->pctl, 3, 6,
3453 				      (mr_tmp >> PCTL2_DDR4_MR6_SHIFT) &
3454 				      PCTL2_MR_MASK,
3455 				      dramtype);
3456 		}
3457 	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
3458 		pctl_write_mr(dram->pctl, 3, 13,
3459 			      (((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT) &
3460 			       PCTL2_MR_MASK) & ~BIT(7)) |
3461 			      (dst_fsp_lp4 << 7), dramtype);
3462 	}
3463 	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3464 		     PCTL2_DIS_AUTO_REFRESH);
3465 	update_refresh_reg(dram);
3466 
3467 	/* retrain at the new set point, then restore the low-power state */
3468 	high_freq_training(dram, sdram_params_new, dst_fsp);
3469 	low_power_update(dram, lp_stat);
3470 
3471 	save_fsp_param(dram, dst_fsp, sdram_params_new);
3472 }
3473 
3474 static void ddr_set_rate_for_fsp(struct dram_info *dram,
3475 				 struct rv1126_sdram_params *sdram_params)
3476 {
3477 	struct ddr2_3_4_lp2_3_info *ddr_info;
3478 	u32 f0;
3479 	u32 dramtype = sdram_params->base.dramtype;
3480 #ifndef CONFIG_SPL_KERNEL_BOOT
3481 	u32 f1, f2, f3;
3482 #endif
3483 
3484 	ddr_info = get_ddr_drv_odt_info(dramtype);
3485 	if (!ddr_info)
3486 		return;
3487 
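	/*
	 * f0 is the final runtime frequency; f1..f3 are intermediate set
	 * points trained in turn so that their parameters can be saved for
	 * later frequency scaling
	 */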
3488 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3489 	     DDR_FREQ_MASK;
3490 
3491 #ifndef CONFIG_SPL_KERNEL_BOOT
3492 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3493 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
3494 
3495 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3496 	     DDR_FREQ_MASK;
3497 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3498 	     DDR_FREQ_MASK;
3499 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3500 	     DDR_FREQ_MASK;
3501 #endif
3502 
3503 	if (get_wrlvl_val(dram, sdram_params))
3504 		printascii("get wrlvl value failed\n");
3505 
3506 #ifndef CONFIG_SPL_KERNEL_BOOT
3507 	printascii("change to: ");
3508 	printdec(f1);
3509 	printascii("MHz\n");
3510 	ddr_set_rate(&dram_info, sdram_params, f1,
3511 		     sdram_params->base.ddr_freq, 1, 1, 1);
3512 	printascii("change to: ");
3513 	printdec(f2);
3514 	printascii("MHz\n");
3515 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3516 	printascii("change to: ");
3517 	printdec(f3);
3518 	printascii("MHz\n");
3519 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3520 #endif
3521 	printascii("change to: ");
3522 	printdec(f0);
3523 	printascii("MHz (final freq)\n");
3524 #ifndef CONFIG_SPL_KERNEL_BOOT
3525 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3526 #else
3527 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
3528 #endif
3529 }
3530 
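/* look up the UART configuration word in the loader-supplied global info */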
3531 int get_uart_config(void)
3532 {
3533 	struct sdram_head_info_index_v2 *index =
3534 		(struct sdram_head_info_index_v2 *)common_info;
3535 	struct global_info *gbl_info;
3536 
3537 	gbl_info = (struct global_info *)((void *)common_info +
3538 		index->global_index.offset * 4);
3539 
3540 	return gbl_info->uart_info;
3541 }
3542 
3543 /* return: 0 = success, other = fail */
3544 int sdram_init(void)
3545 {
3546 	struct rv1126_sdram_params *sdram_params;
3547 	int ret = 0;
3548 	struct sdram_head_info_index_v2 *index =
3549 		(struct sdram_head_info_index_v2 *)common_info;
3550 	struct global_info *gbl_info;
3551 
3552 	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3553 	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3554 	dram_info.grf = (void *)GRF_BASE_ADDR;
3555 	dram_info.cru = (void *)CRU_BASE_ADDR;
3556 	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3557 	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3558 	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3559 
3560 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3561 	printascii("extended temp support\n");
3562 #endif
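	/* validate the v2 common_info header before trusting its offsets */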
3563 	if (index->version_info != 2 ||
3564 	    (index->global_index.size != sizeof(struct global_info) / 4) ||
3565 	    (index->ddr3_index.size !=
3566 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3567 	    (index->ddr4_index.size !=
3568 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3569 	    (index->lp3_index.size !=
3570 		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3571 	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3572 	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3573 	    index->global_index.offset == 0 ||
3574 	    index->ddr3_index.offset == 0 ||
3575 	    index->ddr4_index.offset == 0 ||
3576 	    index->lp3_index.offset == 0 ||
3577 	    index->lp4_index.offset == 0 ||
3578 	    index->lp4x_index.offset == 0) {
3579 		printascii("common info error\n");
3580 		goto error;
3581 	}
3582 
3583 	gbl_info = (struct global_info *)((void *)common_info +
3584 		index->global_index.offset * 4);
3585 
3586 	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3587 	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3588 
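	/*
	 * dram type 8 (LPDDR4X) reuses the LPDDR4 parameter tables with only
	 * dramtype overridden; for DDR3/DDR4 the 2T-timing bit (bit 10 of
	 * the first pctl register value, i.e. MSTR) follows the global info
	 */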
3589 	sdram_params = &sdram_configs[0];
3590 	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
3591 	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
3592 		sdram_configs[j].base.dramtype = LPDDR4X;
3593 	#endif
3594 	if (sdram_params->base.dramtype == DDR3 ||
3595 	    sdram_params->base.dramtype == DDR4) {
3596 		if (DDR_2T_INFO(gbl_info->info_2t))
3597 			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3598 		else
3599 			sdram_params->pctl_regs.pctl[0][1] &=
3600 				~(0x1 << 10);
3601 	}
3602 	ret = sdram_init_detect(&dram_info, sdram_params);
3603 	if (ret) {
3604 		sdram_print_dram_type(sdram_params->base.dramtype);
3605 		printascii(", ");
3606 		printdec(sdram_params->base.ddr_freq);
3607 		printascii("MHz\n");
3608 		goto error;
3609 	}
3610 	print_ddr_info(sdram_params);
3611 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3612 	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3613 				  (u8)sdram_params->ch.cap_info.rank);
3614 #endif
3615 
3616 	ddr_set_rate_for_fsp(&dram_info, sdram_params);
3617 #ifndef CONFIG_SPL_KERNEL_BOOT
3618 	copy_fsp_param_to_ddr();
3619 #endif
3620 
3621 	ddr_set_atags(&dram_info, sdram_params);
3622 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3623 	save_rw_trn_result_to_ddr(&rw_trn_result);
3624 #endif
3625 
3626 	printascii("out\n");
3627 
3628 	return ret;
3629 error:
3630 	printascii("error\n");
3631 	return -1;
3632 }
3633 #endif /* CONFIG_TPL_BUILD */
3634