xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision a1f6fc00a03ae255aefc9170884a5086b29d3fa5)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
/*
 * Bit flags selecting which DDR PHY training steps to run;
 * FULL_TRAINING requests every step at once.
 */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

/* Signal-group selectors for the de-skew helpers */
#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

/* De-skew modification mode: write absolute value vs apply a delta */
#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/* Per-channel driver state: register bases plus idle timeouts. */
struct dram_info {
	void __iomem *pctl;		/* DDR controller (uMCTL2) registers */
	void __iomem *phy;		/* DDR PHY registers */
	struct rv1126_cru *cru;		/* clock/reset unit */
	struct msch_regs *msch;		/* memory scheduler (NIU) */
	struct rv1126_ddrgrf *ddrgrf;	/* DDR general register file */
	struct rv1126_grf *grf;
	struct ram_info info;		/* base/size reported to U-Boot */
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;	/* presumably self-refresh idle count -- TODO confirm */
	u32 pd_idle;	/* presumably power-down idle count -- TODO confirm */
};

/* Fixed physical base addresses of the blocks used during TPL DRAM init */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* Secure GRF registers holding the controller/PHY reset requests */
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;
69 
/*
 * Per-frequency parameter sets for the configured DRAM type, one entry
 * per supported DDR frequency (328..1056 MHz), generated offline and
 * pulled in as .inc initializer fragments.
 * CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE: 3 = DDR3, 0 = DDR4,
 * 6 = LPDDR3, 7 = LPDDR4.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
111 
/*
 * Shared loader parameter blob (drive-strength/ODT/vref tables indexed
 * via struct sdram_head_info_index_v2 -- see get_ddr_drv_odt_info()).
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* Read/write training results exported to the ddr test tool */
static struct rw_trn_result rw_trn_result;
#endif

/* Per-frequency-set-point parameters captured during init */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* Cached LPDDR3 MR3 (ODT) value */
static u8 lp3_odt_value;

/* Write-leveling results: [rank][byte lane] */
static s8 wrlvl_result[2][4];
125 
/*
 * DDR configuration 0-9 (DDR3/LPDDR3/LPDDR4 rank/bank/col encodings).
 * Field layout matched against in calculate_ddrconfig():
 *   bit 8    : rank - 1
 *   bits 7:5 : row - 13
 *   bit 4    : (presumably bank-group/split flag -- TODO confirm)
 *   bit 3    : bank == 3 (8 banks)
 *   bits 2:0 : bw + col - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};

/*
 * DDR configuration 10-21 (DDR4 encodings), indexed as [cfg - 10].
 * Field layout matched against in calculate_ddrconfig():
 *   bit 7    : rank - 1
 *   bits 6:4 : row - 13
 *   bit 3    : (presumably bank-group flag -- TODO confirm)
 *   bits 2:1 : bus width code
 *   bit 0    : die width (dbw)
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};

/* DDR configuration 22-28 (same field layout as ddr_cfg_2_rbc[]) */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

/*
 * {DDR4 config, equivalent DDR3-style config} pairs; used both to map
 * a DDR4 config to its addrmap row (calculate_ddrconfig) and to map it
 * back (set_ctl_address_map).
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
181 
/*
 * uMCTL2 ADDRMAP0..ADDRMAP8 register values per ddrconfig (rows 0-22);
 * column 0 goes to ADDRMAP0 and the block is copied wholesale by
 * set_ctl_address_map(). Only 8 of the 9 columns are initialized; the
 * 9th defaults to 0.
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
232 
/*
 * DQ bit-selection mapping used by the de-skew code; per row:
 * {selector, value A, value B} (exact register semantics defined by the
 * PHY -- TODO confirm against the de-skew users later in this file).
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

/* Skew-group base addresses, indexed by {CS0/CS1} x {A/B half} */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/* PHY register offsets of the write-leveling results: [rank][byte lane] */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/* PHY skew register base for each RX/TX DQS per rank (see comments) */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
288 
/*
 * Assert/deassert the DDR controller and PHY resets.
 *
 * @ctl_srstn:  controller core (and AXI) reset request, 1 = assert
 * @ctl_psrstn: controller APB reset request
 * @phy_srstn:  PHY core reset request
 * @phy_psrstn: PHY APB reset request
 *
 * Controller resets go through the secure GRF (SGRF_SOC_CON13); the
 * PHY resets go through CRU softrst_con[12].
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
300 
301 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
302 {
303 	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
304 	int delay = 1000;
305 	u32 mhz = hz / MHz;
306 
307 	refdiv = 1;
308 	if (mhz <= 100) {
309 		postdiv1 = 6;
310 		postdiv2 = 4;
311 	} else if (mhz <= 150) {
312 		postdiv1 = 4;
313 		postdiv2 = 4;
314 	} else if (mhz <= 200) {
315 		postdiv1 = 6;
316 		postdiv2 = 2;
317 	} else if (mhz <= 300) {
318 		postdiv1 = 4;
319 		postdiv2 = 2;
320 	} else if (mhz <= 400) {
321 		postdiv1 = 6;
322 		postdiv2 = 1;
323 	} else {
324 		postdiv1 = 4;
325 		postdiv2 = 1;
326 	}
327 	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
328 
329 	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
330 
331 	writel(0x1f000000, &dram->cru->clksel_con[64]);
332 	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
333 	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
334 	       &dram->cru->pll[1].con1);
335 
336 	while (delay > 0) {
337 		udelay(1);
338 		if (LOCK(readl(&dram->cru->pll[1].con1)))
339 			break;
340 		delay--;
341 	}
342 
343 	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
344 }
345 
/*
 * Set the DPLL from the parameter set's target DDR frequency (in MHz).
 * The inno PHY doubles the clock internally, so the PLL runs at half
 * the DRAM data rate.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
352 
/*
 * Pick the ddrconfig index (row of addrmap[]) matching the detected
 * DRAM geometry (rank/width/col/row/bank from cap_info).
 *
 * Returns the matched config index, or (u32)-1 if nothing matched
 * (also reported via printascii). DDR4 matches (10..21) are translated
 * to their DDR3-style equivalents through d4_rbc_2_d3_rbc[].
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;	/* sentinel: no match found yet */
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/*
		 * Symmetric dual-rank first: configs 17..20 encode
		 * rank in the row-interleave layout (bit 3 set below).
		 */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* general DDR4 match over configs 10..20 */
		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* symmetric dual-rank, 8-bank: prefer configs 5..7 */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		/* general match over configs 0..8 */
		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}
		/* fallback: large single-rank part maps to config 23 */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* ddrconf is unsigned, so the -1 sentinel also trips this check */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		/* translate a DDR4 config to its DDR3-style addrmap row */
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
446 
/*
 * Open a quasi-dynamic register programming window on the uMCTL2 by
 * clearing sw_done in SWCTL. Must be paired with sw_set_ack().
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
454 
455 static void sw_set_ack(struct dram_info *dram)
456 {
457 	void __iomem *pctl_base = dram->pctl;
458 
459 	/* set sw_done=1 */
460 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
461 	while (1) {
462 		/* wait programming done */
463 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
464 				PCTL2_SW_DONE_ACK)
465 			break;
466 	}
467 }
468 
469 static void set_ctl_address_map(struct dram_info *dram,
470 				struct rv1126_sdram_params *sdram_params)
471 {
472 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
473 	void __iomem *pctl_base = dram->pctl;
474 	u32 ddrconf = cap_info->ddrconfig;
475 	u32 i, row;
476 
477 	row = cap_info->cs0_row;
478 	if (sdram_params->base.dramtype == DDR4) {
479 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
480 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
481 				ddrconf = d4_rbc_2_d3_rbc[i][0];
482 				break;
483 			}
484 		}
485 	}
486 
487 	if (ddrconf > ARRAY_SIZE(addrmap)) {
488 		printascii("set ctl address map fail\n");
489 		return;
490 	}
491 
492 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
493 			  &addrmap[ddrconf][0], 9 * 4);
494 
495 	/* unused row set to 0xf */
496 	for (i = 17; i >= row; i--)
497 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
498 			((i - 12) * 8 / 32) * 4,
499 			0xf << ((i - 12) * 8 % 32));
500 
501 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
502 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
503 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
504 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
505 
506 	if (cap_info->rank == 1)
507 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
508 }
509 
510 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
511 {
512 	void __iomem *phy_base = dram->phy;
513 	u32 fbdiv, prediv, postdiv, postdiv_en;
514 
515 	if (wait) {
516 		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
517 		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
518 			continue;
519 	} else {
520 		freq /= MHz;
521 		prediv = 1;
522 		if (freq <= 200) {
523 			fbdiv = 16;
524 			postdiv = 2;
525 			postdiv_en = 1;
526 		} else if (freq <= 456) {
527 			fbdiv = 8;
528 			postdiv = 1;
529 			postdiv_en = 1;
530 		} else {
531 			fbdiv = 4;
532 			postdiv = 0;
533 			postdiv_en = 0;
534 		}
535 		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
536 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
537 				(fbdiv >> 8) & 1);
538 		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
539 				postdiv_en << PHY_POSTDIV_EN_SHIFT);
540 
541 		clrsetbits_le32(PHY_REG(phy_base, 0x52),
542 				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
543 		clrsetbits_le32(PHY_REG(phy_base, 0x53),
544 				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
545 				postdiv << PHY_POSTDIV_SHIFT);
546 	}
547 }
548 
/*
 * {PHY register code, drive strength in ohm} for DDR3, sorted by
 * descending ohm; looked up descending in set_ds_odt() to pick the
 * first entry whose ohm value is >= the requested one.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
574 
575 static u16 d3_phy_odt_2_ohm[][2] = {
576 	{PHY_DDR3_RTT_DISABLE, 0},
577 	{PHY_DDR3_RTT_561ohm, 561},
578 	{PHY_DDR3_RTT_282ohm, 282},
579 	{PHY_DDR3_RTT_188ohm, 188},
580 	{PHY_DDR3_RTT_141ohm, 141},
581 	{PHY_DDR3_RTT_113ohm, 113},
582 	{PHY_DDR3_RTT_94ohm, 94},
583 	{PHY_DDR3_RTT_81ohm, 81},
584 	{PHY_DDR3_RTT_72ohm, 72},
585 	{PHY_DDR3_RTT_64ohm, 64},
586 	{PHY_DDR3_RTT_58ohm, 58},
587 	{PHY_DDR3_RTT_52ohm, 52},
588 	{PHY_DDR3_RTT_48ohm, 48},
589 	{PHY_DDR3_RTT_44ohm, 44},
590 	{PHY_DDR3_RTT_41ohm, 41},
591 	{PHY_DDR3_RTT_38ohm, 38},
592 	{PHY_DDR3_RTT_37ohm, 37},
593 	{PHY_DDR3_RTT_34ohm, 34},
594 	{PHY_DDR3_RTT_32ohm, 32},
595 	{PHY_DDR3_RTT_31ohm, 31},
596 	{PHY_DDR3_RTT_29ohm, 29},
597 	{PHY_DDR3_RTT_28ohm, 28},
598 	{PHY_DDR3_RTT_27ohm, 27},
599 	{PHY_DDR3_RTT_25ohm, 25}
600 };
601 
602 static u16 d4lp3_phy_drv_2_ohm[][2] = {
603 	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
604 	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
605 	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
606 	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
607 	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
608 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
609 	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
610 	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
611 	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
612 	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
613 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
614 	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
615 	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
616 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
617 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
618 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
619 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
620 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
621 	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
622 	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
623 	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
624 	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
625 	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
626 };
627 
628 static u16 d4lp3_phy_odt_2_ohm[][2] = {
629 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
630 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
631 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
632 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
633 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
634 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
635 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
636 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
637 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
638 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
639 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
640 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
641 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
642 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
643 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
644 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
645 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
646 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
647 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
648 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
649 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
650 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
651 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
652 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
653 };
654 
655 static u16 lp4_phy_drv_2_ohm[][2] = {
656 	{PHY_LPDDR4_RON_501ohm, 501},
657 	{PHY_LPDDR4_RON_253ohm, 253},
658 	{PHY_LPDDR4_RON_168ohm, 168},
659 	{PHY_LPDDR4_RON_126ohm, 126},
660 	{PHY_LPDDR4_RON_101ohm, 101},
661 	{PHY_LPDDR4_RON_84ohm, 84},
662 	{PHY_LPDDR4_RON_72ohm, 72},
663 	{PHY_LPDDR4_RON_63ohm, 63},
664 	{PHY_LPDDR4_RON_56ohm, 56},
665 	{PHY_LPDDR4_RON_50ohm, 50},
666 	{PHY_LPDDR4_RON_46ohm, 46},
667 	{PHY_LPDDR4_RON_42ohm, 42},
668 	{PHY_LPDDR4_RON_38ohm, 38},
669 	{PHY_LPDDR4_RON_36ohm, 36},
670 	{PHY_LPDDR4_RON_33ohm, 33},
671 	{PHY_LPDDR4_RON_31ohm, 31},
672 	{PHY_LPDDR4_RON_29ohm, 29},
673 	{PHY_LPDDR4_RON_28ohm, 28},
674 	{PHY_LPDDR4_RON_26ohm, 26},
675 	{PHY_LPDDR4_RON_25ohm, 25},
676 	{PHY_LPDDR4_RON_24ohm, 24},
677 	{PHY_LPDDR4_RON_23ohm, 23},
678 	{PHY_LPDDR4_RON_22ohm, 22}
679 };
680 
681 static u16 lp4_phy_odt_2_ohm[][2] = {
682 	{PHY_LPDDR4_RTT_DISABLE, 0},
683 	{PHY_LPDDR4_RTT_604ohm, 604},
684 	{PHY_LPDDR4_RTT_303ohm, 303},
685 	{PHY_LPDDR4_RTT_202ohm, 202},
686 	{PHY_LPDDR4_RTT_152ohm, 152},
687 	{PHY_LPDDR4_RTT_122ohm, 122},
688 	{PHY_LPDDR4_RTT_101ohm, 101},
689 	{PHY_LPDDR4_RTT_87ohm,	87},
690 	{PHY_LPDDR4_RTT_78ohm, 78},
691 	{PHY_LPDDR4_RTT_69ohm, 69},
692 	{PHY_LPDDR4_RTT_62ohm, 62},
693 	{PHY_LPDDR4_RTT_56ohm, 56},
694 	{PHY_LPDDR4_RTT_52ohm, 52},
695 	{PHY_LPDDR4_RTT_48ohm, 48},
696 	{PHY_LPDDR4_RTT_44ohm, 44},
697 	{PHY_LPDDR4_RTT_41ohm, 41},
698 	{PHY_LPDDR4_RTT_39ohm, 39},
699 	{PHY_LPDDR4_RTT_37ohm, 37},
700 	{PHY_LPDDR4_RTT_35ohm, 35},
701 	{PHY_LPDDR4_RTT_33ohm, 33},
702 	{PHY_LPDDR4_RTT_32ohm, 32},
703 	{PHY_LPDDR4_RTT_30ohm, 30},
704 	{PHY_LPDDR4_RTT_29ohm, 29},
705 	{PHY_LPDDR4_RTT_27ohm, 27}
706 };
707 
708 static u32 lp4_odt_calc(u32 odt_ohm)
709 {
710 	u32 odt;
711 
712 	if (odt_ohm == 0)
713 		odt = LPDDR4_DQODT_DIS;
714 	else if (odt_ohm <= 40)
715 		odt = LPDDR4_DQODT_40;
716 	else if (odt_ohm <= 48)
717 		odt = LPDDR4_DQODT_48;
718 	else if (odt_ohm <= 60)
719 		odt = LPDDR4_DQODT_60;
720 	else if (odt_ohm <= 80)
721 		odt = LPDDR4_DQODT_80;
722 	else if (odt_ohm <= 120)
723 		odt = LPDDR4_DQODT_120;
724 	else
725 		odt = LPDDR4_DQODT_240;
726 
727 	return odt;
728 }
729 
730 static void *get_ddr_drv_odt_info(u32 dramtype)
731 {
732 	struct sdram_head_info_index_v2 *index =
733 		(struct sdram_head_info_index_v2 *)common_info;
734 	void *ddr_info = 0;
735 
736 	if (dramtype == DDR4)
737 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
738 	else if (dramtype == DDR3)
739 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
740 	else if (dramtype == LPDDR3)
741 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
742 	else if (dramtype == LPDDR4)
743 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
744 	else
745 		printascii("unsupported dram type\n");
746 	return ddr_info;
747 }
748 
749 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
750 			 u32 freq_mhz, u32 dst_fsp)
751 {
752 	void __iomem *pctl_base = dram->pctl;
753 	u32 ca_vref, dq_vref;
754 
755 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
756 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
757 	else
758 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
759 
760 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
761 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
762 	else
763 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
764 
765 	if (ca_vref < 100)
766 		ca_vref = 100;
767 	if (ca_vref > 420)
768 		ca_vref = 420;
769 
770 	if (ca_vref <= 300)
771 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
772 	else
773 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
774 
775 	if (dq_vref < 100)
776 		dq_vref = 100;
777 	if (dq_vref > 420)
778 		dq_vref = 420;
779 
780 	if (dq_vref <= 300)
781 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
782 	else
783 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
784 
785 	sw_set_req(dram);
786 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
787 			DDR_PCTL2_INIT6,
788 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
789 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
790 
791 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
792 			DDR_PCTL2_INIT7,
793 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
794 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
795 	sw_set_ack(dram);
796 }
797 
798 static void set_ds_odt(struct dram_info *dram,
799 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
800 {
801 	void __iomem *phy_base = dram->phy;
802 	void __iomem *pctl_base = dram->pctl;
803 	u32 dramtype = sdram_params->base.dramtype;
804 	struct ddr2_3_4_lp2_3_info *ddr_info;
805 	struct lp4_info *lp4_info;
806 	u32 i, j, tmp;
807 	const u16 (*p_drv)[2];
808 	const u16 (*p_odt)[2];
809 	u32 drv_info, sr_info;
810 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
811 	u32 phy_odt_ohm, dram_odt_ohm;
812 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
813 	u32 phy_odt_up_en, phy_odt_dn_en;
814 	u32 sr_dq, sr_clk;
815 	u32 freq = sdram_params->base.ddr_freq;
816 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
817 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
818 	u32 phy_dq_drv = 0;
819 	u32 phy_odt_up = 0, phy_odt_dn = 0;
820 
821 	ddr_info = get_ddr_drv_odt_info(dramtype);
822 	lp4_info = (void *)ddr_info;
823 
824 	if (!ddr_info)
825 		return;
826 
827 	/* dram odt en freq control phy drv, dram odt and phy sr */
828 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
829 		drv_info = ddr_info->drv_when_odtoff;
830 		dram_odt_ohm = 0;
831 		sr_info = ddr_info->sr_when_odtoff;
832 		phy_lp4_drv_pd_en =
833 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
834 	} else {
835 		drv_info = ddr_info->drv_when_odten;
836 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
837 		sr_info = ddr_info->sr_when_odten;
838 		phy_lp4_drv_pd_en =
839 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
840 	}
841 	phy_dq_drv_ohm =
842 		DRV_INFO_PHY_DQ_DRV(drv_info);
843 	phy_clk_drv_ohm =
844 		DRV_INFO_PHY_CLK_DRV(drv_info);
845 	phy_ca_drv_ohm =
846 		DRV_INFO_PHY_CA_DRV(drv_info);
847 
848 	sr_dq = DQ_SR_INFO(sr_info);
849 	sr_clk = CLK_SR_INFO(sr_info);
850 
851 	/* phy odt en freq control dram drv and phy odt */
852 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
853 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
854 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
855 		phy_odt_ohm = 0;
856 		phy_odt_up_en = 0;
857 		phy_odt_dn_en = 0;
858 	} else {
859 		dram_drv_ohm =
860 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
861 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
862 		phy_odt_up_en =
863 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
864 		phy_odt_dn_en =
865 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
866 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
867 	}
868 
869 	if (dramtype == LPDDR4) {
870 		if (phy_odt_ohm) {
871 			phy_odt_up_en = 0;
872 			phy_odt_dn_en = 1;
873 		}
874 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
875 			dram_caodt_ohm = 0;
876 		else
877 			dram_caodt_ohm =
878 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
879 	}
880 
881 	if (dramtype == DDR3) {
882 		p_drv = d3_phy_drv_2_ohm;
883 		p_odt = d3_phy_odt_2_ohm;
884 	} else if (dramtype == LPDDR4) {
885 		p_drv = lp4_phy_drv_2_ohm;
886 		p_odt = lp4_phy_odt_2_ohm;
887 	} else {
888 		p_drv = d4lp3_phy_drv_2_ohm;
889 		p_odt = d4lp3_phy_odt_2_ohm;
890 	}
891 
892 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
893 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
894 			phy_dq_drv = **(p_drv + i);
895 			break;
896 		}
897 		if (i == 0)
898 			break;
899 	}
900 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
901 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
902 			phy_clk_drv = **(p_drv + i);
903 			break;
904 		}
905 		if (i == 0)
906 			break;
907 	}
908 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
909 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
910 			phy_ca_drv = **(p_drv + i);
911 			break;
912 		}
913 		if (i == 0)
914 			break;
915 	}
916 	if (!phy_odt_ohm)
917 		phy_odt = 0;
918 	else
919 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
920 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
921 				phy_odt = **(p_odt + i);
922 				break;
923 			}
924 			if (i == 0)
925 				break;
926 		}
927 
928 	if (dramtype != LPDDR4) {
929 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
930 			vref_inner = 0x80;
931 		else if (phy_odt_up_en)
932 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
933 				     (dram_drv_ohm + phy_odt_ohm);
934 		else
935 			vref_inner = phy_odt_ohm * 128 /
936 				(phy_odt_ohm + dram_drv_ohm);
937 
938 		if (dramtype != DDR3 && dram_odt_ohm)
939 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
940 				   (phy_dq_drv_ohm + dram_odt_ohm);
941 		else
942 			vref_out = 0x80;
943 	} else {
944 		/* for lp4 */
945 		if (phy_odt_ohm)
946 			vref_inner =
947 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
948 				 256) / 1000;
949 		else
950 			vref_inner =
951 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
952 				 256) / 1000;
953 
954 		vref_out = 0x80;
955 	}
956 
957 	/* default ZQCALIB bypass mode */
958 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
959 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
960 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
961 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
962 	if (dramtype == LPDDR4) {
963 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
964 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
965 	} else {
966 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
967 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
968 	}
969 	/* clk / cmd slew rate */
970 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
971 
972 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
973 	if (phy_odt_up_en)
974 		phy_odt_up = phy_odt;
975 	if (phy_odt_dn_en)
976 		phy_odt_dn = phy_odt;
977 
978 	for (i = 0; i < 4; i++) {
979 		j = 0x110 + i * 0x10;
980 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
981 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
982 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
983 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
984 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
985 
986 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
987 				1 << 3, phy_lp4_drv_pd_en << 3);
988 		/* dq slew rate */
989 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
990 				0x1f, sr_dq);
991 	}
992 
993 	/* reg_rx_vref_value_update */
994 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
995 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
996 
997 	/* RAM VREF */
998 	writel(vref_out, PHY_REG(phy_base, 0x105));
999 	if (dramtype == LPDDR3)
1000 		udelay(100);
1001 
1002 	if (dramtype == LPDDR4)
1003 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1004 
1005 	if (dramtype == DDR3 || dramtype == DDR4) {
1006 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1007 				DDR_PCTL2_INIT3);
1008 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1009 	} else {
1010 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1011 				DDR_PCTL2_INIT4);
1012 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1013 	}
1014 
1015 	if (dramtype == DDR3) {
1016 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1017 		if (dram_drv_ohm == 34)
1018 			mr1_mr3 |= DDR3_DS_34;
1019 
1020 		if (dram_odt_ohm == 0)
1021 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1022 		else if (dram_odt_ohm <= 40)
1023 			mr1_mr3 |= DDR3_RTT_NOM_40;
1024 		else if (dram_odt_ohm <= 60)
1025 			mr1_mr3 |= DDR3_RTT_NOM_60;
1026 		else
1027 			mr1_mr3 |= DDR3_RTT_NOM_120;
1028 
1029 	} else if (dramtype == DDR4) {
1030 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1031 		if (dram_drv_ohm == 48)
1032 			mr1_mr3 |= DDR4_DS_48;
1033 
1034 		if (dram_odt_ohm == 0)
1035 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1036 		else if (dram_odt_ohm <= 34)
1037 			mr1_mr3 |= DDR4_RTT_NOM_34;
1038 		else if (dram_odt_ohm <= 40)
1039 			mr1_mr3 |= DDR4_RTT_NOM_40;
1040 		else if (dram_odt_ohm <= 48)
1041 			mr1_mr3 |= DDR4_RTT_NOM_48;
1042 		else if (dram_odt_ohm <= 60)
1043 			mr1_mr3 |= DDR4_RTT_NOM_60;
1044 		else
1045 			mr1_mr3 |= DDR4_RTT_NOM_120;
1046 
1047 	} else if (dramtype == LPDDR3) {
1048 		if (dram_drv_ohm <= 34)
1049 			mr1_mr3 |= LPDDR3_DS_34;
1050 		else if (dram_drv_ohm <= 40)
1051 			mr1_mr3 |= LPDDR3_DS_40;
1052 		else if (dram_drv_ohm <= 48)
1053 			mr1_mr3 |= LPDDR3_DS_48;
1054 		else if (dram_drv_ohm <= 60)
1055 			mr1_mr3 |= LPDDR3_DS_60;
1056 		else if (dram_drv_ohm <= 80)
1057 			mr1_mr3 |= LPDDR3_DS_80;
1058 
1059 		if (dram_odt_ohm == 0)
1060 			lp3_odt_value = LPDDR3_ODT_DIS;
1061 		else if (dram_odt_ohm <= 60)
1062 			lp3_odt_value = LPDDR3_ODT_60;
1063 		else if (dram_odt_ohm <= 120)
1064 			lp3_odt_value = LPDDR3_ODT_120;
1065 		else
1066 			lp3_odt_value = LPDDR3_ODT_240;
1067 	} else {/* for lpddr4 */
1068 		/* MR3 for lp4 PU-CAL and PDDS */
1069 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1070 		mr1_mr3 |= lp4_pu_cal;
1071 
1072 		tmp = lp4_odt_calc(dram_drv_ohm);
1073 		if (!tmp)
1074 			tmp = LPDDR4_PDDS_240;
1075 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1076 
1077 		/* MR11 for lp4 ca odt, dq odt set */
1078 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 			     DDR_PCTL2_INIT6);
1080 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1081 
1082 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1083 
1084 		tmp = lp4_odt_calc(dram_odt_ohm);
1085 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1086 
1087 		tmp = lp4_odt_calc(dram_caodt_ohm);
1088 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1089 		sw_set_req(dram);
1090 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1091 				DDR_PCTL2_INIT6,
1092 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1093 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1094 		sw_set_ack(dram);
1095 
1096 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1097 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1098 			     DDR_PCTL2_INIT7);
1099 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1100 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1101 
1102 		tmp = lp4_odt_calc(phy_odt_ohm);
1103 		mr22 |= tmp;
1104 		mr22 = mr22 |
1105 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1106 			LPDDR4_ODTE_CK_SHIFT) |
1107 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1108 			LPDDR4_ODTE_CS_SHIFT) |
1109 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1110 			LPDDR4_ODTD_CA_SHIFT);
1111 
1112 		sw_set_req(dram);
1113 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1114 				DDR_PCTL2_INIT7,
1115 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1116 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1117 		sw_set_ack(dram);
1118 	}
1119 
1120 	if (dramtype == DDR4 || dramtype == DDR3) {
1121 		sw_set_req(dram);
1122 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1123 				DDR_PCTL2_INIT3,
1124 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1125 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1126 		sw_set_ack(dram);
1127 	} else {
1128 		sw_set_req(dram);
1129 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1130 				DDR_PCTL2_INIT4,
1131 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1132 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1133 		sw_set_ack(dram);
1134 	}
1135 }
1136 
1137 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1138 				   struct rv1126_sdram_params *sdram_params)
1139 {
1140 	void __iomem *phy_base = dram->phy;
1141 	u32 dramtype = sdram_params->base.dramtype;
1142 	struct sdram_head_info_index_v2 *index =
1143 		(struct sdram_head_info_index_v2 *)common_info;
1144 	struct dq_map_info *map_info;
1145 
1146 	map_info = (struct dq_map_info *)((void *)common_info +
1147 		index->dq_map_index.offset * 4);
1148 
1149 	if (dramtype <= LPDDR4)
1150 		writel((map_info->byte_map[dramtype / 4] >>
1151 			((dramtype % 4) * 8)) & 0xff,
1152 		       PHY_REG(phy_base, 0x4f));
1153 
1154 	return 0;
1155 }
1156 
/*
 * Basic PHY configuration: apply the board/frequency specific PHY
 * register table, enable the byte lanes that match the detected bus
 * width and DQ byte mapping, and select the vref ranges used later by
 * CA and write training.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* register table is terminated by an address of 0xFFFFFFFF */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical bytes carry logical bytes 0 and 1 */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* byte lane enables: bw==2 -> all four bytes, bw==1 -> the two
	 * bytes mapped above, else only byte0
	 */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1202 
1203 static int update_refresh_reg(struct dram_info *dram)
1204 {
1205 	void __iomem *pctl_base = dram->pctl;
1206 	u32 ret;
1207 
1208 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1209 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1210 
1211 	return 0;
1212 }
1213 
1214 /*
1215  * rank = 1: cs0
1216  * rank = 2: cs1
1217  */
1218 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1219 {
1220 	u32 ret;
1221 	u32 i, temp;
1222 	u32 dqmap;
1223 
1224 	void __iomem *pctl_base = dram->pctl;
1225 	struct sdram_head_info_index_v2 *index =
1226 		(struct sdram_head_info_index_v2 *)common_info;
1227 	struct dq_map_info *map_info;
1228 
1229 	map_info = (struct dq_map_info *)((void *)common_info +
1230 		index->dq_map_index.offset * 4);
1231 
1232 	if (dramtype == LPDDR2)
1233 		dqmap = map_info->lp2_dq0_7_map;
1234 	else
1235 		dqmap = map_info->lp3_dq0_7_map;
1236 
1237 	pctl_read_mr(pctl_base, rank, mr_num);
1238 
1239 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1240 
1241 	if (dramtype != LPDDR4) {
1242 		temp = 0;
1243 		for (i = 0; i < 8; i++) {
1244 			temp = temp | (((ret >> i) & 0x1) <<
1245 				       ((dqmap >> (i * 4)) & 0xf));
1246 		}
1247 	} else {
1248 		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1249 	}
1250 
1251 	return ret;
1252 }
1253 
1254 /* before call this function autorefresh should be disabled */
1255 void send_a_refresh(struct dram_info *dram)
1256 {
1257 	void __iomem *pctl_base = dram->pctl;
1258 
1259 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1260 		continue;
1261 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1262 }
1263 
/*
 * Enter (en != 0) or exit (en == 0) self-refresh via the controller's
 * software self-refresh request, spinning until STAT confirms the new
 * operating mode. NOTE(review): both waits are unbounded — a stuck
 * controller hangs here.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait for the SW-requested (not automatic) self-refresh */
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait until the controller has left self-refresh */
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}
1286 
/*
 * For each of the four DQ groups, select every entry of dq_sel on the
 * left/right loop inverse-delay muxes, read the measured delay back and
 * store it into the destination register given by the dq_sel table.
 * Assumes dq_sel[i] = { selector, left dest offset, right dest offset }
 * — layout inferred from usage; confirm against the dq_sel definition.
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			/* read the selected left-loop delay and record it */
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
1310 
/*
 * Latch new RX DQ de-skew values into the PHY: clear 0x70 bits 1/6 and
 * pulse bit 4 for 1us.
 */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1320 
/*
 * Latch new TX DQ de-skew values into the PHY: disable DQ write
 * training auto mode (0x7a[0]), set 0x2[3], then pulse 0xc[6] for 1us.
 */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1331 
/*
 * Latch new CA de-skew values into the PHY: clear 0x25[2], then pulse
 * 0x22[6] for 1us.
 */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1341 
1342 /*
1343  * dir: 0: de-skew = delta_*
1344  *	1: de-skew = reg val - delta_*
1345  * delta_dir: value for differential signal: clk/
1346  * delta_sig: value for single signal: ca/cmd
1347  */
1348 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1349 			     int delta_sig, u32 cs, u32 dramtype)
1350 {
1351 	void __iomem *phy_base = dram->phy;
1352 	u32 i, cs_en, tmp;
1353 	u32 dfi_lp_stat = 0;
1354 
1355 	if (cs == 0)
1356 		cs_en = 1;
1357 	else if (cs == 2)
1358 		cs_en = 2;
1359 	else
1360 		cs_en = 3;
1361 
1362 	if (dramtype == LPDDR4 &&
1363 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1364 		dfi_lp_stat = 1;
1365 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1366 	}
1367 	enter_sr(dram, 1);
1368 
1369 	for (i = 0; i < 0x20; i++) {
1370 		if (dir == DESKEW_MDF_ABS_VAL)
1371 			tmp = delta_sig;
1372 		else
1373 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1374 			      delta_sig;
1375 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1376 	}
1377 
1378 	if (dir == DESKEW_MDF_ABS_VAL)
1379 		tmp = delta_dif;
1380 	else
1381 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1382 		       delta_sig + delta_dif;
1383 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1384 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1385 	if (dramtype == LPDDR4) {
1386 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1387 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1388 
1389 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1390 		update_ca_prebit(dram);
1391 	}
1392 	enter_sr(dram, 0);
1393 
1394 	if (dfi_lp_stat)
1395 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1396 
1397 }
1398 
1399 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1400 {
1401 	u32 i, j, offset = 0;
1402 	u32 min = 0x3f;
1403 	void __iomem *phy_base = dram->phy;
1404 	u32 byte_en;
1405 
1406 	if (signal == SKEW_TX_SIGNAL)
1407 		offset = 8;
1408 
1409 	if (signal == SKEW_CA_SIGNAL) {
1410 		for (i = 0; i < 0x20; i++)
1411 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1412 	} else {
1413 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1414 		for (j = offset; j < offset + rank * 4; j++) {
1415 			if (!((byte_en >> (j % 4)) & 1))
1416 				continue;
1417 			for (i = 0; i < 11; i++)
1418 				min = MIN(min,
1419 					  readl(PHY_REG(phy_base,
1420 							dqs_dq_skew_adr[j] +
1421 							i)));
1422 		}
1423 	}
1424 
1425 	return min;
1426 }
1427 
1428 static u32 low_power_update(struct dram_info *dram, u32 en)
1429 {
1430 	void __iomem *pctl_base = dram->pctl;
1431 	u32 lp_stat = 0;
1432 
1433 	if (en) {
1434 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1435 	} else {
1436 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1437 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1438 	}
1439 
1440 	return lp_stat;
1441 }
1442 
1443 /*
1444  * signal:
1445  * dir: 0: de-skew = delta_*
1446  *	1: de-skew = reg val - delta_*
1447  * delta_dir: value for differential signal: dqs
1448  * delta_sig: value for single signal: dq/dm
1449  */
1450 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1451 			     int delta_dif, int delta_sig, u32 rank)
1452 {
1453 	void __iomem *phy_base = dram->phy;
1454 	u32 i, j, tmp, offset;
1455 	u32 byte_en;
1456 
1457 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1458 
1459 	if (signal == SKEW_RX_SIGNAL)
1460 		offset = 0;
1461 	else
1462 		offset = 8;
1463 
1464 	for (j = offset; j < (offset + rank * 4); j++) {
1465 		if (!((byte_en >> (j % 4)) & 1))
1466 			continue;
1467 		for (i = 0; i < 0x9; i++) {
1468 			if (dir == DESKEW_MDF_ABS_VAL)
1469 				tmp = delta_sig;
1470 			else
1471 				tmp = delta_sig + readl(PHY_REG(phy_base,
1472 							dqs_dq_skew_adr[j] +
1473 							i));
1474 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1475 		}
1476 		if (dir == DESKEW_MDF_ABS_VAL)
1477 			tmp = delta_dif;
1478 		else
1479 			tmp = delta_dif + readl(PHY_REG(phy_base,
1480 						dqs_dq_skew_adr[j] + 9));
1481 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1482 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1483 	}
1484 	if (signal == SKEW_RX_SIGNAL)
1485 		update_dq_rx_prebit(dram);
1486 	else
1487 		update_dq_tx_prebit(dram);
1488 }
1489 
/*
 * Read gate training for one chip select.
 * Returns 0 on success, a non-zero bitmask of byte lanes whose training
 * did not complete, or -1 when the PHY reports a training error.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save the current per-byte ODT values so they can be restored */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	/* non-LPDDR4: force a weak fixed RTT during gate training */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* 0x91 bit5 set is treated as a training error; otherwise XOR the
	 * per-byte done bits against the enabled byte lanes (0xf[3:0]) so
	 * any lane that did not finish shows up as a set bit
	 */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	/* restore the saved ODT values */
	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1542 
/*
 * Write leveling for one chip select. Always returns 0; on PHY timeout
 * it prints an error and hangs deliberately (boot cannot proceed with a
 * failed write leveling).
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* pass the current MR1 value (INIT3 low 16 bits) to the PHY */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	/* high MR1 bits plus a dram-type flag (0x40 DDR3/4, 0x80 others) */
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* wait until the done bits (0x92) match the enabled byte lanes */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1605 
/*
 * 32-byte 0xaa/0x55 test pattern; consumed by training/test code
 * elsewhere in this file (not visible in this chunk).
 * NOTE(review): values > 0x7f stored in a plain char array rely on
 * implementation-defined conversion; unsigned char would be stricter.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1612 
/*
 * Read (RX eye) training for one chip select using the PHY's automatic
 * read-train engine. Returns 0 on success, -1 on invalid cs, timeout or
 * training error. The mhz parameter is currently unused.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* DDR3 at the default vref (0x80): lower rx vref during training,
	 * restored at the end of this function
	 */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original DDR3 rx vref */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1729 
/*
 * Write (TX eye) training for one chip select using the PHY's automatic
 * write-train engine. LPDDR3 at <= 400MHz temporarily runs with CL=8 /
 * CWL=4 and MR2=0x6, restored afterwards. Returns 0 on success, -1 on
 * training error; hangs (after printing) on timeout.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: save CL/CWL, force CL=8/CWL=4 and MR2=0x6 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* train at bank 0, row 0, column 0 */
	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* wait for the write train done flag (0x92[7]) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		/* average the two per-channel trained vref values */
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the saved LPDDR3 CL/CWL and MR2 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1842 
1843 static int data_training(struct dram_info *dram, u32 cs,
1844 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1845 			 u32 training_flag)
1846 {
1847 	u32 ret = 0;
1848 
1849 	if (training_flag == FULL_TRAINING)
1850 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1851 				WRITE_TRAINING | READ_TRAINING;
1852 
1853 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1854 		ret = data_training_wl(dram, cs,
1855 				       sdram_params->base.dramtype,
1856 				       sdram_params->ch.cap_info.rank);
1857 		if (ret != 0)
1858 			goto out;
1859 	}
1860 
1861 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1862 		ret = data_training_rg(dram, cs,
1863 				       sdram_params->base.dramtype);
1864 		if (ret != 0)
1865 			goto out;
1866 	}
1867 
1868 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1869 		ret = data_training_rd(dram, cs,
1870 				       sdram_params->base.dramtype,
1871 				       sdram_params->base.ddr_freq);
1872 		if (ret != 0)
1873 			goto out;
1874 	}
1875 
1876 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1877 		ret = data_training_wr(dram, cs,
1878 				       sdram_params->base.dramtype,
1879 				       sdram_params->base.ddr_freq, dst_fsp);
1880 		if (ret != 0)
1881 			goto out;
1882 	}
1883 
1884 out:
1885 	return ret;
1886 }
1887 
1888 static int get_wrlvl_val(struct dram_info *dram,
1889 			 struct rv1126_sdram_params *sdram_params)
1890 {
1891 	u32 i, j, clk_skew;
1892 	void __iomem *phy_base = dram->phy;
1893 	u32 lp_stat;
1894 	int ret;
1895 
1896 	lp_stat = low_power_update(dram, 0);
1897 
1898 	clk_skew = 0x1f;
1899 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1900 			 sdram_params->base.dramtype);
1901 
1902 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1903 	if (sdram_params->ch.cap_info.rank == 2)
1904 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1905 
1906 	for (j = 0; j < 2; j++)
1907 		for (i = 0; i < 4; i++)
1908 			wrlvl_result[j][i] =
1909 				readl(PHY_REG(phy_base,
1910 					      wrlvl_result_offset[j][i])) -
1911 				clk_skew;
1912 
1913 	low_power_update(dram, lp_stat);
1914 
1915 	return ret;
1916 }
1917 
1918 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1919 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1920 				      void __iomem *phy_base, u8 cs_num)
1921 {
1922 	int i;
1923 
1924 	result->cs_num = cs_num;
1925 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1926 			  PHY_DQ_WIDTH_MASK;
1927 	for (i = 0; i < FSP_NUM; i++)
1928 		result->fsp_mhz[i] = 0;
1929 }
1930 
/*
 * Read back the per-bit read and write training window edges (min/max)
 * for every enabled byte lane and store them into the cs result
 * structures for the ddr test tool.
 */
static void save_rw_trn_min_max(void __iomem *phy_base,
				struct cs_rw_trn_result *rd_result,
				struct cs_rw_trn_result *wr_result,
				u8 byte_en)
{
	u16 phy_ofs;
	u8 dqs;
	u8 dq;

	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
		if ((byte_en & BIT(dqs)) == 0)
			continue;

		/* Channel A or B (low or high 16 bit) */
		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
		/* low or high 8 bit */
		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
		/* rd min/max at +0x15/+0x27, wr min/max at +0x3d/+0x4f */
		for (dq = 0; dq < 8; dq++) {
			rd_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
			rd_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
			wr_result->dqs[dqs].dq_min[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
			wr_result->dqs[dqs].dq_max[dq] =
				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
		}
	}
}
1960 
1961 static void save_rw_trn_deskew(void __iomem *phy_base,
1962 			       struct fsp_rw_trn_result *result, u8 cs_num,
1963 			       int min_val, bool rw)
1964 {
1965 	u16 phy_ofs;
1966 	u8 cs;
1967 	u8 dq;
1968 
1969 	result->min_val = min_val;
1970 
1971 	for (cs = 0; cs < cs_num; cs++) {
1972 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
1973 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
1974 		for (dq = 0; dq < 8; dq++) {
1975 			result->cs[cs].dqs[0].dq_deskew[dq] =
1976 				readb(PHY_REG(phy_base, phy_ofs + dq));
1977 			result->cs[cs].dqs[1].dq_deskew[dq] =
1978 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
1979 			result->cs[cs].dqs[2].dq_deskew[dq] =
1980 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
1981 			result->cs[cs].dqs[3].dq_deskew[dq] =
1982 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
1983 		}
1984 
1985 		result->cs[cs].dqs[0].dqs_deskew =
1986 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
1987 		result->cs[cs].dqs[1].dqs_deskew =
1988 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
1989 		result->cs[cs].dqs[2].dqs_deskew =
1990 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
1991 		result->cs[cs].dqs[3].dqs_deskew =
1992 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
1993 	}
1994 }
1995 
1996 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
1997 {
1998 	result->flag = DDR_DQ_EYE_FLAG;
1999 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2000 }
2001 #endif
2002 
/*
 * Run read gate / read / write training for frequency set point @fsp at
 * the (high) target frequency, seeding the PHY with the write-leveling
 * results captured earlier into wrlvl_result[][] by get_wrlvl_val().
 *
 * Returns 0 on success, non-zero if any data_training() step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	int ret;

	/* Average the write-leveling result over all ranks and bytes. */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
			dqs_skew += wrlvl_result[j][i];
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       ARRAY_SIZE(wrlvl_result[0]));

	/* Shift the clock so the average DQS skew centers at 0x20. */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* Load cs0 per-byte write-leveling values (plus the clock shift). */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	/* Repeat with the cs1 write-leveling values for a 2-rank setup. */
	if (sdram_params->ch.cap_info.rank == 2) {
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* Normalize RX deskew: shift all values down by the minimum. */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* Same for TX: use the smaller of the TX and CA minima. */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* Re-run read gate training with the adjusted deskew values. */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2100 
/* Program the memory scheduler address-mapping config (deviceconf). */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->deviceconf);
	/* Clear grf noc_con0[1:0]. */
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}
2106 
2107 static void update_noc_timing(struct dram_info *dram,
2108 			      struct rv1126_sdram_params *sdram_params)
2109 {
2110 	void __iomem *pctl_base = dram->pctl;
2111 	u32 bw, bl;
2112 
2113 	bw = 8 << sdram_params->ch.cap_info.bw;
2114 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2115 
2116 	/* update the noc timing related to data bus width */
2117 	if ((bw / 8 * bl) == 16)
2118 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2119 	else if ((bw / 8 * bl) == 32)
2120 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2121 	else if ((bw / 8 * bl) == 64)
2122 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2123 	else
2124 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2125 
2126 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2127 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2128 
2129 	if (sdram_params->base.dramtype == LPDDR4) {
2130 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2131 			(bw == 16) ? 0x1 : 0x2;
2132 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2133 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2134 	}
2135 
2136 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2137 	       &dram->msch->ddrtiminga0);
2138 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2139 	       &dram->msch->ddrtimingb0);
2140 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2141 	       &dram->msch->ddrtimingc0);
2142 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2143 	       &dram->msch->devtodev0);
2144 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2145 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2146 	       &dram->msch->ddr4timing);
2147 }
2148 
/*
 * Apply the final system-level DRAM configuration: scheduler ddrconfig,
 * OS registers (os_reg2/3 encode the detected geometry for later boot
 * stages), per-CS device size and the NOC timing registers.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/*
		 * cs_pst: presumably the CS address bit position derived
		 * from ADDRMAP0 -- TODO confirm against the uMCTL2 manual.
		 * NOTE(review): `1 << cs_pst` is an int shift; for
		 * cs_pst >= 31 this would overflow -- confirm cs_pst range
		 * or use 1ULL.
		 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst > 28)
			cs_cap[0] = 1 << cs_pst;
	}

	/* devicesize fields are per-CS capacity in units of 64 MiB. */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2181 
2182 static void enable_low_power(struct dram_info *dram,
2183 			     struct rv1126_sdram_params *sdram_params)
2184 {
2185 	void __iomem *pctl_base = dram->pctl;
2186 	u32 grf_lp_con;
2187 
2188 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2189 
2190 	if (sdram_params->base.dramtype == DDR4)
2191 		grf_lp_con = (0x7 << 16) | (1 << 1);
2192 	else if (sdram_params->base.dramtype == DDR3)
2193 		grf_lp_con = (0x7 << 16) | (1 << 0);
2194 	else
2195 		grf_lp_con = (0x7 << 16) | (1 << 2);
2196 
2197 	/* en lpckdis_en */
2198 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2199 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2200 
2201 	/* enable sr, pd */
2202 	if (dram->pd_idle == 0)
2203 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2204 	else
2205 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2206 	if (dram->sr_idle == 0)
2207 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2208 	else
2209 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2210 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2211 }
2212 
/*
 * Publish boot information to the next stage through Rockchip ATAGs:
 * the debug serial configuration and the DDR memory bank layout.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* Start from a clean tag list, then add the serial tag. */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	/* 3/4-row parts only provide 75% of the nominal capacity. */
	if (cap_info->row_3_4) {
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* CS address bit position, derived from ADDRMAP0. */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	/*
	 * With a high CS bit, cs1 starts at a fixed power-of-two address
	 * and the two ranks are reported as separate banks; otherwise the
	 * ranks are contiguous and reported as a single bank.
	 */
	if (cs_cap[1] && cs_pst > 27) {
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1];
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is populated but never passed to
	 * atags_set_tag() here -- confirm whether the SOC info tag is set
	 * elsewhere or this is dead code.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2268 
2269 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2270 {
2271 	u32 split;
2272 
2273 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2274 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2275 		split = 0;
2276 	else
2277 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2278 			SPLIT_SIZE_MASK;
2279 
2280 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2281 			     &sdram_params->base, split);
2282 }
2283 
/*
 * Core controller/PHY bring-up: configure DDR clocks, release the reset
 * lines in stages while programming the PHY and uPCTL2, then run read
 * gate training and the final system configuration.
 *
 * @post_init: 0 on the first (geometry-probing) pass, 1 on the final
 *             pass; only the final pass prints errors and validates cs1.
 * Returns 0 on success, -1 on a training failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* Assert all resets, then release them stage by stage below. */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2)
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* Halve tREFI (RFSHTMG[27:16]) for extended-temperature parts. */
	u32 tmp, trefi;

	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	/*
	 * NOTE(review): DFIMISC bits 4/5 -- presumably DFI init
	 * start/complete enables; confirm against the uMCTL2 manual.
	 */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	/* Release the last reset and wait for the controller to leave
	 * the init operating mode (STAT[2:0] != 0).
	 */
	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* Re-issue the LPDDR4 ODT/Vref mode registers from the
		 * values programmed into INIT6/INIT7.
		 */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	/* cs1 is only validated on the final pass, once rank is known. */
	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* Propagate the trained PHY Vref (reg 0x105), scaled by
		 * 39, to the DRAM VrefDQ -- units per pctl_write_vrefdq.
		 */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2387 
/*
 * Probe the geometry of the attached SDRAM: column/bank/row bits, bus
 * width and rank count.  Non-LPDDR4 parts are probed with the generic
 * sdram_detect_* write/read helpers; LPDDR4 reports its density via MR8.
 * Returns 0 on success, -1 (as u64) if the column/row probe fails.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;	/* start probing with a 16-bit bus assumption */
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe col, bank, then dbw. */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe bank groups. */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/*
		 * LPDDR4: derive rows from the MR8 density field
		 * (bits [5:2]); odd values encode a 3/4-row device --
		 * TODO confirm against the JESD209-4 MR8 encoding.
		 */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* Disable low-power features (PWRCTL) while probing; restored below. */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* Rank 1 exists iff read gate training on cs1 succeeds. */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/*
		 * Enable all DQ byte lanes (PHY reg 0xf -- presumably the
		 * lane-enable mask) and retrain: success means the full
		 * 32-bit bus is wired up.
		 */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0)
			cap_info->bw = 2;
		else
			cap_info->bw = 1;
	}

	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* Assume cs1 mirrors cs0 geometry; refined later by
	 * dram_detect_cs1_row() / sdram_detect_high_row().
	 */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2481 
/*
 * Detect the real row count of rank 1 (cs1) by writing a test pattern
 * at decreasing row boundaries above the cs0 address space until a
 * write/readback round-trip succeeds without aliasing back into cs0.
 * Returns the detected cs1 row count, or 0 if rank 1 is absent or no
 * valid row count is found.
 */
static int dram_detect_cs1_row(struct dram_info *dram,
			       struct rv1126_sdram_params *sdram_params,
			       unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;
	u32 cs_pst;
	u32 cs_add = 0;
	u32 max_row;

	if (cap_info->rank == 2) {
		/* CS address bit position, derived from ADDRMAP0. */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst < 28)
			cs_add = 1;

		/* cs1 starts right above the cs0 address window. */
		cs0_cap = 1 << cs_pst;

		/* Effective bank address bits (DDR4 adds bank-group bits). */
		if (sdram_params->base.dramtype == DDR4) {
			if (cap_info->dbw == 0)
				bktmp = cap_info->bk + 2;
			else
				bktmp = cap_info->bk + 1;
		} else {
			bktmp = cap_info->bk;
		}
		bw = cap_info->bw;
		coltmp = cap_info->col;

		/* Compare only the bytes the bus actually drives. */
		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* Highest address bit testable within the 32-bit window. */
		max_row = (cs_pst == 31) ? 30 : 31;

		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;

		row = (cap_info->cs0_row > max_row) ? max_row :
			cap_info->cs0_row;

		/*
		 * Walk row counts downward: write 0 at the cs1 base and
		 * PATTERN at the candidate top row; success requires the
		 * pattern to read back AND the base to stay 0 (no alias).
		 */
		for (; row > 12; row--) {
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    (u32)cs0_cap +
				    (1ul << (row + bktmp + coltmp +
					     cs_add + bw - 1ul)));

			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
			writel(PATTERN, test_addr);

			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
2549 
/*
 * Full detection flow: initial init (with one DDR3 byte-map retry),
 * capacity probe, re-init with the detected geometry, then cs1 row
 * detection and OS-register fix-up.
 *
 * return: 0 = success, other = fail
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (sdram_init_(dram, sdram_params, 0)) {
		/*
		 * DDR3 only: retry once with an alternate byte map in
		 * case the board swaps the DQ byte lanes.
		 */
		if (sdram_params->base.dramtype == DDR3) {
			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
					(0x0 << 0)) << 24);
			if (sdram_init_(dram, sdram_params, 0))
				return -1;
		} else {
			return -1;
		}
	}

	/* Basic write/readback sanity check before probing capacity. */
	if (sdram_params->base.dramtype == DDR3) {
		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
			return -1;
	}

	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* Re-program the controller with the detected geometry. */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	/* Refine cs1 rows and re-encode them into os_reg2/3. */
	cap_info->cs1_row =
		dram_detect_cs1_row(dram, sdram_params, 0);
	if (cap_info->cs1_row) {
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	sdram_detect_high_row(cap_info);

out:
	return ret;
}
2608 
2609 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2610 {
2611 	u32 i;
2612 	u32 offset = 0;
2613 	struct ddr2_3_4_lp2_3_info *ddr_info;
2614 
2615 	if (!freq_mhz) {
2616 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2617 		if (ddr_info)
2618 			freq_mhz =
2619 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2620 				DDR_FREQ_MASK;
2621 		else
2622 			freq_mhz = 0;
2623 	}
2624 
2625 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2626 		if (sdram_configs[i].base.ddr_freq == 0 ||
2627 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2628 			break;
2629 	}
2630 	offset = i == 0 ? 0 : i - 1;
2631 
2632 	return &sdram_configs[offset];
2633 }
2634 
/*
 * uPCTL2 timing registers that pre_set_rate() copies into the alternate
 * frequency register set (UMCTL2_REGS_FREQ) before a frequency switch.
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2658 
/* PHY CL/CWL/AL registers updated per frequency set point (see pre_set_rate). */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2664 
/*
 * Prepare the controller and PHY for a frequency switch to @dst_fsp:
 * copy the required pctl timing registers into the dst_fsp register
 * copy, update the PHY CL/CWL/AL registers for that set point, refresh
 * drive/ODT settings, and (for LPDDR4) rewrite the mode registers and
 * mirror their values into PHY scratch registers.
 * @dst_fsp_lp4 selects the LPDDR4 FSP encoding written into MR13.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	/* Open the software register-update window (req ... ack). */
	sw_set_req(dram);
	/*
	 * pctl timing update: pctl_regs.pctl[][] is a 0xFFFFFFFF-terminated
	 * (addr, value) list; `find` resumes the inner scan, assuming the
	 * list and pctl_need_update_reg[] are in matching order.
	 */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	/* Halve tREFI (RFSHTMG[27:16]) for extended-temperature parts. */
	u32 tmp, trefi;

	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	/*
	 * fsp > 0 uses a shadow copy of the PHY timing registers;
	 * 0x387-based offset -- presumably per-fsp register banks,
	 * confirm against the PHY register map.
	 */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/*
		 * Rewrite the LPDDR4 mode registers from the INIT3/4/6/7
		 * values for the target fsp and mirror each value into a
		 * PHY register (0x17-0x1d) for later use.
		 */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13: clear FSP bits [7:6], then select dst_fsp_lp4. */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2790 
/*
 * Capture the currently-programmed drive strength, ODT and Vref values
 * for frequency set point @dst_fsp into fsp_param[] so later stages can
 * switch frequencies without re-deriving them.  The NOC timings are
 * copied alongside and the entry is marked valid with FSP_FLAG.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4 always terminates to ground; others follow odt_info. */
	if (sdram_params->base.dramtype == LPDDR4) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* PHY 0x111/0x110 hold the pull-up/pull-down read ODT values. */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* Extract DS/ODT from the MR values programmed for this fsp. */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * CA Vref: average the worst-case (max of one pair, min of
		 * the other) trained values from the PHY; bit 6 is carried
		 * over from PHY reg 0x1e.
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	p_fsp_param->flag = FSP_FLAG;
}
2904 
2905 #ifndef CONFIG_SPL_KERNEL_BOOT
2906 static void copy_fsp_param_to_ddr(void)
2907 {
2908 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
2909 	       sizeof(fsp_param));
2910 }
2911 #endif
2912 
/*
 * Switch the DRAM to a new operating frequency.
 *
 * @dram:        driver state (controller/PHY/CRU/GRF register bases)
 * @sdram_params: current configuration; rank and bus width are copied
 *               into the new per-frequency parameter set
 * @freq:        target frequency in MHz
 * @cur_freq:    current frequency in MHz
 *               NOTE(review): cur_freq is not referenced in this body —
 *               confirm it is intentionally unused.
 * @dst_fsp:     destination controller frequency set point (0..3),
 *               selects the UMCTL2_REGS_FREQ() register copy
 * @dst_fsp_lp4: LPDDR4 FSP-OP/FSP-WR selector written to MR13 bit 7
 * @training_en: NOTE(review): not referenced in this body;
 *               high_freq_training() is called unconditionally — confirm.
 *
 * Sequence: quiesce the controller, enter self-refresh, gate clocks and
 * re-lock the DPLL/PHY PLL at the new rate, leave self-refresh, rewrite
 * the mode registers for the new frequency, retrain, and save the
 * resulting FSP parameters.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* Disable low-power features for the duration of the switch;
	 * the previous state is restored at the end.
	 */
	lp_stat = low_power_update(dram, 0);
	/* Build the parameter set for the target frequency, carrying over
	 * the probed rank and bus width from the current configuration.
	 */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* Wait until the controller is out of self-refresh */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/* Determine whether the destination FSP runs with the DRAM DLL
	 * off: for DDR3, MR1 bit0 = 1 disables the DLL; for DDR4, the
	 * sense of that bit is inverted.
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	/* Same decoding for the currently active FSP (MSTR2 low bits) */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* If the DLL is currently on, turn it off via MR1 before entering
	 * self-refresh so the frequency change is legal.
	 */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	/* Hold off auto-refresh while the clock is being changed */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* Enable the PHY bufferen override so the PHY keeps the DRAM
	 * stable while the controller-side DFI is reconfigured.
	 */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	/* Quasi-dynamic register writes must be bracketed by the
	 * software req/ack handshake (SWCTL/SWSTAT).
	 */
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	/* Skip ZQCL on self-refresh exit for both FSPs during the switch */
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* Gate the scheduler and controller clocks before touching the PLL */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* Hold the PHY in reset while the DPLL and the PHY PLL are
	 * re-locked at the new rate; the DPLL runs at half the DRAM
	 * clock here.
	 */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* Release the bufferen override and un-gate the clocks again */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* Wait for the DFI initialization handshake to complete */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* Select the destination FSP in the controller and the PHY */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* Pulse PHY reg 0x71 bit5 — presumably latches the new FSP
	 * selection in the PHY; TODO confirm against the PHY manual.
	 */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* Re-issue the mode registers for the new frequency from the
	 * controller's INIT3/4/6/7 shadow copies for dst_fsp.
	 */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		/* If the DLL stays enabled at the new rate, request a DLL
		 * reset via MR0 and give it time to take effect.
		 */
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			/* DDR4 additionally carries MR3..MR6 in INIT4/6/7 */
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: keep the stored value but force bit7 (FSP-OP) to
		 * the requested dst_fsp_lp4 selector.
		 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	/* Re-enable auto-refresh now that the switch is complete */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	/* Record the trained parameters for this FSP for later stages */
	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3093 
/*
 * Walk the DRAM through every configured frequency set point.
 *
 * Reads the per-type frequency table (f0..f3) from the drv/odt info
 * blob, trains each set point in turn via ddr_set_rate(), and finishes
 * at f0, the final runtime frequency.  In the SPL kernel-boot build
 * only f0 is programmed.
 *
 * NOTE(review): the @dram parameter is never used — every call goes
 * through the file-scope &dram_info instead; confirm this is intended.
 */
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	/* f0 is the final target frequency (MHz) */
	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Clear both the in-memory FSP store and the local copy before
	 * the per-frequency results are accumulated.
	 */
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("get wrlvl value fail\n");

#ifndef CONFIG_SPL_KERNEL_BOOT
	/* Train FSP 1..3 at f1..f3 first, then end on FSP 0 at f0.
	 * The dst_fsp_lp4 argument alternates 1/0/1/0 across the steps.
	 */
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}
3150 
3151 int get_uart_config(void)
3152 {
3153 	struct sdram_head_info_index_v2 *index =
3154 		(struct sdram_head_info_index_v2 *)common_info;
3155 	struct global_info *gbl_info;
3156 
3157 	gbl_info = (struct global_info *)((void *)common_info +
3158 		index->global_index.offset * 4);
3159 
3160 	return gbl_info->uart_info;
3161 }
3162 
/*
 * Top-level TPL DRAM initialization entry point.
 *
 * Validates the common_info parameter blob, probes/initializes the
 * DRAM, steps through the configured frequency set points, and
 * publishes the results (FSP parameters and atags) for later stages.
 *
 * Return: 0 = success, other = fail.
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* Fixed peripheral base addresses for this SoC */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/* Sanity-check the common_info blob: it must be version 2, every
	 * per-type record must have the size this code was built against
	 * (sizes are stored in 32-bit words, hence / 4), and every offset
	 * must be non-zero.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	/* Offsets in the blob are in 32-bit words */
	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];

	/* For DDR3/DDR4, apply the 2T-timing choice from global info to
	 * the controller register template (pctl[0][1] bit 10).
	 */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		/* Report the type/frequency that failed before bailing */
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	/* Train every configured frequency set point, then publish the
	 * results for later boot stages.
	 */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3248 #endif /* CONFIG_TPL_BUILD */
3249