xref: /rk3399_rockchip-uboot/drivers/ram/rockchip/sdram_rv1126.c (revision e091b6c996a68a6a0faa2bd3ffdd90b3ba5f44ce)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4  */
5 
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19 
20 /* define training flag */
21 #define CA_TRAINING			(0x1 << 0)
22 #define READ_GATE_TRAINING		(0x1 << 1)
23 #define WRITE_LEVELING			(0x1 << 2)
24 #define WRITE_TRAINING			(0x1 << 3)
25 #define READ_TRAINING			(0x1 << 4)
26 #define FULL_TRAINING			(0xff)
27 
28 #define SKEW_RX_SIGNAL			(0)
29 #define SKEW_TX_SIGNAL			(1)
30 #define SKEW_CA_SIGNAL			(2)
31 
32 #define DESKEW_MDF_ABS_VAL		(0)
33 #define DESKEW_MDF_DIFF_VAL		(1)
34 
35 #ifdef CONFIG_TPL_BUILD
36 #ifndef CONFIG_TPL_TINY_FRAMEWORK
37 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
38 #endif
39 #endif
40 
41 #ifdef CONFIG_TPL_BUILD
42 
/*
 * Driver-private state shared by the TPL DRAM init code.
 */
struct dram_info {
	void __iomem *pctl;		/* UPCTL2 DDR controller registers */
	void __iomem *phy;		/* DDR PHY registers */
	struct rv1126_cru *cru;		/* clock & reset unit registers */
	struct msch_regs *msch;		/* memory scheduler registers */
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;		/* RAM uclass base/size info */
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;	/* self-refresh idle count — units not visible here */
	u32 pd_idle;	/* power-down idle count — units not visible here */
};
55 
/*
 * Fixed physical base addresses of the IP blocks touched during TPL
 * DRAM init.
 */
#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

/* secure GRF registers; CON13 carries the controller reset requests
 * (see rkclk_ddr_reset()) */
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

/* single driver-state instance used throughout TPL */
struct dram_info dram_info;
69 
/*
 * Per-frequency SDRAM parameter tables, selected at build time by
 * CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE (3 = DDR3, 0 = DDR4, 6 = LPDDR3,
 * 7 = LPDDR4 — mapping evident from the .inc file names).  Each .inc
 * file provides one struct rv1126_sdram_params initializer for the
 * frequency (MHz) encoded in its name.
 */
#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif
111 
/*
 * Drive-strength/ODT/vref parameter blob.  Interpreted as a
 * struct sdram_head_info_index_v2 header followed by per-dram-type
 * sections; see get_ddr_drv_odt_info().
 */
u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
/* read/write training results — presumably exported to the ddr test
 * tool command; filled by code outside this chunk */
static struct rw_trn_result rw_trn_result;
#endif

/* per-frequency-set-point parameters (indexed up to MAX_IDX) */
static struct rv1126_fsp_param fsp_param[MAX_IDX];

/* cached LPDDR3 ODT value — semantics set by code outside this chunk */
static u8 lp3_odt_value;

/* write-leveling results, [2][4] — presumably [rank][byte lane]; read
 * back via the offsets in wrlvl_result_offset[] */
static s8 wrlvl_result[2][4];
125 
/*
 * DDR configuration 0-9 (DDR3/LPDDR geometries).
 * Bit layout, matched against `tmp` in calculate_ddrconfig():
 *   bit 8    : rank - 1
 *   bits 7:5 : cs0 row bits - 13
 *   bit 4    : set only in entries 5-7, which are matched exclusively
 *              by the dual-rank equal-row special case
 *   bit 3    : 1 when bank bits == 3 (8 banks)
 *   bits 2:0 : col + bw - 10
 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};

/*
 * DDR configuration 10-21 (DDR4); indexed as ddr4_cfg_2_rbc[i - 10].
 * Bit layout, matched in calculate_ddrconfig():
 *   bit 7    : rank - 1
 *   bits 6:4 : cs0 row bits - 13
 *   bit 3    : set for the collapsed dual-rank (equal-row) variants
 *   bits 2:1 : bus width code
 *   bit 0    : die width code
 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};

/*
 * DDR configuration 22-28: additional DDR3-style geometries; configs
 * 23-28 appear as translation targets in d4_rbc_2_d3_rbc below (same
 * bit layout as ddr_cfg_2_rbc).
 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

/*
 * {DDR4 ddrconfig, DDR3-style ddrconfig} pairs.  The tail of
 * calculate_ddrconfig() remaps a DDR4 result forward through this
 * table, and set_ctl_address_map() maps it back before indexing
 * addrmap[].
 */
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};
181 
/*
 * Per-ddrconfig UMCTL2 address-map register values.  The 9 words of a
 * row are copied as consecutive registers starting at
 * DDR_PCTL2_ADDRMAP0 (see set_ctl_address_map()); unused row bits are
 * patched to 0xf afterwards.  NOTE(review): only ddrconfigs 0-22 have
 * rows here although calculate_ddrconfig() can produce values up to 28.
 */
u32 addrmap[23][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f} /* 22 */
};
232 
/*
 * DQ line selection/mapping table — the consumer is outside this
 * chunk, so the meaning of the three columns is not established here.
 * NOTE(review): document column semantics once the deskew code is in
 * view.
 */
static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

/* skew-register group base addresses, per {CS0, CS1} x {A, B} half
 * (macro names; values defined in the sdram_rv1126 header) */
static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

/*
 * PHY register offsets from which write-leveling results are read —
 * presumably indexed [rank][byte lane] to match wrlvl_result[2][4];
 * NOTE(review): confirm index meaning against the readback code.
 */
static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

/* per-DQS skew-update register offsets: RX then TX, CS0 then CS1 */
static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};
288 
/*
 * Drive the DDR controller and PHY reset request bits.
 * @ctl_srstn:  controller core soft-reset request (also applied to the
 *              AXI reset via UPCTL2_ASRSTN_REQ)
 * @ctl_psrstn: controller APB soft-reset request
 * @phy_srstn:  PHY soft-reset request
 * @phy_psrstn: PHY APB soft-reset request
 *
 * Controller resets go through the secure GRF (SGRF_SOC_CON13); PHY
 * resets go through CRU softrst_con[12].  NOTE(review): whether a '1'
 * asserts or releases reset is defined by the *_REQ macros and is not
 * visible here.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}
300 
/*
 * Program the DPLL (cru->pll[1]) to the requested output rate.
 * @hz: target PLL output rate in Hz
 *
 * Sequence: park the DPLL mux on the crystal oscillator, write the
 * divider registers, poll for lock, then switch the mux back to the
 * PLL output.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;	/* lock-wait budget: ~1000 x 1 us polls */
	u32 mhz = hz / MHz;

	/*
	 * Choose post-dividers per frequency band, then derive fbdiv
	 * from the target rate.  The "/ 24" assumes a 24 MHz reference
	 * with refdiv = 1 — NOTE(review): confirm the OSC rate.
	 */
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while reprogramming the PLL */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/*
	 * Wait for PLL lock.  NOTE(review): a timeout falls through
	 * silently and the mux is switched to the PLL regardless.
	 */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
345 
/*
 * Set the DPLL for the requested DDR data rate; the PLL is run at half
 * ddr_freq (per the in-line "inno ddr phy need freq / 2" note).
 * NOTE(review): this uses 'MHZ' while rkclk_set_dpll() divides by
 * 'MHz' — confirm both macros expand to the same scale.
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* for inno ddr phy need freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}
352 
/*
 * Derive the "ddrconfig" index for the detected DRAM geometry.
 *
 * Rank/row/col/bank/width are packed into a bit pattern (tmp) and
 * matched against ddr_cfg_2_rbc / ddr4_cfg_2_rbc; the matching table
 * index becomes the ddrconfig used to select an addrmap[] row.  For
 * DDR4 the raw index (10-21) is finally translated to its DDR3-style
 * equivalent through d4_rbc_2_d3_rbc.
 *
 * Returns the ddrconfig; when nothing matches, ddrconf keeps its
 * initial (u32)-1 and an error is printed — callers must treat
 * out-of-range values as failure.
 */
static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		/*
		 * Dual rank with identical row counts: try the
		 * dual-rank entries 17-20 (bit 3 set) first.
		 */
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				/* low nibble must match exactly; the
				 * row field (0x70) may be smaller than
				 * the table entry's */
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* dual rank, equal rows, 8 banks: entries 5-7 first */
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		/*
		 * NOTE(review): this scans entries 0-8 only, although
		 * ddr_cfg_2_rbc has 10 entries — confirm entry 9 is
		 * intentionally excluded.
		 */
		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}
		/* single rank, 8 banks, col + bw == 12: config 23 */
		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	/* the (u32)-1 "no match" value also trips this check */
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
446 
/*
 * Open a controller programming window: clear SWCTL.sw_done so that
 * subsequently written registers take effect.  Paired with
 * sw_set_ack().
 */
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done=0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}
454 
455 static void sw_set_ack(struct dram_info *dram)
456 {
457 	void __iomem *pctl_base = dram->pctl;
458 
459 	/* set sw_done=1 */
460 	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
461 	while (1) {
462 		/* wait programming done */
463 		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
464 				PCTL2_SW_DONE_ACK)
465 			break;
466 	}
467 }
468 
469 static void set_ctl_address_map(struct dram_info *dram,
470 				struct rv1126_sdram_params *sdram_params)
471 {
472 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
473 	void __iomem *pctl_base = dram->pctl;
474 	u32 ddrconf = cap_info->ddrconfig;
475 	u32 i, row;
476 
477 	row = cap_info->cs0_row;
478 	if (sdram_params->base.dramtype == DDR4) {
479 		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
480 			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
481 				ddrconf = d4_rbc_2_d3_rbc[i][0];
482 				break;
483 			}
484 		}
485 	}
486 
487 	if (ddrconf > ARRAY_SIZE(addrmap)) {
488 		printascii("set ctl address map fail\n");
489 		return;
490 	}
491 
492 	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
493 			  &addrmap[ddrconf][0], 9 * 4);
494 
495 	/* unused row set to 0xf */
496 	for (i = 17; i >= row; i--)
497 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
498 			((i - 12) * 8 / 32) * 4,
499 			0xf << ((i - 12) * 8 % 32));
500 
501 	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
502 		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
503 	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
504 		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
505 
506 	if (cap_info->rank == 1)
507 		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
508 }
509 
/*
 * Configure or wait on the DDR PHY internal PLL.
 * @freq: target clock in Hz (used only when @wait == 0)
 * @wait: non-zero = toggle the PLL power-down control (PHY_PD_DISB)
 *        and spin until the lock flag (reg 0x90) is set;
 *        zero = program the divider registers only.
 */
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		/* select dividers by frequency band */
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		/*
		 * NOTE(review): the fbdiv bit-9 and prediv values below
		 * are written unshifted against their masks — correct
		 * only if those fields start at bit 0; confirm against
		 * the PHY register header.
		 */
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}
548 
/*
 * DDR3 PHY output-driver register code -> drive strength in ohm.
 * Sorted by descending resistance; the selection loops in set_ds_odt()
 * scan from the end and take the first entry whose ohm value is >= the
 * requested one.
 */
static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};
574 
575 static u16 d3_phy_odt_2_ohm[][2] = {
576 	{PHY_DDR3_RTT_DISABLE, 0},
577 	{PHY_DDR3_RTT_561ohm, 561},
578 	{PHY_DDR3_RTT_282ohm, 282},
579 	{PHY_DDR3_RTT_188ohm, 188},
580 	{PHY_DDR3_RTT_141ohm, 141},
581 	{PHY_DDR3_RTT_113ohm, 113},
582 	{PHY_DDR3_RTT_94ohm, 94},
583 	{PHY_DDR3_RTT_81ohm, 81},
584 	{PHY_DDR3_RTT_72ohm, 72},
585 	{PHY_DDR3_RTT_64ohm, 64},
586 	{PHY_DDR3_RTT_58ohm, 58},
587 	{PHY_DDR3_RTT_52ohm, 52},
588 	{PHY_DDR3_RTT_48ohm, 48},
589 	{PHY_DDR3_RTT_44ohm, 44},
590 	{PHY_DDR3_RTT_41ohm, 41},
591 	{PHY_DDR3_RTT_38ohm, 38},
592 	{PHY_DDR3_RTT_37ohm, 37},
593 	{PHY_DDR3_RTT_34ohm, 34},
594 	{PHY_DDR3_RTT_32ohm, 32},
595 	{PHY_DDR3_RTT_31ohm, 31},
596 	{PHY_DDR3_RTT_29ohm, 29},
597 	{PHY_DDR3_RTT_28ohm, 28},
598 	{PHY_DDR3_RTT_27ohm, 27},
599 	{PHY_DDR3_RTT_25ohm, 25}
600 };
601 
602 static u16 d4lp3_phy_drv_2_ohm[][2] = {
603 	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
604 	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
605 	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
606 	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
607 	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
608 	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
609 	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
610 	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
611 	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
612 	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
613 	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
614 	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
615 	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
616 	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
617 	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
618 	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
619 	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
620 	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
621 	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
622 	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
623 	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
624 	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
625 	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
626 };
627 
628 static u16 d4lp3_phy_odt_2_ohm[][2] = {
629 	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
630 	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
631 	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
632 	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
633 	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
634 	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
635 	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
636 	{PHY_DDR4_LPDDR3_RTT_85ohm, 58},
637 	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
638 	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
639 	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
640 	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
641 	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
642 	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
643 	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
644 	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
645 	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
646 	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
647 	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
648 	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
649 	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
650 	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
651 	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
652 	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
653 };
654 
655 static u16 lp4_phy_drv_2_ohm[][2] = {
656 	{PHY_LPDDR4_RON_501ohm, 501},
657 	{PHY_LPDDR4_RON_253ohm, 253},
658 	{PHY_LPDDR4_RON_168ohm, 168},
659 	{PHY_LPDDR4_RON_126ohm, 126},
660 	{PHY_LPDDR4_RON_101ohm, 101},
661 	{PHY_LPDDR4_RON_84ohm, 84},
662 	{PHY_LPDDR4_RON_72ohm, 72},
663 	{PHY_LPDDR4_RON_63ohm, 63},
664 	{PHY_LPDDR4_RON_56ohm, 56},
665 	{PHY_LPDDR4_RON_50ohm, 50},
666 	{PHY_LPDDR4_RON_46ohm, 46},
667 	{PHY_LPDDR4_RON_42ohm, 42},
668 	{PHY_LPDDR4_RON_38ohm, 38},
669 	{PHY_LPDDR4_RON_36ohm, 36},
670 	{PHY_LPDDR4_RON_33ohm, 33},
671 	{PHY_LPDDR4_RON_31ohm, 31},
672 	{PHY_LPDDR4_RON_29ohm, 29},
673 	{PHY_LPDDR4_RON_28ohm, 28},
674 	{PHY_LPDDR4_RON_26ohm, 26},
675 	{PHY_LPDDR4_RON_25ohm, 25},
676 	{PHY_LPDDR4_RON_24ohm, 24},
677 	{PHY_LPDDR4_RON_23ohm, 23},
678 	{PHY_LPDDR4_RON_22ohm, 22}
679 };
680 
681 static u16 lp4_phy_odt_2_ohm[][2] = {
682 	{PHY_LPDDR4_RTT_DISABLE, 0},
683 	{PHY_LPDDR4_RTT_604ohm, 604},
684 	{PHY_LPDDR4_RTT_303ohm, 303},
685 	{PHY_LPDDR4_RTT_202ohm, 202},
686 	{PHY_LPDDR4_RTT_152ohm, 152},
687 	{PHY_LPDDR4_RTT_122ohm, 122},
688 	{PHY_LPDDR4_RTT_101ohm, 101},
689 	{PHY_LPDDR4_RTT_87ohm,	87},
690 	{PHY_LPDDR4_RTT_78ohm, 78},
691 	{PHY_LPDDR4_RTT_69ohm, 69},
692 	{PHY_LPDDR4_RTT_62ohm, 62},
693 	{PHY_LPDDR4_RTT_56ohm, 56},
694 	{PHY_LPDDR4_RTT_52ohm, 52},
695 	{PHY_LPDDR4_RTT_48ohm, 48},
696 	{PHY_LPDDR4_RTT_44ohm, 44},
697 	{PHY_LPDDR4_RTT_41ohm, 41},
698 	{PHY_LPDDR4_RTT_39ohm, 39},
699 	{PHY_LPDDR4_RTT_37ohm, 37},
700 	{PHY_LPDDR4_RTT_35ohm, 35},
701 	{PHY_LPDDR4_RTT_33ohm, 33},
702 	{PHY_LPDDR4_RTT_32ohm, 32},
703 	{PHY_LPDDR4_RTT_30ohm, 30},
704 	{PHY_LPDDR4_RTT_29ohm, 29},
705 	{PHY_LPDDR4_RTT_27ohm, 27}
706 };
707 
708 static u32 lp4_odt_calc(u32 odt_ohm)
709 {
710 	u32 odt;
711 
712 	if (odt_ohm == 0)
713 		odt = LPDDR4_DQODT_DIS;
714 	else if (odt_ohm <= 40)
715 		odt = LPDDR4_DQODT_40;
716 	else if (odt_ohm <= 48)
717 		odt = LPDDR4_DQODT_48;
718 	else if (odt_ohm <= 60)
719 		odt = LPDDR4_DQODT_60;
720 	else if (odt_ohm <= 80)
721 		odt = LPDDR4_DQODT_80;
722 	else if (odt_ohm <= 120)
723 		odt = LPDDR4_DQODT_120;
724 	else
725 		odt = LPDDR4_DQODT_240;
726 
727 	return odt;
728 }
729 
730 static void *get_ddr_drv_odt_info(u32 dramtype)
731 {
732 	struct sdram_head_info_index_v2 *index =
733 		(struct sdram_head_info_index_v2 *)common_info;
734 	void *ddr_info = 0;
735 
736 	if (dramtype == DDR4)
737 		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
738 	else if (dramtype == DDR3)
739 		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
740 	else if (dramtype == LPDDR3)
741 		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
742 	else if (dramtype == LPDDR4)
743 		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
744 	else
745 		printascii("unsupported dram type\n");
746 	return ddr_info;
747 }
748 
749 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
750 			 u32 freq_mhz, u32 dst_fsp)
751 {
752 	void __iomem *pctl_base = dram->pctl;
753 	u32 ca_vref, dq_vref;
754 
755 	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
756 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
757 	else
758 		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
759 
760 	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
761 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
762 	else
763 		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
764 
765 	if (ca_vref < 100)
766 		ca_vref = 100;
767 	if (ca_vref > 420)
768 		ca_vref = 420;
769 
770 	if (ca_vref <= 300)
771 		ca_vref = (0 << 6) | (ca_vref - 100) / 4;
772 	else
773 		ca_vref = (1 << 6) | (ca_vref - 220) / 4;
774 
775 	if (dq_vref < 100)
776 		dq_vref = 100;
777 	if (dq_vref > 420)
778 		dq_vref = 420;
779 
780 	if (dq_vref <= 300)
781 		dq_vref = (0 << 6) | (dq_vref - 100) / 4;
782 	else
783 		dq_vref = (1 << 6) | (dq_vref - 220) / 4;
784 
785 	sw_set_req(dram);
786 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
787 			DDR_PCTL2_INIT6,
788 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
789 			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
790 
791 	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
792 			DDR_PCTL2_INIT7,
793 			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
794 			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
795 	sw_set_ack(dram);
796 }
797 
798 static void set_ds_odt(struct dram_info *dram,
799 		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
800 {
801 	void __iomem *phy_base = dram->phy;
802 	void __iomem *pctl_base = dram->pctl;
803 	u32 dramtype = sdram_params->base.dramtype;
804 	struct ddr2_3_4_lp2_3_info *ddr_info;
805 	struct lp4_info *lp4_info;
806 	u32 i, j, tmp;
807 	const u16 (*p_drv)[2];
808 	const u16 (*p_odt)[2];
809 	u32 drv_info, sr_info;
810 	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
811 	u32 phy_odt_ohm, dram_odt_ohm;
812 	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
813 	u32 phy_odt_up_en, phy_odt_dn_en;
814 	u32 sr_dq, sr_clk;
815 	u32 freq = sdram_params->base.ddr_freq;
816 	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
817 	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
818 	u32 phy_dq_drv = 0;
819 	u32 phy_odt_up = 0, phy_odt_dn = 0;
820 
821 	ddr_info = get_ddr_drv_odt_info(dramtype);
822 	lp4_info = (void *)ddr_info;
823 
824 	if (!ddr_info)
825 		return;
826 
827 	/* dram odt en freq control phy drv, dram odt and phy sr */
828 	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
829 		drv_info = ddr_info->drv_when_odtoff;
830 		dram_odt_ohm = 0;
831 		sr_info = ddr_info->sr_when_odtoff;
832 		phy_lp4_drv_pd_en =
833 			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
834 	} else {
835 		drv_info = ddr_info->drv_when_odten;
836 		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
837 		sr_info = ddr_info->sr_when_odten;
838 		phy_lp4_drv_pd_en =
839 			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
840 	}
841 	phy_dq_drv_ohm =
842 		DRV_INFO_PHY_DQ_DRV(drv_info);
843 	phy_clk_drv_ohm =
844 		DRV_INFO_PHY_CLK_DRV(drv_info);
845 	phy_ca_drv_ohm =
846 		DRV_INFO_PHY_CA_DRV(drv_info);
847 
848 	sr_dq = DQ_SR_INFO(sr_info);
849 	sr_clk = CLK_SR_INFO(sr_info);
850 
851 	/* phy odt en freq control dram drv and phy odt */
852 	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
853 		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
854 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
855 		phy_odt_ohm = 0;
856 		phy_odt_up_en = 0;
857 		phy_odt_dn_en = 0;
858 	} else {
859 		dram_drv_ohm =
860 			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
861 		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
862 		phy_odt_up_en =
863 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
864 		phy_odt_dn_en =
865 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
866 		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
867 	}
868 
869 	if (dramtype == LPDDR4) {
870 		if (phy_odt_ohm) {
871 			phy_odt_up_en = 0;
872 			phy_odt_dn_en = 1;
873 		}
874 		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
875 			dram_caodt_ohm = 0;
876 		else
877 			dram_caodt_ohm =
878 				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
879 	}
880 
881 	if (dramtype == DDR3) {
882 		p_drv = d3_phy_drv_2_ohm;
883 		p_odt = d3_phy_odt_2_ohm;
884 	} else if (dramtype == LPDDR4) {
885 		p_drv = lp4_phy_drv_2_ohm;
886 		p_odt = lp4_phy_odt_2_ohm;
887 	} else {
888 		p_drv = d4lp3_phy_drv_2_ohm;
889 		p_odt = d4lp3_phy_odt_2_ohm;
890 	}
891 
892 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
893 		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
894 			phy_dq_drv = **(p_drv + i);
895 			break;
896 		}
897 		if (i == 0)
898 			break;
899 	}
900 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
901 		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
902 			phy_clk_drv = **(p_drv + i);
903 			break;
904 		}
905 		if (i == 0)
906 			break;
907 	}
908 	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
909 		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
910 			phy_ca_drv = **(p_drv + i);
911 			break;
912 		}
913 		if (i == 0)
914 			break;
915 	}
916 	if (!phy_odt_ohm)
917 		phy_odt = 0;
918 	else
919 		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
920 			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
921 				phy_odt = **(p_odt + i);
922 				break;
923 			}
924 			if (i == 0)
925 				break;
926 		}
927 
928 	if (dramtype != LPDDR4) {
929 		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
930 			vref_inner = 0x80;
931 		else if (phy_odt_up_en)
932 			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
933 				     (dram_drv_ohm + phy_odt_ohm);
934 		else
935 			vref_inner = phy_odt_ohm * 128 /
936 				(phy_odt_ohm + dram_drv_ohm);
937 
938 		if (dramtype != DDR3 && dram_odt_ohm)
939 			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
940 				   (phy_dq_drv_ohm + dram_odt_ohm);
941 		else
942 			vref_out = 0x80;
943 	} else {
944 		/* for lp4 */
945 		if (phy_odt_ohm)
946 			vref_inner =
947 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
948 				 256) / 1000;
949 		else
950 			vref_inner =
951 				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
952 				 256) / 1000;
953 
954 		vref_out = 0x80;
955 	}
956 
957 	/* default ZQCALIB bypass mode */
958 	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
959 	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
960 	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
961 	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
962 	if (dramtype == LPDDR4) {
963 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
964 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
965 	} else {
966 		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
967 		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
968 	}
969 	/* clk / cmd slew rate */
970 	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
971 
972 	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
973 	if (phy_odt_up_en)
974 		phy_odt_up = phy_odt;
975 	if (phy_odt_dn_en)
976 		phy_odt_dn = phy_odt;
977 
978 	for (i = 0; i < 4; i++) {
979 		j = 0x110 + i * 0x10;
980 		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
981 		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
982 		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
983 		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
984 		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
985 
986 		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
987 				1 << 3, phy_lp4_drv_pd_en << 3);
988 		/* dq slew rate */
989 		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
990 				0x1f, sr_dq);
991 	}
992 
993 	/* reg_rx_vref_value_update */
994 	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
995 	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
996 
997 	/* RAM VREF */
998 	writel(vref_out, PHY_REG(phy_base, 0x105));
999 	if (dramtype == LPDDR3)
1000 		udelay(100);
1001 
1002 	if (dramtype == LPDDR4)
1003 		set_lp4_vref(dram, lp4_info, freq, dst_fsp);
1004 
1005 	if (dramtype == DDR3 || dramtype == DDR4) {
1006 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1007 				DDR_PCTL2_INIT3);
1008 		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1009 	} else {
1010 		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1011 				DDR_PCTL2_INIT4);
1012 		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1013 	}
1014 
1015 	if (dramtype == DDR3) {
1016 		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1017 		if (dram_drv_ohm == 34)
1018 			mr1_mr3 |= DDR3_DS_34;
1019 
1020 		if (dram_odt_ohm == 0)
1021 			mr1_mr3 |= DDR3_RTT_NOM_DIS;
1022 		else if (dram_odt_ohm <= 40)
1023 			mr1_mr3 |= DDR3_RTT_NOM_40;
1024 		else if (dram_odt_ohm <= 60)
1025 			mr1_mr3 |= DDR3_RTT_NOM_60;
1026 		else
1027 			mr1_mr3 |= DDR3_RTT_NOM_120;
1028 
1029 	} else if (dramtype == DDR4) {
1030 		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1031 		if (dram_drv_ohm == 48)
1032 			mr1_mr3 |= DDR4_DS_48;
1033 
1034 		if (dram_odt_ohm == 0)
1035 			mr1_mr3 |= DDR4_RTT_NOM_DIS;
1036 		else if (dram_odt_ohm <= 34)
1037 			mr1_mr3 |= DDR4_RTT_NOM_34;
1038 		else if (dram_odt_ohm <= 40)
1039 			mr1_mr3 |= DDR4_RTT_NOM_40;
1040 		else if (dram_odt_ohm <= 48)
1041 			mr1_mr3 |= DDR4_RTT_NOM_48;
1042 		else if (dram_odt_ohm <= 60)
1043 			mr1_mr3 |= DDR4_RTT_NOM_60;
1044 		else
1045 			mr1_mr3 |= DDR4_RTT_NOM_120;
1046 
1047 	} else if (dramtype == LPDDR3) {
1048 		if (dram_drv_ohm <= 34)
1049 			mr1_mr3 |= LPDDR3_DS_34;
1050 		else if (dram_drv_ohm <= 40)
1051 			mr1_mr3 |= LPDDR3_DS_40;
1052 		else if (dram_drv_ohm <= 48)
1053 			mr1_mr3 |= LPDDR3_DS_48;
1054 		else if (dram_drv_ohm <= 60)
1055 			mr1_mr3 |= LPDDR3_DS_60;
1056 		else if (dram_drv_ohm <= 80)
1057 			mr1_mr3 |= LPDDR3_DS_80;
1058 
1059 		if (dram_odt_ohm == 0)
1060 			lp3_odt_value = LPDDR3_ODT_DIS;
1061 		else if (dram_odt_ohm <= 60)
1062 			lp3_odt_value = LPDDR3_ODT_60;
1063 		else if (dram_odt_ohm <= 120)
1064 			lp3_odt_value = LPDDR3_ODT_120;
1065 		else
1066 			lp3_odt_value = LPDDR3_ODT_240;
1067 	} else {/* for lpddr4 */
1068 		/* MR3 for lp4 PU-CAL and PDDS */
1069 		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1070 		mr1_mr3 |= lp4_pu_cal;
1071 
1072 		tmp = lp4_odt_calc(dram_drv_ohm);
1073 		if (!tmp)
1074 			tmp = LPDDR4_PDDS_240;
1075 		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1076 
1077 		/* MR11 for lp4 ca odt, dq odt set */
1078 		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1079 			     DDR_PCTL2_INIT6);
1080 		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1081 
1082 		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1083 
1084 		tmp = lp4_odt_calc(dram_odt_ohm);
1085 		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1086 
1087 		tmp = lp4_odt_calc(dram_caodt_ohm);
1088 		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1089 		sw_set_req(dram);
1090 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1091 				DDR_PCTL2_INIT6,
1092 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1093 				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1094 		sw_set_ack(dram);
1095 
1096 		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1097 		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1098 			     DDR_PCTL2_INIT7);
1099 		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1100 		mr22 &= ~LPDDR4_SOC_ODT_MASK;
1101 
1102 		tmp = lp4_odt_calc(phy_odt_ohm);
1103 		mr22 |= tmp;
1104 		mr22 = mr22 |
1105 		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1106 			LPDDR4_ODTE_CK_SHIFT) |
1107 		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1108 			LPDDR4_ODTE_CS_SHIFT) |
1109 		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1110 			LPDDR4_ODTD_CA_SHIFT);
1111 
1112 		sw_set_req(dram);
1113 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1114 				DDR_PCTL2_INIT7,
1115 				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1116 				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1117 		sw_set_ack(dram);
1118 	}
1119 
1120 	if (dramtype == DDR4 || dramtype == DDR3) {
1121 		sw_set_req(dram);
1122 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1123 				DDR_PCTL2_INIT3,
1124 				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1125 				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1126 		sw_set_ack(dram);
1127 	} else {
1128 		sw_set_req(dram);
1129 		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1130 				DDR_PCTL2_INIT4,
1131 				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1132 				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1133 		sw_set_ack(dram);
1134 	}
1135 }
1136 
1137 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1138 				   struct rv1126_sdram_params *sdram_params)
1139 {
1140 	void __iomem *phy_base = dram->phy;
1141 	u32 dramtype = sdram_params->base.dramtype;
1142 	struct sdram_head_info_index_v2 *index =
1143 		(struct sdram_head_info_index_v2 *)common_info;
1144 	struct dq_map_info *map_info;
1145 
1146 	map_info = (struct dq_map_info *)((void *)common_info +
1147 		index->dq_map_index.offset * 4);
1148 
1149 	if (dramtype <= LPDDR4)
1150 		writel((map_info->byte_map[dramtype / 4] >>
1151 			((dramtype % 4) * 8)) & 0xff,
1152 		       PHY_REG(phy_base, 0x4f));
1153 
1154 	return 0;
1155 }
1156 
/*
 * phy_cfg() - base PHY configuration before data training
 *
 * Applies the board PHY register table, sets the PHY PLL, enables the
 * byte lanes matching the detected bus width, and pre-selects the vref
 * ranges used by the later CA/write training steps.
 */
static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	/* apply the {offset, value} PHY table, terminated by 0xFFFFFFFF */
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	/* find which physical lanes carry logical bytes 0 and 1 (2 bits each) */
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	/* enable only the byte lanes needed for the detected bus width */
	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt control by phy, enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training ca vref choose range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training PHY_0x7c[5], choose range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}
1202 
1203 static int update_refresh_reg(struct dram_info *dram)
1204 {
1205 	void __iomem *pctl_base = dram->pctl;
1206 	u32 ret;
1207 
1208 	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1209 	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1210 
1211 	return 0;
1212 }
1213 
1214 /*
1215  * rank = 1: cs0
1216  * rank = 2: cs1
1217  */
1218 int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
1219 {
1220 	u32 ret;
1221 	u32 i, temp;
1222 	u32 dqmap;
1223 
1224 	void __iomem *pctl_base = dram->pctl;
1225 	struct sdram_head_info_index_v2 *index =
1226 		(struct sdram_head_info_index_v2 *)common_info;
1227 	struct dq_map_info *map_info;
1228 
1229 	map_info = (struct dq_map_info *)((void *)common_info +
1230 		index->dq_map_index.offset * 4);
1231 
1232 	if (dramtype == LPDDR2)
1233 		dqmap = map_info->lp2_dq0_7_map;
1234 	else
1235 		dqmap = map_info->lp3_dq0_7_map;
1236 
1237 	pctl_read_mr(pctl_base, rank, mr_num);
1238 
1239 	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
1240 
1241 	if (dramtype != LPDDR4) {
1242 		temp = 0;
1243 		for (i = 0; i < 8; i++) {
1244 			temp = temp | (((ret >> i) & 0x1) <<
1245 				       ((dqmap >> (i * 4)) & 0xf));
1246 		}
1247 	} else {
1248 		ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
1249 	}
1250 
1251 	return ret;
1252 }
1253 
1254 /* before call this function autorefresh should be disabled */
1255 void send_a_refresh(struct dram_info *dram)
1256 {
1257 	void __iomem *pctl_base = dram->pctl;
1258 
1259 	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
1260 		continue;
1261 	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
1262 }
1263 
/*
 * enter_sr() - software-controlled self-refresh entry/exit
 * @en: non-zero to enter self-refresh, 0 to exit
 *
 * Sets or clears PWRCTL selfref_sw, then polls STAT until the
 * controller reports the corresponding operating mode.
 */
static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait for non-automatic self-refresh to actually be entered */
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		/* wait until the controller has left self-refresh */
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}
1286 
/*
 * record_dq_prebit() - latch per-group DQ pre-bit delay values
 *
 * For each of the 4 DQS groups, select every entry of dq_sel[] and copy
 * the left/right loop inverter-delay readback registers (+0x2e/+0x2f)
 * into the destination offsets given by dq_sel[][1] and dq_sel[][2].
 */
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}
1310 
/* Pulse PHY_0x70[4] so the PHY takes over the new RX de-skew values. */
static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* clear bits 1 and 6, raise the update bit 4, then drop it */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}
1320 
/* Pulse PHY_0xc[6] so the PHY takes over the new TX de-skew values. */
static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	/* clear reg_dq_wr_train_en (PHY_0x7a[1]) before updating */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}
1331 
/* Pulse PHY_0x22[6] so the PHY takes over the new CA de-skew values. */
static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}
1341 
1342 /*
1343  * dir: 0: de-skew = delta_*
1344  *	1: de-skew = reg val - delta_*
1345  * delta_dir: value for differential signal: clk/
1346  * delta_sig: value for single signal: ca/cmd
1347  */
1348 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1349 			     int delta_sig, u32 cs, u32 dramtype)
1350 {
1351 	void __iomem *phy_base = dram->phy;
1352 	u32 i, cs_en, tmp;
1353 	u32 dfi_lp_stat = 0;
1354 
1355 	if (cs == 0)
1356 		cs_en = 1;
1357 	else if (cs == 2)
1358 		cs_en = 2;
1359 	else
1360 		cs_en = 3;
1361 
1362 	if (dramtype == LPDDR4 &&
1363 	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1364 		dfi_lp_stat = 1;
1365 		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1366 	}
1367 	enter_sr(dram, 1);
1368 
1369 	for (i = 0; i < 0x20; i++) {
1370 		if (dir == DESKEW_MDF_ABS_VAL)
1371 			tmp = delta_sig;
1372 		else
1373 			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1374 			      delta_sig;
1375 		writel(tmp, PHY_REG(phy_base, 0x150 + i));
1376 	}
1377 
1378 	if (dir == DESKEW_MDF_ABS_VAL)
1379 		tmp = delta_dif;
1380 	else
1381 		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1382 		       delta_sig + delta_dif;
1383 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1384 	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1385 	if (dramtype == LPDDR4) {
1386 		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1387 		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1388 
1389 		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1390 		update_ca_prebit(dram);
1391 	}
1392 	enter_sr(dram, 0);
1393 
1394 	if (dfi_lp_stat)
1395 		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1396 
1397 }
1398 
1399 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1400 {
1401 	u32 i, j, offset = 0;
1402 	u32 min = 0x3f;
1403 	void __iomem *phy_base = dram->phy;
1404 	u32 byte_en;
1405 
1406 	if (signal == SKEW_TX_SIGNAL)
1407 		offset = 8;
1408 
1409 	if (signal == SKEW_CA_SIGNAL) {
1410 		for (i = 0; i < 0x20; i++)
1411 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1412 	} else {
1413 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1414 		for (j = offset; j < offset + rank * 4; j++) {
1415 			if (!((byte_en >> (j % 4)) & 1))
1416 				continue;
1417 			for (i = 0; i < 11; i++)
1418 				min = MIN(min,
1419 					  readl(PHY_REG(phy_base,
1420 							dqs_dq_skew_adr[j] +
1421 							i)));
1422 		}
1423 	}
1424 
1425 	return min;
1426 }
1427 
1428 static u32 low_power_update(struct dram_info *dram, u32 en)
1429 {
1430 	void __iomem *pctl_base = dram->pctl;
1431 	u32 lp_stat = 0;
1432 
1433 	if (en) {
1434 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1435 	} else {
1436 		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1437 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1438 	}
1439 
1440 	return lp_stat;
1441 }
1442 
1443 /*
1444  * signal:
1445  * dir: 0: de-skew = delta_*
1446  *	1: de-skew = reg val - delta_*
1447  * delta_dir: value for differential signal: dqs
1448  * delta_sig: value for single signal: dq/dm
1449  */
1450 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1451 			     int delta_dif, int delta_sig, u32 rank)
1452 {
1453 	void __iomem *phy_base = dram->phy;
1454 	u32 i, j, tmp, offset;
1455 	u32 byte_en;
1456 
1457 	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1458 
1459 	if (signal == SKEW_RX_SIGNAL)
1460 		offset = 0;
1461 	else
1462 		offset = 8;
1463 
1464 	for (j = offset; j < (offset + rank * 4); j++) {
1465 		if (!((byte_en >> (j % 4)) & 1))
1466 			continue;
1467 		for (i = 0; i < 0x9; i++) {
1468 			if (dir == DESKEW_MDF_ABS_VAL)
1469 				tmp = delta_sig;
1470 			else
1471 				tmp = delta_sig + readl(PHY_REG(phy_base,
1472 							dqs_dq_skew_adr[j] +
1473 							i));
1474 			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1475 		}
1476 		if (dir == DESKEW_MDF_ABS_VAL)
1477 			tmp = delta_dif;
1478 		else
1479 			tmp = delta_dif + readl(PHY_REG(phy_base,
1480 						dqs_dq_skew_adr[j] + 9));
1481 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1482 		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1483 	}
1484 	if (signal == SKEW_RX_SIGNAL)
1485 		update_dq_rx_prebit(dram);
1486 	else
1487 		update_dq_tx_prebit(dram);
1488 }
1489 
/*
 * data_training_rg() - read (DQS) gate training for one rank
 * @cs: rank to train (0 or 1)
 *
 * Temporarily overrides the per-byte ODT for non-LPDDR4 types, runs
 * the PHY gate-training engine, then restores the original ODT.
 * Returns 0 on success, -1 if the PHY flags an error (0x91 bit 5),
 * otherwise a non-zero mask of byte lanes that failed.
 */
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	/* save the current ODT settings so they can be restored below */
	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* bit 5 of 0x91 signals a training error; otherwise compare the
	 * per-byte done flags against the enabled byte lanes
	 */
	if (ret & 0x20)
		ret = -1;
	else
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}
1542 
/*
 * data_training_wl() - write leveling for one rank
 * @cs: rank to train (0 or 1)
 * @rank: total number of ranks present
 *
 * Programs the MR1 value into the PHY, masks the other rank's output
 * for DDR3/DDR4, then runs the PHY write-leveling engine and polls
 * until all enabled byte lanes report done. Hangs with an error
 * message if the engine does not finish within ~1ms.
 */
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* fetch the MR1 value from the controller's INIT3 for this fsp */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	/* poll until every enabled byte lane reports leveling done */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable another cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}
1605 
/*
 * 32-byte alternating 0xaa/0x55 test pattern.
 * NOTE(review): not referenced within this part of the file --
 * presumably consumed by training/test code elsewhere; confirm before
 * removing or changing. Values above 127 in a plain 'char' array rely
 * on implementation-defined signed-char conversion.
 */
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};
1612 
/*
 * data_training_rd() - automatic read training for one rank
 * @cs: rank to train (0 or 1); values > 1 are rejected
 * @mhz: current DRAM frequency (unused here apart from the signature)
 *
 * Configures the PHY refresh timing from the controller's RFSHTMG,
 * optionally installs the DDR4 DQ map, then runs the PHY's automatic
 * read-training engine and checks the result registers.
 * Returns 0 on success, -1 on timeout or training error.
 */
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	/* for DDR3 at the default vref, train with a slightly lower vref
	 * and restore the original value at the end
	 */
	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);
	/* only 1cs a time, 0:cs0 1 cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
				PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* Choose the read train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* Enable the auto train of the read train */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* Wait the train done. */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* Check the read train state */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* Exit the Read Training by setting */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore the original DDR3 vref adjusted above */
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}
1729 
/*
 * data_training_wr() - automatic write training for one rank
 * @cs: rank to train (0 or 1)
 * @mhz: current DRAM frequency; low-speed LPDDR3 gets a temporary
 *	 CL/CWL override for the duration of training
 * @dst_fsp: frequency set point whose vref result is recorded
 *
 * Runs the PHY's automatic DQ write-training engine and, for LPDDR4,
 * saves the trained write vref into fsp_param[] for later DFS use.
 * Returns 0 on success, -1 on a training error; hangs on timeout.
 */
static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz, u32 dst_fsp)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 cur_fsp;
	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;

	/* low-speed LPDDR3: temporarily force CL=8/CWL=4 and MR2=0x6,
	 * restored at the end of the function
	 */
	if (dramtype == LPDDR3 && mhz <= 400) {
		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
		cl = readl(PHY_REG(phy_base, offset));
		cwl = readl(PHY_REG(phy_base, offset + 2));

		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);

	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));

	/* config refresh timing */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);

	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
	/* 0: Use the write-leveling value. */
	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));

	/* PHY_0x7a [0] reg_dq_wr_train_auto */
	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	send_a_refresh(dram);

	/* poll the done flag (0x92 bit 7) */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write training timeout\n");
			while (1)
				;
		}
	}

	/* Check the write train state */
	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
		printascii("error: write training error\n");
		return -1;
	}

	/* PHY_0x7a [1] reg_dq_wr_train_en */
	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* save LPDDR4 write vref to fsp_param for dfs */
	if (dramtype == LPDDR4) {
		fsp_param[dst_fsp].vref_dq[cs] =
			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
		/* add range info */
		fsp_param[dst_fsp].vref_dq[cs] |=
			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
	}

	/* restore the LPDDR3 CL/CWL/MR2 values overridden above */
	if (dramtype == LPDDR3 && mhz <= 400) {
		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			       DDR_PCTL2_INIT3);
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
	}

	return 0;
}
1842 
1843 static int data_training(struct dram_info *dram, u32 cs,
1844 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1845 			 u32 training_flag)
1846 {
1847 	u32 ret = 0;
1848 
1849 	if (training_flag == FULL_TRAINING)
1850 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1851 				WRITE_TRAINING | READ_TRAINING;
1852 
1853 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1854 		ret = data_training_wl(dram, cs,
1855 				       sdram_params->base.dramtype,
1856 				       sdram_params->ch.cap_info.rank);
1857 		if (ret != 0)
1858 			goto out;
1859 	}
1860 
1861 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1862 		ret = data_training_rg(dram, cs,
1863 				       sdram_params->base.dramtype);
1864 		if (ret != 0)
1865 			goto out;
1866 	}
1867 
1868 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1869 		ret = data_training_rd(dram, cs,
1870 				       sdram_params->base.dramtype,
1871 				       sdram_params->base.ddr_freq);
1872 		if (ret != 0)
1873 			goto out;
1874 	}
1875 
1876 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1877 		ret = data_training_wr(dram, cs,
1878 				       sdram_params->base.dramtype,
1879 				       sdram_params->base.ddr_freq, dst_fsp);
1880 		if (ret != 0)
1881 			goto out;
1882 	}
1883 
1884 out:
1885 	return ret;
1886 }
1887 
/*
 * get_wrlvl_val() - run write leveling and record per-byte results
 *
 * Presets all CA/CLK de-skews to 0x1f, runs write leveling on each
 * present rank, then stores the per-byte leveling values (minus the
 * preset clock skew) into the global wrlvl_result[][] table.
 * Returns the OR of the individual training results.
 */
static int get_wrlvl_val(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params)
{
	u32 i, j, clk_skew;
	void __iomem *phy_base = dram->phy;
	u32 lp_stat;
	int ret;

	/* low-power states must be off while training */
	lp_stat = low_power_update(dram, 0);

	clk_skew = 0x1f;
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
			 sdram_params->base.dramtype);

	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);

	/* j: rank, i: byte lane */
	for (j = 0; j < 2; j++)
		for (i = 0; i < 4; i++)
			wrlvl_result[j][i] =
				readl(PHY_REG(phy_base,
					      wrlvl_result_offset[j][i])) -
				clk_skew;

	low_power_update(dram, lp_stat);

	return ret;
}
1917 
1918 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1919 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1920 				      void __iomem *phy_base, u8 cs_num)
1921 {
1922 	int i;
1923 
1924 	result->cs_num = cs_num;
1925 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1926 			  PHY_DQ_WIDTH_MASK;
1927 	for (i = 0; i < FSP_NUM; i++)
1928 		result->fsp_mhz[i] = 0;
1929 }
1930 
1931 static void save_rw_trn_min_max(void __iomem *phy_base,
1932 				struct cs_rw_trn_result *rd_result,
1933 				struct cs_rw_trn_result *wr_result,
1934 				u8 byte_en)
1935 {
1936 	u16 phy_ofs;
1937 	u8 dqs;
1938 	u8 dq;
1939 
1940 	for (dqs = 0; (byte_en & BIT(dqs)) != 0 && dqs < BYTE_NUM; dqs++) {
1941 		/* Channel A or B (low or high 16 bit) */
1942 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
1943 		/* low or high 8 bit */
1944 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
1945 		for (dq = 0; dq < 8; dq++) {
1946 			rd_result->dqs[dqs].dq_min[dq] =
1947 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
1948 			rd_result->dqs[dqs].dq_max[dq] =
1949 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
1950 			wr_result->dqs[dqs].dq_min[dq] =
1951 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
1952 			wr_result->dqs[dqs].dq_max[dq] =
1953 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
1954 		}
1955 	}
1956 }
1957 
1958 static void save_rw_trn_deskew(void __iomem *phy_base,
1959 			       struct fsp_rw_trn_result *result, u8 cs_num,
1960 			       int min_val, bool rw)
1961 {
1962 	u16 phy_ofs;
1963 	u8 cs;
1964 	u8 dq;
1965 
1966 	result->min_val = min_val;
1967 
1968 	for (cs = 0; cs < cs_num; cs++) {
1969 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
1970 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
1971 		for (dq = 0; dq < 8; dq++) {
1972 			result->cs[cs].dqs[0].dq_deskew[dq] =
1973 				readb(PHY_REG(phy_base, phy_ofs + dq));
1974 			result->cs[cs].dqs[1].dq_deskew[dq] =
1975 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
1976 			result->cs[cs].dqs[2].dq_deskew[dq] =
1977 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
1978 			result->cs[cs].dqs[3].dq_deskew[dq] =
1979 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
1980 		}
1981 
1982 		result->cs[cs].dqs[0].dqs_deskew =
1983 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
1984 		result->cs[cs].dqs[1].dqs_deskew =
1985 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
1986 		result->cs[cs].dqs[2].dqs_deskew =
1987 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
1988 		result->cs[cs].dqs[3].dqs_deskew =
1989 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
1990 	}
1991 }
1992 
/*
 * Mark the training result valid and copy it to the fixed DRAM address
 * (RW_TRN_RESULT_ADDR) where the ddr_test_tool command expects it.
 */
static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
{
	result->flag = DDR_DQ_EYE_FLAG;
	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
}
1998 #endif
1999 
/*
 * Full training sequence for a high-frequency set point @fsp:
 * apply the write-leveling derived clk/CA deskew, run gate/read/write
 * training per rank, then normalize the per-bit deskew results so the
 * minimum value becomes zero, and finally re-run gate training.
 *
 * Returns 0 on success, non-zero if any training step failed.
 */
static int high_freq_training(struct dram_info *dram,
			      struct rv1126_sdram_params *sdram_params,
			      u32 fsp)
{
	u32 i, j;
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	int min_val;
	int dqs_skew, clk_skew, ca_skew;
	int ret;

	/* average the write-leveling results over all ranks and bytes */
	dqs_skew = 0;
	for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++)
			dqs_skew += wrlvl_result[j][i];
	dqs_skew = dqs_skew / (sdram_params->ch.cap_info.rank *
			       ARRAY_SIZE(wrlvl_result[0]));

	/* center the clock against the average DQS skew (midpoint 0x20) */
	clk_skew = 0x20 - dqs_skew;
	dqs_skew = 0x20;

	if (dramtype == LPDDR4) {
		clk_skew = 0;
		ca_skew = 0;
	} else if (dramtype == LPDDR3) {
		ca_skew = clk_skew - 4;
	} else {
		ca_skew = clk_skew;
	}
	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
			 dramtype);

	/* program cs0 per-byte write-leveling values, then train cs0 */
	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
			    READ_TRAINING | WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
			    &rw_trn_result.wr_fsp[fsp].cs[0],
			    rw_trn_result.byte_en);
#endif
	/* repeat with the cs1 write-leveling values if a second rank exists */
	if (sdram_params->ch.cap_info.rank == 2) {
		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
		ret |= data_training(dram, 1, sdram_params, fsp,
				     READ_GATE_TRAINING | READ_TRAINING |
				     WRITE_TRAINING);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
				    &rw_trn_result.wr_fsp[fsp].cs[1],
				    rw_trn_result.byte_en);
#endif
	}
	if (ret)
		goto out;

	record_dq_prebit(dram);

	/* shift RX deskew so the smallest trained value becomes zero */
	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
				sdram_params->ch.cap_info.rank) * -1;
	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_RX_SIGNAL);
#endif

	/* same normalization for TX, bounded by the CA minimum too */
	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
				    sdram_params->ch.cap_info.rank),
		      get_min_value(dram, SKEW_CA_SIGNAL,
				    sdram_params->ch.cap_info.rank)) * -1;

	/* clk = 0, rx all skew -7, tx - min_value */
	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
			 dramtype);

	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
			 min_val, min_val, sdram_params->ch.cap_info.rank);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
			   SKEW_TX_SIGNAL);
#endif

	/* re-run gate training after the deskew values were rewritten */
	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
	if (sdram_params->ch.cap_info.rank == 2)
		ret |= data_training(dram, 1, sdram_params, 0,
				     READ_GATE_TRAINING);
out:
	return ret;
}
2097 
/*
 * Program the address-mapping configuration into the server msch and
 * clear bits [1:0] of GRF noc_con0 (bit semantics per RV1126 GRF spec,
 * not visible here).
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->deviceconf);
	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
}
2103 
2104 static void update_noc_timing(struct dram_info *dram,
2105 			      struct rv1126_sdram_params *sdram_params)
2106 {
2107 	void __iomem *pctl_base = dram->pctl;
2108 	u32 bw, bl;
2109 
2110 	bw = 8 << sdram_params->ch.cap_info.bw;
2111 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2112 
2113 	/* update the noc timing related to data bus width */
2114 	if ((bw / 8 * bl) == 16)
2115 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2116 	else if ((bw / 8 * bl) == 32)
2117 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2118 	else if ((bw / 8 * bl) == 64)
2119 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2120 	else
2121 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2122 
2123 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2124 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2125 
2126 	if (sdram_params->base.dramtype == LPDDR4) {
2127 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2128 			(bw == 16) ? 0x1 : 0x2;
2129 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2130 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2131 	}
2132 
2133 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2134 	       &dram->msch->ddrtiminga0);
2135 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2136 	       &dram->msch->ddrtimingb0);
2137 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2138 	       &dram->msch->ddrtimingc0);
2139 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2140 	       &dram->msch->devtodev0);
2141 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2142 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2143 	       &dram->msch->ddr4timing);
2144 }
2145 
/*
 * Final per-channel configuration after capacity detection: program
 * ddrconfig into the msch, encode the detected geometry into the
 * pmugrf OS registers 2/3, and report the per-CS capacity to the msch.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;
	u64 cs_cap[2];
	u32 cs_pst;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->rank == 2) {
		/* cs0/cs1 boundary bit = ADDRMAP0.cs_bit0 + 6 + 2 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		/*
		 * NOTE(review): threshold here is 28 while ddr_set_atags
		 * uses 27 for the same boundary; also `1 << cs_pst` is an
		 * int shift — for cs_pst >= 31 this overflows. Confirm
		 * intended against the address-map layout.
		 */
		if (cs_pst > 28)
			cs_cap[0] = 1 << cs_pst;
	}

	/* devicesize fields are per-CS capacity in units of 64 MiB */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
			(((cs_cap[0] >> 20) / 64) & 0xff),
			&dram->msch->devicesize);
	update_noc_timing(dram, sdram_params);
}
2178 
/*
 * Enable DRAM low-power features: hardware low-power entry via the
 * DDR GRF, plus controller-side power-down / self-refresh depending on
 * the configured pd/sr idle counts.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	u32 grf_lp_con;

	/* value from Rockchip BSP; per-bit meaning not documented here */
	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);

	/*
	 * Select the DRAM-type-specific low-power enable bit; the 0x7 in
	 * the upper half-word is the write-enable mask for bits [2:0].
	 */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);

	/* enable sr, pd */
	if (dram->pd_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (dram->sr_idle == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
2209 
/*
 * Publish boot information as ATAGs for the next stage: debug-serial
 * parameters and the detected DDR memory layout.
 */
static void ddr_set_atags(struct dram_info *dram,
			  struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	void __iomem *pctl_base = dram->pctl;
	struct tag_serial t_serial;
	struct tag_ddr_mem t_ddrmem;
	struct tag_soc_info t_socinfo;
	u64 cs_cap[2];
	u32 cs_pst = 0;

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	memset(&t_serial, 0, sizeof(struct tag_serial));

	t_serial.version = 0;
	t_serial.enable = 1;
	t_serial.addr = CONFIG_DEBUG_UART_BASE;
	t_serial.baudrate = CONFIG_BAUDRATE;
	t_serial.m_mode = SERIAL_M_MODE_M0;
	t_serial.id = 2;

	/* wipe any previous tag list before writing fresh tags */
	atags_destroy();
	atags_set_tag(ATAG_SERIAL,  &t_serial);

	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
	/* a 3/4-row die only provides 3/4 of the computed capacity */
	if (cap_info->row_3_4) {
		cs_cap[0] =  cs_cap[0] * 3 / 4;
		cs_cap[1] =  cs_cap[1] * 3 / 4;
	}
	t_ddrmem.version = 0;
	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
	if (cs_cap[1]) {
		/* cs0/cs1 boundary bit = ADDRMAP0.cs_bit0 + 6 + 2 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
	}

	/*
	 * NOTE(review): threshold 27 here differs from the 28 used in
	 * dram_all_config for the same boundary — confirm intended.
	 */
	if (cs_cap[1] && cs_pst > 27) {
		t_ddrmem.count = 2;
		t_ddrmem.bank[1] = 1 << cs_pst;
		t_ddrmem.bank[2] = cs_cap[0];
		t_ddrmem.bank[3] = cs_cap[1];
	} else {
		t_ddrmem.count = 1;
		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
	}

	atags_set_tag(ATAG_DDR_MEM,  &t_ddrmem);

	/*
	 * NOTE(review): t_socinfo is filled in but never committed with
	 * atags_set_tag(ATAG_SOC_INFO, ...) — dead code or missing call;
	 * confirm against the atags consumer before changing.
	 */
	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
	t_socinfo.version = 0;
	t_socinfo.name = 0x1126;
}
2265 
2266 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2267 {
2268 	u32 split;
2269 
2270 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2271 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2272 		split = 0;
2273 	else
2274 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2275 			SPLIT_SIZE_MASK;
2276 
2277 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2278 			     &sdram_params->base, split);
2279 }
2280 
/*
 * Core SDRAM bring-up: clock/reset sequencing, PHY and PCTL
 * configuration, mode-register programming, and initial gate training.
 *
 * @post_init: 0 for the first probing pass (quiet failures, cs1 not
 *             checked), non-zero for the final pass after capacity
 *             detection (failures are reported, cs1 is verified).
 *
 * Returns 0 on success, -1 on training failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 post_init)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 ddr4_vref;
	u32 mr_tmp;
	u32 mr_tmp;

	rkclk_configure_ddr(dram, sdram_params);

	/* staged reset release: assert all, then release one by one */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);

	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	phy_cfg(dram, sdram_params);

	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);

	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
		 dram->sr_idle, dram->pd_idle);

	if (sdram_params->ch.cap_info.bw == 2)
		/* 32bit interface use pageclose */
		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI for extended-temperature operation */
	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + DDR_PCTL2_RFSHTMG);
#endif

	/* set frequency_mode */
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	/* set target_frequency to Frequency 0 */
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);

	set_ds_odt(dram, sdram_params, 0);
	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);

	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);

	/* wait until the controller leaves the init operating mode */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
	} else if (sdram_params->base.dramtype == LPDDR4) {
		/* replay the LPDDR4 MRs the controller init stored in INIT6/7 */
		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);

		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      LPDDR4);
	}

	if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
		if (post_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}

	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
		if (data_training(dram, 1, sdram_params, 0,
				  READ_GATE_TRAINING) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
	}

	if (sdram_params->base.dramtype == DDR4) {
		/* PHY reg 0x105 holds vref in steps; scale factor 39 per BSP */
		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
				  sdram_params->base.dramtype);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
2384 
/*
 * Probe the DRAM geometry (column/bank/row counts, rank, bus width)
 * and fill sdram_params->ch.cap_info.
 *
 * For non-LPDDR4 types geometry is found by test-pattern probing; for
 * LPDDR4 it is decoded from MR8 (density field).  Rank and bus width
 * are then determined by re-running gate training.
 *
 * @channel is currently unused.  PWRCTL is temporarily cleared so
 * low-power entry cannot interfere with the probing.
 *
 * Returns 0 on success, (u64)-1 on detection failure.
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rv1126_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 mr8;

	u32 bktmp;
	u32 coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;
	u32 pwrctl;

	cap_info->bw = bw;
	if (dram_type != LPDDR4) {
		if (dram_type != DDR4) {
			/* DDR3/LPDDR2/LPDDR3: probe col, bank, dbw */
			coltmp = 12;
			bktmp = 3;
			if (dram_type == LPDDR2)
				rowtmp = 15;
			else
				rowtmp = 16;

			if (sdram_detect_col(cap_info, coltmp) != 0)
				goto cap_err;

			sdram_detect_bank(cap_info, coltmp, bktmp);
			sdram_detect_dbw(cap_info, dram_type);
		} else {
			/* DDR4: fixed col/bank, probe bank groups */
			coltmp = 10;
			bktmp = 4;
			rowtmp = 17;

			cap_info->col = 10;
			cap_info->bk = 2;
			sdram_detect_bg(cap_info, coltmp);
		}

		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
			goto cap_err;

		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
	} else {
		/* LPDDR4: decode geometry from MR8 density bits [5:2] */
		mr8 = (read_mr(dram, 1, 8, dram_type) >> 2) & 0xf;
		cap_info->col = 10;
		cap_info->bk = 3;
		cap_info->cs0_row = 14 + (mr8 + 1) / 2;
		if (mr8 % 2)
			cap_info->row_3_4 = 1;
		else
			cap_info->row_3_4 = 0;
		cap_info->dbw = 1;
		cap_info->bw = 2;
	}

	/* disable low-power transitions while probing rank/width */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* rank 2 exists iff gate training passes on cs1 */
	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	if (dram_type != LPDDR4) {
		/* enable all byte lanes, then check if 32-bit works */
		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);

		if (data_training(dram, 0, sdram_params, 0,
				  READ_GATE_TRAINING) == 0)
			cap_info->bw = 2;
		else
			cap_info->bw = 1;
	}

	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	/* assume cs1 mirrors cs0 for now; refined by dram_detect_cs1_row */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
2478 
/*
 * Detect the actual row count of chip-select 1 by pattern-probing
 * addresses above the cs0/cs1 boundary: for each candidate row count,
 * write PATTERN at the address whose top row bit is set and verify it
 * does not alias back onto the cs1 base.
 *
 * Returns the detected cs1 row count, or 0 if rank != 2 or no row
 * count down to 13 verified.
 */
static int dram_detect_cs1_row(struct dram_info *dram,
			       struct rv1126_sdram_params *sdram_params,
			       unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ret = 0;
	void __iomem *test_addr;
	u32 row, bktmp, coltmp, bw;
	u64 cs0_cap;
	u32 byte_mask;
	u32 cs_pst;
	u32 cs_add = 0;
	u32 max_row;

	if (cap_info->rank == 2) {
		/* cs0/cs1 boundary bit = ADDRMAP0.cs_bit0 + 6 + 2 */
		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
			6 + 2;
		if (cs_pst < 28)
			cs_add = 1;

		cs0_cap = 1 << cs_pst;

		/* DDR4 folds bank-group bits into the bank count */
		if (sdram_params->base.dramtype == DDR4) {
			if (cap_info->dbw == 0)
				bktmp = cap_info->bk + 2;
			else
				bktmp = cap_info->bk + 1;
		} else {
			bktmp = cap_info->bk;
		}
		bw = cap_info->bw;
		coltmp = cap_info->col;

		if (bw == 2)
			byte_mask = 0xFFFF;
		else
			byte_mask = 0xFF;

		/* limit so test_addr stays inside the 32-bit address space */
		max_row = (cs_pst == 31) ? 30 : 31;

		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;

		row = (cap_info->cs0_row > max_row) ? max_row :
			cap_info->cs0_row;

		for (; row > 12; row--) {
			/*
			 * NOTE(review): the "- 1ul" is inside the shift
			 * count, i.e. 1ul << (row + bktmp + coltmp +
			 * cs_add + bw - 1); only the value matters here —
			 * confirm the intent matches the address layout.
			 */
			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
				    (u32)cs0_cap +
				    (1ul << (row + bktmp + coltmp +
					     cs_add + bw - 1ul)));

			/* zero the cs1 base, write PATTERN at the probe */
			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
			writel(PATTERN, test_addr);

			/* row count is valid if no aliasing occurred */
			if (((readl(test_addr) & byte_mask) ==
			     (PATTERN & byte_mask)) &&
			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
			      byte_mask) == 0)) {
				ret = row;
				break;
			}
		}
	}

	return ret;
}
2546 
2547 /* return: 0 = success, other = fail */
2548 static int sdram_init_detect(struct dram_info *dram,
2549 			     struct rv1126_sdram_params *sdram_params)
2550 {
2551 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2552 	u32 ret;
2553 	u32 sys_reg = 0;
2554 	u32 sys_reg3 = 0;
2555 	struct sdram_head_info_index_v2 *index =
2556 		(struct sdram_head_info_index_v2 *)common_info;
2557 	struct dq_map_info *map_info;
2558 
2559 	map_info = (struct dq_map_info *)((void *)common_info +
2560 		index->dq_map_index.offset * 4);
2561 
2562 	if (sdram_init_(dram, sdram_params, 0)) {
2563 		if (sdram_params->base.dramtype == DDR3) {
2564 			clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2565 					((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2566 					(0x0 << 0)) << 24);
2567 			if (sdram_init_(dram, sdram_params, 0))
2568 				return -1;
2569 		} else {
2570 			return -1;
2571 		}
2572 	}
2573 
2574 	if (sdram_params->base.dramtype == DDR3) {
2575 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2576 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2577 			return -1;
2578 	}
2579 
2580 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2581 		return -1;
2582 
2583 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2584 				   sdram_params->base.dramtype);
2585 	ret = sdram_init_(dram, sdram_params, 1);
2586 	if (ret != 0)
2587 		goto out;
2588 
2589 	cap_info->cs1_row =
2590 		dram_detect_cs1_row(dram, sdram_params, 0);
2591 	if (cap_info->cs1_row) {
2592 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2593 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2594 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2595 				    sys_reg, sys_reg3, 0);
2596 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2597 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2598 	}
2599 
2600 	sdram_detect_high_row(cap_info);
2601 
2602 out:
2603 	return ret;
2604 }
2605 
2606 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2607 {
2608 	u32 i;
2609 	u32 offset = 0;
2610 	struct ddr2_3_4_lp2_3_info *ddr_info;
2611 
2612 	if (!freq_mhz) {
2613 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2614 		if (ddr_info)
2615 			freq_mhz =
2616 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2617 				DDR_FREQ_MASK;
2618 		else
2619 			freq_mhz = 0;
2620 	}
2621 
2622 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2623 		if (sdram_configs[i].base.ddr_freq == 0 ||
2624 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2625 			break;
2626 	}
2627 	offset = i == 0 ? 0 : i - 1;
2628 
2629 	return &sdram_configs[offset];
2630 }
2631 
/*
 * PCTL registers that must be copied into the target frequency set by
 * pre_set_rate().  NOTE: the search in pre_set_rate() resumes from the
 * previous match (`find = j`), so this list must stay in the same
 * order as the registers appear in sdram_params->pctl_regs.
 */
static const u16 pctl_need_update_reg[] = {
	DDR_PCTL2_RFSHTMG,
	DDR_PCTL2_INIT3,
	DDR_PCTL2_INIT4,
	DDR_PCTL2_INIT6,
	DDR_PCTL2_INIT7,
	DDR_PCTL2_DRAMTMG0,
	DDR_PCTL2_DRAMTMG1,
	DDR_PCTL2_DRAMTMG2,
	DDR_PCTL2_DRAMTMG3,
	DDR_PCTL2_DRAMTMG4,
	DDR_PCTL2_DRAMTMG5,
	DDR_PCTL2_DRAMTMG6,
	DDR_PCTL2_DRAMTMG7,
	DDR_PCTL2_DRAMTMG8,
	DDR_PCTL2_DRAMTMG9,
	DDR_PCTL2_DRAMTMG12,
	DDR_PCTL2_DRAMTMG13,
	DDR_PCTL2_DRAMTMG14,
	DDR_PCTL2_ZQCTL0,
	DDR_PCTL2_DFITMG0,
	DDR_PCTL2_ODTCFG
};
2655 
/*
 * PHY timing registers (CL/CWL/AL group) copied to the target
 * frequency window by pre_set_rate(); same ordering requirement as
 * pctl_need_update_reg[].
 */
static const u16 phy_need_update_reg[] = {
	0x14,
	0x18,
	0x1c
};
2661 
/*
 * Prepare a frequency switch: copy the new parameter set's timing
 * registers into the PCTL frequency-set @dst_fsp and the matching PHY
 * register window, program drive/ODT, and (for LPDDR4) mirror the mode
 * registers into both the DRAM and the PHY shadow registers.
 *
 * @dst_fsp_lp4 selects the LPDDR4 FSP-WR/FSP-OP encoding in MR13.
 */
static void pre_set_rate(struct dram_info *dram,
			 struct rv1126_sdram_params *sdram_params,
			 u32 dst_fsp, u32 dst_fsp_lp4)
{
	u32 i, j, find;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 phy_offset;
	u32 mr_tmp;
	u32 dramtype = sdram_params->base.dramtype;

	sw_set_req(dram);
	/* pctl timing update */
	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
		/*
		 * Resume the table scan from the previous hit: assumes
		 * pctl_need_update_reg[] is ordered like pctl_regs.
		 */
		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->pctl_regs.pctl[j][0] ==
			    pctl_need_update_reg[i]) {
				writel(sdram_params->pctl_regs.pctl[j][1],
				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       pctl_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	u32 tmp, trefi;

	/* halve tREFI for extended-temperature operation */
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
	trefi = (tmp >> 16) & 0xfff;
	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
#endif

	sw_set_ack(dram);

	/* phy timing update */
	if (dst_fsp == 0)
		phy_offset = 0;
	else
		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
	/* cl cwl al update */
	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
		     j++) {
			if (sdram_params->phy_regs.phy[j][0] ==
			    phy_need_update_reg[i]) {
				writel(sdram_params->phy_regs.phy[j][1],
				       phy_base + phy_offset +
				       phy_need_update_reg[i]);
				find = j;
				break;
			}
		}
	}

	set_ds_odt(dram, sdram_params, dst_fsp);
	if (dramtype == LPDDR4) {
		/*
		 * Write each LPDDR4 MR to the DRAM and mirror its value
		 * into the PHY shadow registers (0x17-0x1d).
		 */
		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT4);
		/* MR13 */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
				      ((0x2 << 6) >> dst_fsp_lp4),
				       PHY_REG(phy_base, 0x1b));
		/* MR3 */
		pctl_write_mr(dram->pctl, 3, 3,
			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x19));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT3);
		/* MR1 */
		pctl_write_mr(dram->pctl, 3, 1,
			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
			      PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x17));
		/* MR2 */
		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x18));

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT6);
		/* MR11 */
		pctl_write_mr(dram->pctl, 3, 11,
			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1a));
		/* MR12 */
		pctl_write_mr(dram->pctl, 3, 12,
			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
			      dramtype);

		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			       DDR_PCTL2_INIT7);
		/* MR22 */
		pctl_write_mr(dram->pctl, 3, 22,
			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1d));
		/* MR14 */
		pctl_write_mr(dram->pctl, 3, 14,
			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
			      dramtype);
		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
		       PHY_REG(phy_base, 0x1c));
	}

	update_noc_timing(dram, sdram_params);
}
2787 
/*
 * Snapshot everything the runtime frequency-scaling code needs for
 * frequency set @dst_fsp into the global fsp_param[] table: ODT/drive
 * settings read back from the PHY, the DRAM-type-specific ds/odt mode
 * register fields, and the NoC timings.  The entry is marked valid
 * with FSP_FLAG.
 */
static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
			   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
	u32 temp, temp1;
	struct ddr2_3_4_lp2_3_info *ddr_info;

	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);

	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;

	/* LPDDR4 read ODT is pull-down only; others per the odt_info table */
	if (sdram_params->base.dramtype == LPDDR4) {
		p_fsp_param->rd_odt_up_en = 0;
		p_fsp_param->rd_odt_down_en = 1;
	} else {
		p_fsp_param->rd_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		p_fsp_param->rd_odt_down_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
	}

	/* read back the effective ODT/drive values from the PHY */
	if (p_fsp_param->rd_odt_up_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
	else if (p_fsp_param->rd_odt_down_en)
		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
	else
		p_fsp_param->rd_odt = 0;
	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));

	/* decode ds/odt fields from the type-specific mode registers */
	if (sdram_params->base.dramtype == DDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == DDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT3);
		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR3) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & 0xf;

		p_fsp_param->dq_odt = lp3_odt_value;
		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
	} else if (sdram_params->base.dramtype == LPDDR4) {
		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT4);
		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;

		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;

		/*
		 * Take the midpoint of the trained CA vref windows
		 * (max of lower bounds, min of upper bounds) per channel,
		 * and keep the range-select bit from PHY reg 0x1e.
		 */
		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
			   readl(PHY_REG(phy_base, 0x3ce)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
			    readl(PHY_REG(phy_base, 0x3de)));
		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
			   readl(PHY_REG(phy_base, 0x3cf)));
		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
			    readl(PHY_REG(phy_base, 0x3df)));
		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
		p_fsp_param->vref_ca[0] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
		p_fsp_param->vref_ca[1] |=
			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));

		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
					      3) & 0x1;
	}

	p_fsp_param->noc_timings.ddrtiminga0 =
		sdram_params->ch.noc_timings.ddrtiminga0;
	p_fsp_param->noc_timings.ddrtimingb0 =
		sdram_params->ch.noc_timings.ddrtimingb0;
	p_fsp_param->noc_timings.ddrtimingc0 =
		sdram_params->ch.noc_timings.ddrtimingc0;
	p_fsp_param->noc_timings.devtodev0 =
		sdram_params->ch.noc_timings.devtodev0;
	p_fsp_param->noc_timings.ddrmode =
		sdram_params->ch.noc_timings.ddrmode;
	p_fsp_param->noc_timings.ddr4timing =
		sdram_params->ch.noc_timings.ddr4timing;
	p_fsp_param->noc_timings.agingx0 =
		sdram_params->ch.noc_timings.agingx0;
	p_fsp_param->noc_timings.aging0 =
		sdram_params->ch.noc_timings.aging0;
	p_fsp_param->noc_timings.aging1 =
		sdram_params->ch.noc_timings.aging1;
	p_fsp_param->noc_timings.aging2 =
		sdram_params->ch.noc_timings.aging2;
	p_fsp_param->noc_timings.aging3 =
		sdram_params->ch.noc_timings.aging3;

	p_fsp_param->flag = FSP_FLAG;
}
2901 
2902 #ifndef CONFIG_SPL_KERNEL_BOOT
2903 static void copy_fsp_param_to_ddr(void)
2904 {
2905 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
2906 	       sizeof(fsp_param));
2907 }
2908 #endif
2909 
/*
 * Switch the DRAM to a new clock rate.
 *
 * @dram:        driver state (pctl/phy/cru/grf register bases)
 * @sdram_params: current configuration; only rank/bw capacity info is
 *               carried over into the new per-frequency config
 * @freq:        target frequency in MHz
 * @cur_freq:    current frequency in MHz (not referenced in this body)
 * @dst_fsp:     destination controller frequency set point (0..3), selects
 *               the UMCTL2_REGS_FREQ() register copy to program/read
 * @dst_fsp_lp4: LPDDR4 FSP select written into MR13 bit7
 * @training_en: NOTE(review): not referenced in this body —
 *               high_freq_training() is called unconditionally; confirm
 *               whether this parameter is vestigial
 *
 * The sequence below is order-sensitive: the DRAM is parked in
 * self-refresh while the DPLL and PHY PLLs are reprogrammed, then mode
 * registers are rewritten for the new rate and training is re-run.
 */
void ddr_set_rate(struct dram_info *dram,
		  struct rv1126_sdram_params *sdram_params,
		  u32 freq, u32 cur_freq, u32 dst_fsp,
		  u32 dst_fsp_lp4, u32 training_en)
{
	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
	u32 mr_tmp;
	u32 lp_stat;
	u32 dramtype = sdram_params->base.dramtype;
	struct rv1126_sdram_params *sdram_params_new;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* Disable low-power features, remembering the previous state */
	lp_stat = low_power_update(dram, 0);
	/* Fetch the canned timing config for the target frequency and
	 * keep the probed rank/width from the live configuration.
	 */
	sdram_params_new = get_default_sdram_config(freq);
	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;

	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);

	/* Spin until the controller is no longer in self-refresh mode */
	while ((readl(pctl_base + DDR_PCTL2_STAT) &
			 PCTL2_OPERATING_MODE_MASK) ==
			 PCTL2_OPERATING_MODE_SR)
		continue;

	/*
	 * Decide whether the destination rate runs with the DRAM DLL off,
	 * from MR1 bit0 in the destination INIT3 copy (DDR3: 1 = DLL
	 * disable; DDR4: inverted sense — assumed per JEDEC MR1, confirm).
	 */
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	/* Same check for the currently active frequency set point */
	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	/* If the DLL is currently on, turn it off via MR1 before the
	 * frequency change (both ranks, hence rank mask 2|1 = 3? value 2
	 * here selects ranks — see pctl_write_mr for the encoding).
	 */
	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	/* Hold off auto-refresh while we park the DRAM in self-refresh */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

	/* Enable the PHY bufferen override, then quasi-dynamically drop
	 * dfi_init_complete_en via the SWCTL req/ack handshake.
	 */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	/* Program DLL-off mode and suppress SRX ZQCL for both FSP copies */
	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	/* Gate the scheduler and controller clocks during PLL switch */
	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

	/* Reset the PHY, retune DPLL (at freq/2) and both PHY PLL copies,
	 * then release the PHY resets.
	 */
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	/* Undo the bufferen override and ungate the clocks again */
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	/* Wait for the DFI initialization handshake to complete */
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
		continue;

	/* Select the destination FSP in the controller and the PHY */
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	/* Pulse PHY reg 0x71 bit5 — presumably latches the new FSP in the
	 * PHY; exact semantics not visible here, confirm against PHY doc.
	 */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/*
	 * Re-issue the mode registers for the new rate, per DRAM type, from
	 * the destination FSP's INIT3/INIT4 (and INIT6/INIT7 for DDR4)
	 * register copies. Rank mask 3 targets both ranks.
	 */
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			/* DLL stays on: trigger a DLL reset via MR0 first */
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4) {
		/* MR13: keep existing fields, replace FSP-OP (bit7) */
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	/* Re-enable auto-refresh now that the switch is complete */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	/* Restore the low-power state saved at function entry */
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}
3090 
3091 static void ddr_set_rate_for_fsp(struct dram_info *dram,
3092 				 struct rv1126_sdram_params *sdram_params)
3093 {
3094 	struct ddr2_3_4_lp2_3_info *ddr_info;
3095 	u32 f0;
3096 	u32 dramtype = sdram_params->base.dramtype;
3097 #ifndef CONFIG_SPL_KERNEL_BOOT
3098 	u32 f1, f2, f3;
3099 #endif
3100 
3101 	ddr_info = get_ddr_drv_odt_info(dramtype);
3102 	if (!ddr_info)
3103 		return;
3104 
3105 	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3106 	     DDR_FREQ_MASK;
3107 
3108 #ifndef CONFIG_SPL_KERNEL_BOOT
3109 	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3110 	memset((void *)&fsp_param, 0, sizeof(fsp_param));
3111 
3112 	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3113 	     DDR_FREQ_MASK;
3114 	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3115 	     DDR_FREQ_MASK;
3116 	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3117 	     DDR_FREQ_MASK;
3118 #endif
3119 
3120 	if (get_wrlvl_val(dram, sdram_params))
3121 		printascii("get wrlvl value fail\n");
3122 
3123 #ifndef CONFIG_SPL_KERNEL_BOOT
3124 	printascii("change to: ");
3125 	printdec(f1);
3126 	printascii("MHz\n");
3127 	ddr_set_rate(&dram_info, sdram_params, f1,
3128 		     sdram_params->base.ddr_freq, 1, 1, 1);
3129 	printascii("change to: ");
3130 	printdec(f2);
3131 	printascii("MHz\n");
3132 	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3133 	printascii("change to: ");
3134 	printdec(f3);
3135 	printascii("MHz\n");
3136 	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3137 #endif
3138 	printascii("change to: ");
3139 	printdec(f0);
3140 	printascii("MHz(final freq)\n");
3141 #ifndef CONFIG_SPL_KERNEL_BOOT
3142 	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3143 #else
3144 	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
3145 #endif
3146 }
3147 
3148 int get_uart_config(void)
3149 {
3150 	struct sdram_head_info_index_v2 *index =
3151 		(struct sdram_head_info_index_v2 *)common_info;
3152 	struct global_info *gbl_info;
3153 
3154 	gbl_info = (struct global_info *)((void *)common_info +
3155 		index->global_index.offset * 4);
3156 
3157 	return gbl_info->uart_info;
3158 }
3159 
/*
 * TPL entry point for DRAM bring-up: validate the common_info blob,
 * detect the fitted DRAM, train all frequency set points, and publish
 * the results (FSP params and atags) for later boot stages.
 *
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	/* Fixed peripheral base addresses for the TPL build */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	/*
	 * Sanity-check the common_info header: version must be 2, every
	 * section size (in 32-bit words) must match the structs this code
	 * was built against, and every section offset must be non-zero.
	 */
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	/* Self-refresh / power-down idle settings from the blob */
	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

	sdram_params = &sdram_configs[0];

	/* For DDR3/DDR4, apply the 2T-timing choice (pctl reg bit 10) */
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		/* Detection failed: report the type/freq that was tried */
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	/* Train every frequency set point, ending on the runtime freq */
	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	/* Hand off DRAM layout/params to later stages via atags */
	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return (-1);
}
3245 #endif /* CONFIG_TPL_BUILD */
3246