xref: /OK3568_Linux_fs/u-boot/drivers/ram/rockchip/sdram_rv1126.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/hardware.h>
#include <asm/arch/rk_atags.h>
#include <asm/arch/cru_rv1126.h>
#include <asm/arch/grf_rv1126.h>
#include <asm/arch/sdram_common.h>
#include <asm/arch/sdram_rv1126.h>

/* define training flags */
#define CA_TRAINING			(0x1 << 0)
#define READ_GATE_TRAINING		(0x1 << 1)
#define WRITE_LEVELING			(0x1 << 2)
#define WRITE_TRAINING			(0x1 << 3)
#define READ_TRAINING			(0x1 << 4)
#define FULL_TRAINING			(0xff)

#define SKEW_RX_SIGNAL			(0)
#define SKEW_TX_SIGNAL			(1)
#define SKEW_CA_SIGNAL			(2)

#define DESKEW_MDF_ABS_VAL		(0)
#define DESKEW_MDF_DIFF_VAL		(1)

#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please define CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
#endif
#endif

#ifdef CONFIG_TPL_BUILD

struct dram_info {
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
	u32 sr_idle;
	u32 pd_idle;
};

#define GRF_BASE_ADDR			0xfe000000
#define PMU_GRF_BASE_ADDR		0xfe020000
#define DDR_GRF_BASE_ADDR		0xfe030000
#define BUS_SGRF_BASE_ADDR		0xfe0a0000
#define SERVER_MSCH_BASE_ADDR		0xfe800000
#define CRU_BASE_ADDR			0xff490000
#define DDR_PHY_BASE_ADDR		0xff4a0000
#define UPCTL2_BASE_ADDR		0xffa50000

#define SGRF_SOC_CON2			0x8
#define SGRF_SOC_CON12			0x30
#define SGRF_SOC_CON13			0x34

struct dram_info dram_info;

#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
};
#elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
struct rv1126_sdram_params sdram_configs[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
	#include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
};
#endif

u32 common_info[] = {
	#include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
};
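
/*
 * Editor's note (an inferred sketch, not from the original source): per
 * calculate_ddrconfig() below, each entry packs the geometry it matches as
 *   bit [8]    rank - 1
 *   bits[7:5]  cs0_row - 13
 *   bit [4]    appears to mark the dual-rank (cs1_row == cs0_row) variants
 *              matched only by the i = 5..7 special case
 *   bit [3]    1 for 8-bank parts (bk == 3)
 *   bits[2:0]  (bw + col) - 10
 * e.g. entry 0 describes a single-rank, 16-row, 8-bank die with
 * col + bw == 13.
 */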

/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
};
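
/*
 * Editor's note (inferred from the DDR4 branch of calculate_ddrconfig()):
 *   bit [7]    rank - 1
 *   bits[6:4]  cs0_row - 13
 *   bit [3]    set for the dual-rank (cs1_row == cs0_row) entries 17-20
 *   bits[2:1]  bw
 *   bit [0]    die_bw (dbw)
 * e.g. entry 10 matches a single-rank part with cs0_row = 16, bw = 2 and
 * die_bw = 0.
 */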

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
};

u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
		0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
		0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
		0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
		0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
		0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
		0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};
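
/*
 * Editor's note (sketch): set_ctl_address_map() copies the nine words of a
 * row straight into the uMCTL2 ADDRMAP0..ADDRMAP8 registers, so the first
 * scalar is the CS0 bit position written to ADDRMAP0, and the 0x1f/0x3f
 * fields appear to mark unused address bits, matching the unused-row
 * fix-up applied after the copy.
 */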

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
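
	/*
	 * Illustrative numbers (editor's sketch): with the 24 MHz OSC and
	 * mhz = 528, the branches above pick postdiv1 = 4, postdiv2 = 1,
	 * so fbdiv = 528 * 1 * 4 * 1 / 24 = 88 and
	 * Fout = 24 MHz * 88 / (1 * 4 * 1) = 528 MHz.
	 */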

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}
	if (delay <= 0)
		printascii("ERROR: DPLL lock timeout!\n");

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the Inno DDR PHY needs freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}
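
/*
 * Editor's worked example (illustrative): a single-rank DDR3 part with
 * col = 10, row = 15, 8 banks and bw = 1 (assuming the usual Rockchip
 * encoding where bw = 1 means a 16-bit bus) gives
 * tmp = (0 << 8) | (2 << 5) | (1 << 3) | 1 = 0x49, which first matches
 * entry 3 of ddr_cfg_2_rbc (equal low bits, row and rank fields <= the
 * entry's), so calculate_ddrconfig() returns ddrconf = 3.
 */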

static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done = 0 */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done = 1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait for programming done */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
				PCTL2_SW_DONE_ACK)
			break;
	}
}
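
/*
 * Editor's note: this pair implements the uMCTL2 quasi-dynamic register
 * programming handshake via SWCTL/SWSTAT. The usage pattern throughout
 * this file is:
 *
 *	sw_set_req(dram);
 *	clrsetbits_le32(pctl_base + DDR_PCTL2_INIT6, mask, val);
 *	sw_set_ack(dram);
 */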

static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* set unused row bits to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			((i - 12) * 8 / 32) * 4,
			0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;
	int delay = 1000;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK)) {
			udelay(1);
			if (delay-- <= 0) {
				printascii("ERROR: phy pll lock timeout!\n");
				while (1)
					;
			}
		}
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}
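
/*
 * Editor's note: e.g. lp4_odt_calc(60) returns LPDDR4_DQODT_60,
 * lp4_odt_calc(0) disables ODT, and anything above 120 ohm falls back to
 * the 240 ohm code.
 */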

static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = NULL;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else if (dramtype == LPDDR4X)
		ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}
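
/*
 * Editor's sketch of the encoding above (the mV interpretation is an
 * assumption): for LPDDR4 a ca_vref of 300 encodes as range 0,
 * (300 - 100) / 4 = 50 (0x32), while 320 lands in range 1:
 * (1 << 6) | (320 - 220) / 4 = 0x59. The results are written to MR12 (CA)
 * and MR14 (DQ) through INIT6/INIT7.
 */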

static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* the dram ODT enable frequency selects phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* the phy ODT enable frequency selects dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				(phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}
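
	/*
	 * Editor's sketch: vref_inner/vref_out are midpoint codes on a
	 * 0..255 scale (0x80 = VDDQ / 2). E.g. pull-down-only PHY ODT of
	 * 120 ohm against a 40 ohm DRAM driver gives
	 * vref_inner = 120 * 128 / (120 + 40) = 96 (0x60).
	 */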

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4 || dramtype == LPDDR4X)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4 || dramtype == LPDDR4X)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else { /* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	if (dramtype == LPDDR4X)
		dramtype = LPDDR4;

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}
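
/*
 * Editor's note (illustrative): byte_map packs one remap byte per dram
 * type, four types per u32. Assuming the dramtype codes implied by the
 * CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE checks at the top of this file
 * (DDR4 = 0, DDR3 = 3, LPDDR3 = 6, LPDDR4 = 7), DDR3 reads
 * byte_map[0] >> 24 and LPDDR4 reads byte_map[1] >> 24.
 */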

static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0) {
			byte0 = i;
			break;
		}
	}
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 1) {
			byte1 = i;
			break;
		}
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt is controlled by the phy; enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4 ||
	    sdram_params->base.dramtype == LPDDR4X)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training, ca vref chooses range 1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training, PHY_0x7c[5] chooses range 0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
u32 read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
		index->dq_map_index.offset * 4);

	pctl_read_mr(pctl_base, rank, mr_num);

	if (dramtype == LPDDR3) {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);
		ret = 0;
		for (i = 0; i < 8; i++)
			ret |= ((temp >> i) & 0x1) << ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
	} else {
		ret = readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff;
	}

	return ret;
}

/* auto-refresh must be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
		       PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for the differential signal: clk
 * delta_sig: value for single-ended signals: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		       delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}
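
/*
 * Editor's usage sketch: nudging all CA lines plus the differential clock
 * out by two delay steps on cs0 would look like
 *
 *	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, 2, 2, 0, dramtype);
 *
 * whereas DESKEW_MDF_ABS_VAL overwrites the de-skew registers with the
 * given absolute values.
 */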
1479 
get_min_value(struct dram_info * dram,u32 signal,u32 rank)1480 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1481 {
1482 	u32 i, j, offset = 0;
1483 	u32 min = 0x3f;
1484 	void __iomem *phy_base = dram->phy;
1485 	u32 byte_en;
1486 
1487 	if (signal == SKEW_TX_SIGNAL)
1488 		offset = 8;
1489 
1490 	if (signal == SKEW_CA_SIGNAL) {
1491 		for (i = 0; i < 0x20; i++)
1492 			min = MIN(min, readl(PHY_REG(phy_base, 0x150  + i)));
1493 	} else {
1494 		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1495 		for (j = offset; j < offset + rank * 4; j++) {
1496 			if (!((byte_en >> (j % 4)) & 1))
1497 				continue;
1498 			for (i = 0; i < 11; i++)
1499 				min = MIN(min,
1500 					  readl(PHY_REG(phy_base,
1501 							dqs_dq_skew_adr[j] +
1502 							i)));
1503 		}
1504 	}
1505 
1506 	return min;
1507 }

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}
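
/*
 * Editor's note: this is used as a save/restore pair around sequences
 * that must run with the controller's low-power modes off:
 *
 *	u32 lp_stat = low_power_update(dram, 0);	// disable and save
 *	...critical sequence...
 *	low_power_update(dram, lp_stat);		// restore
 */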

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *	1: de-skew = reg val + delta_*
 * delta_dif: value for the differential signal: dqs
 * delta_sig: value for single-ended signals: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
						dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}
1570 
data_training_rg(struct dram_info * dram,u32 cs,u32 dramtype)1571 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1572 {
1573 	void __iomem *phy_base = dram->phy;
1574 	u32 ret;
1575 	u32 dis_auto_zq = 0;
1576 	u32 odt_val_up, odt_val_dn;
1577 	u32 i, j;
1578 
1579 	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1580 	odt_val_up = readl(PHY_REG(phy_base, 0x111));
1581 
1582 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1583 		for (i = 0; i < 4; i++) {
1584 			j = 0x110 + i * 0x10;
1585 			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1586 			       PHY_REG(phy_base, j));
1587 			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1588 			       PHY_REG(phy_base, j + 0x1));
1589 		}
1590 	}
1591 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1592 	/* use normal read mode for data training */
1593 	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1594 
1595 	if (dramtype == DDR4)
1596 		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1597 
1598 	/* choose training cs */
1599 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1600 	/* enable gate training */
1601 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1602 	udelay(50);
1603 	ret = readl(PHY_REG(phy_base, 0x91));
1604 	/* disable gate training */
1605 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1606 	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1607 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1608 
1609 	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1610 
1611 	if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1612 		for (i = 0; i < 4; i++) {
1613 			j = 0x110 + i * 0x10;
1614 			writel(odt_val_dn, PHY_REG(phy_base, j));
1615 			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1616 		}
1617 	}
1618 	return ret;
1619 }
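
/*
 * Editor's note (an interpretation): the return value XORs the per-byte
 * gate-training status in PHY register 0x91 with the enabled-byte mask in
 * register 0xf, so 0 means every active byte lane trained and any set bit
 * flags a failing lane.
 */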
1620 
data_training_wl(struct dram_info * dram,u32 cs,u32 dramtype,u32 rank)1621 static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
1622 			    u32 rank)
1623 {
1624 	void __iomem *pctl_base = dram->pctl;
1625 	void __iomem *phy_base = dram->phy;
1626 	u32 dis_auto_zq = 0;
1627 	u32 tmp;
1628 	u32 cur_fsp;
1629 	u32 timeout_us = 1000;
1630 
1631 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1632 
1633 	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1634 
1635 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1636 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
1637 	      0xffff;
1638 	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));
1639 
1640 	/* disable another cs's output */
1641 	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1642 		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
1643 			      dramtype);
1644 	if (dramtype == DDR3 || dramtype == DDR4)
1645 		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1646 	else
1647 		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1648 
1649 	/* choose cs */
1650 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1651 			((0x2 >> cs) << 6) | (0 << 2));
1652 	/* enable write leveling */
1653 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1654 			((0x2 >> cs) << 6) | (1 << 2));
1655 
1656 	while (1) {
1657 		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
1658 		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
1659 			break;
1660 
1661 		udelay(1);
1662 		if (timeout_us-- == 0) {
1663 			printascii("error: write leveling timeout\n");
1664 			while (1)
1665 				;
1666 		}
1667 	}
1668 
1669 	/* disable write leveling */
1670 	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1671 			((0x2 >> cs) << 6) | (0 << 2));
1672 	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);
1673 
1674 	/* re-enable the other cs's output */
1675 	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1676 		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
1677 			      dramtype);
1678 
1679 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1680 
1681 	return 0;
1682 }
1683 
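/*
 * Sequence of data_training_wl() above, as inferred from the register
 * accesses: the current frequency set point's MR1 value is read from
 * INIT3 and handed to the PHY (registers 0x3/0x4, plus a flag bit,
 * 0x40 for DDR3/DDR4 and 0x80 otherwise) so the PHY can issue the
 * leveling MR commands itself; on dual-rank DDR3/DDR4 the other rank
 * is silenced by writing MR1 with bit 12 (Qoff) set and restored
 * afterwards; the poll loop then compares the per-byte done flags in
 * register 0x92 against the byte-enable mask in register 0xf.
 */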
1684 char pattern[32] = {
1685 	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
1686 	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
1687 	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
1688 	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
1689 };
1690 
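/*
 * Worst-case training pattern: the 0xaa/0x55 bytes toggle every DQ
 * line on consecutive beats. This is presumably the fixed data used
 * when random pattern generation is disabled (PHY_0x71[3], cleared in
 * data_training_wr() below).
 */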
1691 static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
1692 			    u32 mhz)
1693 {
1694 	void __iomem *pctl_base = dram->pctl;
1695 	void __iomem *phy_base = dram->phy;
1696 	u32 trefi_1x, trfc_1x;
1697 	u32 dis_auto_zq = 0;
1698 	u32 timeout_us = 1000;
1699 	u32 dqs_default;
1700 	u32 cur_fsp;
1701 	u32 vref_inner;
1702 	u32 i;
1703 	struct sdram_head_info_index_v2 *index =
1704 		(struct sdram_head_info_index_v2 *)common_info;
1705 	struct dq_map_info *map_info;
1706 
1707 	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
1708 	if (dramtype == DDR3 && vref_inner == 0x80) {
1709 		for (i = 0; i < 4; i++)
1710 			writel(vref_inner - 0xa,
1711 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1712 
1713 		/* reg_rx_vref_value_update */
1714 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1715 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1716 	}
1717 
1718 	map_info = (struct dq_map_info *)((void *)common_info +
1719 		index->dq_map_index.offset * 4);
1720 	/* train only one cs at a time: 0 = cs0, 1 = cs1 */
1721 	if (cs > 1)
1722 		return -1;
1723 
1724 	dqs_default = 0xf;
1725 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1726 
1727 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1728 	/* config refresh timing */
1729 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1730 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1731 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1732 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1733 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1734 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1735 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1736 	/* reg_phy_trfc */
1737 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1738 	/* reg_max_refi_cnt */
1739 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
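	/*
	 * Note: the value extracted above from RFSHTMG[27:16] is in units
	 * of 32 clocks, hence the "* 32" to get a 1x clock count for the
	 * PHY, while t_rfc_min sits in RFSHTMG[9:0]. data_training_wr()
	 * below repeats the same sequence.
	 */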
1740 
1741 	/* choose training cs */
1742 	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);
1743 
1744 	/* set dq map for ddr4 */
1745 	if (dramtype == DDR4) {
1746 		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
1747 		for (i = 0; i < 4; i++) {
1748 			writel((map_info->ddr4_dq_map[cs * 2] >>
1749 				((i % 4) * 8)) & 0xff,
1750 				PHY_REG(phy_base, 0x238 + i));
1751 			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
1752 				((i % 4) * 8)) & 0xff,
1753 				PHY_REG(phy_base, 0x2b8 + i));
1754 		}
1755 	}
1756 
1757 	/* cha_l reg_l_rd_train_dqs_default[5:0] */
1758 	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
1759 	/* cha_h reg_h_rd_train_dqs_default[5:0] */
1760 	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
1761 	/* chb_l reg_l_rd_train_dqs_default[5:0] */
1762 	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
1763 	/* chb_h reg_h_rd_train_dqs_default[5:0] */
1764 	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
1765 
1766 	/* Choose the automatic read-training mode */
1767 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1768 	/* Enable automatic read training */
1769 	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1770 
1771 	/* Wait for training to complete */
1772 	while (1) {
1773 		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1774 			break;
1775 
1776 		udelay(1);
1777 		if (timeout_us-- == 0) {
1778 			printascii("error: read training timeout\n");
1779 			return -1;
1780 		}
1781 	}
1782 
1783 	/* Check the read train state */
1784 	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1785 	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1786 		printascii("error: read training error\n");
1787 		return -1;
1788 	}
1789 
1790 	/* Exit read training by clearing the enable bit */
1791 	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1792 
1793 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1794 
1795 	if (dramtype == DDR3 && vref_inner == 0x80) {
1796 		for (i = 0; i < 4; i++)
1797 			writel(vref_inner,
1798 			       PHY_REG(phy_base, 0x118 + i * 0x10));
1799 
1800 		/* reg_rx_vref_value_update */
1801 		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1802 		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1803 	}
1804 
1805 	return 0;
1806 }
1807 
1808 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1809 			    u32 mhz, u32 dst_fsp)
1810 {
1811 	void __iomem *pctl_base = dram->pctl;
1812 	void __iomem *phy_base = dram->phy;
1813 	u32 trefi_1x, trfc_1x;
1814 	u32 dis_auto_zq = 0;
1815 	u32 timeout_us = 1000;
1816 	u32 cur_fsp;
1817 	u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1818 
1819 	if (dramtype == LPDDR3 && mhz <= 400) {
1820 		phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1821 		offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1822 		cl = readl(PHY_REG(phy_base, offset));
1823 		cwl = readl(PHY_REG(phy_base, offset + 2));
1824 
1825 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1826 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1827 		pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1828 	}
1829 
1830 	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1831 
1832 	/* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1833 	clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1834 	/* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1835 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1836 	/* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1837 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1838 	/* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1839 	clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1840 	/* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1841 	clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1842 
1843 	/* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1844 	clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1845 
1846 	/* config refresh timing */
1847 	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1848 	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1849 			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1850 	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1851 			DDR_PCTL2_RFSHTMG) & 0x3ff;
1852 	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1853 	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1854 	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1855 	/* reg_phy_trfc */
1856 	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1857 	/* reg_max_refi_cnt */
1858 	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1859 
1860 	/* choose training cs */
1861 	clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1862 
1863 	/* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
1864 	/* 0: Use the write-leveling value. */
1865 	/* 1: use reg0x233 0x237 0x2b3 0x2b7 */
1866 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1867 
1868 	/* PHY_0x7a [0] reg_dq_wr_train_auto */
1869 	setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1870 
1871 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1872 	setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1873 
1874 	send_a_refresh(dram);
1875 
1876 	while (1) {
1877 		if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1878 			break;
1879 
1880 		udelay(1);
1881 		if (timeout_us-- == 0) {
1882 			printascii("error: write training timeout\n");
1883 			while (1)
1884 				;
1885 		}
1886 	}
1887 
1888 	/* Check the write train state */
1889 	if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1890 		printascii("error: write training error\n");
1891 		return -1;
1892 	}
1893 
1894 	/* PHY_0x7a [1] reg_dq_wr_train_en */
1895 	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1896 
1897 	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1898 
1899 	/* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
1900 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1901 		fsp_param[dst_fsp].vref_dq[cs] =
1902 			((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1903 			 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1904 		/* add range info */
1905 		fsp_param[dst_fsp].vref_dq[cs] |=
1906 			((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1907 	}
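	/*
	 * The value saved above is the average of the two trained
	 * per-channel vref codes (PHY registers 0x384/0x385, 6 bits
	 * each), with the vref range bit from register 0x7c bit 5
	 * shifted up into bit 6 so one byte carries both range and
	 * level for later DFS use.
	 */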
1908 
1909 	if (dramtype == LPDDR3 && mhz <= 400) {
1910 		clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1911 		clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1912 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1913 			       DDR_PCTL2_INIT3);
1914 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1915 			      dramtype);
1916 	}
1917 
1918 	return 0;
1919 }
1920 
1921 static int data_training(struct dram_info *dram, u32 cs,
1922 			 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1923 			 u32 training_flag)
1924 {
1925 	u32 ret = 0;
1926 
1927 	if (training_flag == FULL_TRAINING)
1928 		training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1929 				WRITE_TRAINING | READ_TRAINING;
1930 
1931 	if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1932 		ret = data_training_wl(dram, cs,
1933 				       sdram_params->base.dramtype,
1934 				       sdram_params->ch.cap_info.rank);
1935 		if (ret != 0)
1936 			goto out;
1937 	}
1938 
1939 	if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1940 		ret = data_training_rg(dram, cs,
1941 				       sdram_params->base.dramtype);
1942 		if (ret != 0)
1943 			goto out;
1944 	}
1945 
1946 	if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1947 		ret = data_training_rd(dram, cs,
1948 				       sdram_params->base.dramtype,
1949 				       sdram_params->base.ddr_freq);
1950 		if (ret != 0)
1951 			goto out;
1952 	}
1953 
1954 	if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1955 		ret = data_training_wr(dram, cs,
1956 				       sdram_params->base.dramtype,
1957 				       sdram_params->base.ddr_freq, dst_fsp);
1958 		if (ret != 0)
1959 			goto out;
1960 	}
1961 
1962 out:
1963 	return ret;
1964 }
1965 
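/*
 * Illustrative use of data_training() above (a sketch, not a fixed
 * contract): a full retrain of both ranks at frequency set point 0
 * would be
 *
 *	ret = data_training(dram, 0, sdram_params, 0, FULL_TRAINING);
 *	if (sdram_params->ch.cap_info.rank == 2)
 *		ret |= data_training(dram, 1, sdram_params, 0, FULL_TRAINING);
 *
 * which is the pattern get_wrlvl_val() and high_freq_training() below
 * follow with individual training flags.
 */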
1966 static int get_wrlvl_val(struct dram_info *dram,
1967 			 struct rv1126_sdram_params *sdram_params)
1968 {
1969 	int i, j, clk_skew;
1970 	void __iomem *phy_base = dram->phy;
1971 	u32 lp_stat;
1972 	int ret;
1973 
1974 	lp_stat = low_power_update(dram, 0);
1975 
1976 	clk_skew = 0x1f;
1977 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1978 			 sdram_params->base.dramtype);
1979 
1980 	ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1981 	if (sdram_params->ch.cap_info.rank == 2)
1982 		ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1983 
1984 	for (j = 0; j < 2; j++)
1985 		for (i = 0; i < 4; i++)
1986 			wrlvl_result[j][i] =
1987 				(readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1988 				clk_skew;
1989 
1990 	low_power_update(dram, lp_stat);
1991 
1992 	return ret;
1993 }
1994 
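/*
 * get_wrlvl_val() above deliberately biases the clock deskew to 0x1f
 * before leveling and subtracts that bias from the raw 6-bit codes,
 * so wrlvl_result[][] holds signed DQS-vs-CLK offsets; negative
 * values are possible and are handled in high_freq_training() below.
 */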
1995 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
1996 static void init_rw_trn_result_struct(struct rw_trn_result *result,
1997 				      void __iomem *phy_base, u8 cs_num)
1998 {
1999 	int i;
2000 
2001 	result->cs_num = cs_num;
2002 	result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
2003 			  PHY_DQ_WIDTH_MASK;
2004 	for (i = 0; i < FSP_NUM; i++)
2005 		result->fsp_mhz[i] = 0;
2006 }
2007 
2008 static void save_rw_trn_min_max(void __iomem *phy_base,
2009 				struct cs_rw_trn_result *rd_result,
2010 				struct cs_rw_trn_result *wr_result,
2011 				u8 byte_en)
2012 {
2013 	u16 phy_ofs;
2014 	u8 dqs;
2015 	u8 dq;
2016 
2017 	for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2018 		if ((byte_en & BIT(dqs)) == 0)
2019 			continue;
2020 
2021 		/* Channel A or B (low or high 16 bit) */
2022 		phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2023 		/* low or high 8 bit */
2024 		phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2025 		for (dq = 0; dq < 8; dq++) {
2026 			rd_result->dqs[dqs].dq_min[dq] =
2027 				readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2028 			rd_result->dqs[dqs].dq_max[dq] =
2029 				readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2030 			wr_result->dqs[dqs].dq_min[dq] =
2031 				readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2032 			wr_result->dqs[dqs].dq_max[dq] =
2033 				readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2034 		}
2035 	}
2036 }
2037 
2038 static void save_rw_trn_deskew(void __iomem *phy_base,
2039 			       struct fsp_rw_trn_result *result, u8 cs_num,
2040 			       int min_val, bool rw)
2041 {
2042 	u16 phy_ofs;
2043 	u8 cs;
2044 	u8 dq;
2045 
2046 	result->min_val = min_val;
2047 
2048 	for (cs = 0; cs < cs_num; cs++) {
2049 		phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2050 		phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2051 		for (dq = 0; dq < 8; dq++) {
2052 			result->cs[cs].dqs[0].dq_deskew[dq] =
2053 				readb(PHY_REG(phy_base, phy_ofs + dq));
2054 			result->cs[cs].dqs[1].dq_deskew[dq] =
2055 				readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2056 			result->cs[cs].dqs[2].dq_deskew[dq] =
2057 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2058 			result->cs[cs].dqs[3].dq_deskew[dq] =
2059 				readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2060 		}
2061 
2062 		result->cs[cs].dqs[0].dqs_deskew =
2063 			readb(PHY_REG(phy_base, phy_ofs + 0x8));
2064 		result->cs[cs].dqs[1].dqs_deskew =
2065 			readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2066 		result->cs[cs].dqs[2].dqs_deskew =
2067 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2068 		result->cs[cs].dqs[3].dqs_deskew =
2069 			readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2070 	}
2071 }
2072 
2073 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2074 {
2075 	result->flag = DDR_DQ_EYE_FLAG;
2076 	memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2077 }
2078 #endif
2079 
2080 static int high_freq_training(struct dram_info *dram,
2081 			      struct rv1126_sdram_params *sdram_params,
2082 			      u32 fsp)
2083 {
2084 	u32 i, j;
2085 	void __iomem *phy_base = dram->phy;
2086 	u32 dramtype = sdram_params->base.dramtype;
2087 	int min_val;
2088 	int dqs_skew, clk_skew, ca_skew;
2089 	u8 byte_en;
2090 	int ret;
2091 
2092 	byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2093 	dqs_skew = 0;
2094 	for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2095 		for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2096 			if ((byte_en & BIT(i)) != 0)
2097 				dqs_skew += wrlvl_result[j][i];
2098 		}
2099 	}
2100 	dqs_skew = dqs_skew /
2101 		   (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2102 
2103 	clk_skew = 0x20 - dqs_skew;
2104 	dqs_skew = 0x20;
2105 
2106 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2107 		min_val = 0xff;
2108 		for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2109 			for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2110 				if ((byte_en & BIT(i)) != 0)
2111 					min_val = MIN(wrlvl_result[j][i], min_val);
2112 			}
2113 
2114 		if (min_val < 0) {
2115 			clk_skew = -min_val;
2116 			ca_skew = -min_val;
2117 		} else {
2118 			clk_skew = 0;
2119 			ca_skew = 0;
2120 		}
2121 	} else if (dramtype == LPDDR3) {
2122 		ca_skew = clk_skew - 4;
2123 	} else {
2124 		ca_skew = clk_skew;
2125 	}
2126 	modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2127 			 dramtype);
2128 
2129 	writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2130 	writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2131 	writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2132 	writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2133 	ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2134 			    READ_TRAINING | WRITE_TRAINING);
2135 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2136 	rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2137 	save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2138 			    &rw_trn_result.wr_fsp[fsp].cs[0],
2139 			    rw_trn_result.byte_en);
2140 #endif
2141 	if (sdram_params->ch.cap_info.rank == 2) {
2142 		writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2143 		writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2144 		writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2145 		writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2146 		ret |= data_training(dram, 1, sdram_params, fsp,
2147 				     READ_GATE_TRAINING | READ_TRAINING |
2148 				     WRITE_TRAINING);
2149 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2150 		save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2151 				    &rw_trn_result.wr_fsp[fsp].cs[1],
2152 				    rw_trn_result.byte_en);
2153 #endif
2154 	}
2155 	if (ret)
2156 		goto out;
2157 
2158 	record_dq_prebit(dram);
2159 
2160 	min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2161 				sdram_params->ch.cap_info.rank) * -1;
2162 	modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2163 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2164 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2165 	save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2166 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2167 			   SKEW_RX_SIGNAL);
2168 #endif
2169 
2170 	min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2171 				    sdram_params->ch.cap_info.rank),
2172 		      get_min_value(dram, SKEW_CA_SIGNAL,
2173 				    sdram_params->ch.cap_info.rank)) * -1;
2174 
2175 	/* clk = 0, rx all skew -7, tx - min_value */
2176 	modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2177 			 dramtype);
2178 
2179 	modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2180 			 min_val, min_val, sdram_params->ch.cap_info.rank);
2181 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2182 	save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2183 			   rw_trn_result.cs_num, (u8)(min_val * (-1)),
2184 			   SKEW_TX_SIGNAL);
2185 #endif
2186 
2187 	ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2188 	if (sdram_params->ch.cap_info.rank == 2)
2189 		ret |= data_training(dram, 1, sdram_params, 0,
2190 				     READ_GATE_TRAINING);
2191 out:
2192 	return ret;
2193 }
2194 
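/*
 * Skew recentring in high_freq_training() above: the average
 * write-leveling offset over the enabled bytes is absorbed into the
 * clock deskew (clk_skew = 0x20 - avg) so the per-byte DQS defaults
 * can sit at the mid-scale code 0x20; e.g. an average offset of +6
 * yields clk_skew = 0x1a. For LPDDR4/LPDDR4X the most negative result
 * is used instead, so that no lane is left with a negative delay code.
 */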
2195 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2196 {
2197 	writel(ddrconfig, &dram->msch->deviceconf);
2198 	clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2199 }
2200 
2201 static void update_noc_timing(struct dram_info *dram,
2202 			      struct rv1126_sdram_params *sdram_params)
2203 {
2204 	void __iomem *pctl_base = dram->pctl;
2205 	u32 bw, bl;
2206 
2207 	bw = 8 << sdram_params->ch.cap_info.bw;
2208 	bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2209 
2210 	/* update the noc timing related to data bus width */
2211 	if ((bw / 8 * bl) <= 16)
2212 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2213 	else if ((bw / 8 * bl) == 32)
2214 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2215 	else if ((bw / 8 * bl) == 64)
2216 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2217 	else
2218 		sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2219 
2220 	sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2221 		(bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2222 
2223 	if (sdram_params->base.dramtype == LPDDR4 ||
2224 	    sdram_params->base.dramtype == LPDDR4X) {
2225 		sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2226 			(bw == 16) ? 0x1 : 0x2;
2227 		sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2228 			3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2229 	}
2230 
2231 	writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2232 	       &dram->msch->ddrtiminga0);
2233 	writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2234 	       &dram->msch->ddrtimingb0);
2235 	writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2236 	       &dram->msch->ddrtimingc0);
2237 	writel(sdram_params->ch.noc_timings.devtodev0.d32,
2238 	       &dram->msch->devtodev0);
2239 	writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2240 	writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2241 	       &dram->msch->ddr4timing);
2242 }
2243 
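/*
 * Worked example for the burstsize encoding in update_noc_timing()
 * above: a 32-bit interface (cap_info.bw = 2, so bw = 32) with BL8
 * moves 32 / 8 * 8 = 32 bytes per burst and encodes as burstsize = 1;
 * a 16-bit interface with BL8 moves 16 bytes and encodes as 0.
 */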
2244 static int split_setup(struct dram_info *dram,
2245 		       struct rv1126_sdram_params *sdram_params)
2246 {
2247 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2248 	u32 dramtype = sdram_params->base.dramtype;
2249 	u32 split_size, split_mode;
2250 	u64 cs_cap[2], cap;
2251 
2252 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2253 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2254 	/* only the case where the larger capacity is on the low 16 bits is supported */
2255 	if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2256 		cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2257 		cap_info->cs0_high16bit_row));
2258 	} else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2259 		   (cap_info->rank == 2)) {
2260 		if (!cap_info->cs1_high16bit_row)
2261 			cap = cs_cap[0];
2262 		else
2263 			cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2264 				cap_info->cs1_high16bit_row));
2265 	} else {
2266 		goto out;
2267 	}
2268 	split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2269 	if (cap_info->bw == 2)
2270 		split_mode = SPLIT_MODE_32_L16_VALID;
2271 	else
2272 		split_mode = SPLIT_MODE_16_L8_VALID;
2273 
2274 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2275 		     (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2276 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2277 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2278 		     (split_mode << SPLIT_MODE_OFFSET) |
2279 		     (0x0 << SPLIT_BYPASS_OFFSET) |
2280 		     (split_size << SPLIT_SIZE_OFFSET));
2281 
2282 	rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2283 		     MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2284 		     0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2285 
2286 out:
2287 	return 0;
2288 }
2289 
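/*
 * The split_size programmed by split_setup() above is the capacity
 * mapped below the split point in 16 MiB units (cap >> 24); e.g. a
 * 512 MiB low-16-bit region gives split_size = 32.
 */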
2290 static void split_bypass(struct dram_info *dram)
2291 {
2292 	if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2293 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2294 		return;
2295 
2296 	/* bypass split */
2297 	rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2298 		     (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2299 		     (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2300 		     (0x1 << SPLIT_BYPASS_OFFSET) |
2301 		     (0x0 << SPLIT_SIZE_OFFSET));
2302 }
2303 
2304 static void dram_all_config(struct dram_info *dram,
2305 			    struct rv1126_sdram_params *sdram_params)
2306 {
2307 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2308 	u32 dram_type = sdram_params->base.dramtype;
2309 	void __iomem *pctl_base = dram->pctl;
2310 	u32 sys_reg2 = 0;
2311 	u32 sys_reg3 = 0;
2312 	u64 cs_cap[2];
2313 	u32 cs_pst;
2314 
2315 	set_ddrconfig(dram, cap_info->ddrconfig);
2316 	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2317 			 &sys_reg3, 0);
2318 	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2319 	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2320 
2321 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2322 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2323 
2324 	if (cap_info->rank == 2) {
2325 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2326 			6 + 2;
2327 		if (cs_pst > 28)
2328 			cs_cap[0] = 1llu << cs_pst;
2329 	}
2330 
2331 	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2332 			(((cs_cap[0] >> 20) / 64) & 0xff),
2333 			&dram->msch->devicesize);
2334 	update_noc_timing(dram, sdram_params);
2335 }
2336 
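/*
 * The msch devicesize register written above packs one byte per rank,
 * each holding that rank's capacity in 64 MiB units
 * ((cap >> 20) / 64); e.g. a 1 GiB cs0 encodes as 0x10 in the low
 * byte.
 */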
2337 static void enable_low_power(struct dram_info *dram,
2338 			     struct rv1126_sdram_params *sdram_params)
2339 {
2340 	void __iomem *pctl_base = dram->pctl;
2341 	u32 grf_lp_con;
2342 
2343 	writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2344 
2345 	if (sdram_params->base.dramtype == DDR4)
2346 		grf_lp_con = (0x7 << 16) | (1 << 1);
2347 	else if (sdram_params->base.dramtype == DDR3)
2348 		grf_lp_con = (0x7 << 16) | (1 << 0);
2349 	else
2350 		grf_lp_con = (0x7 << 16) | (1 << 2);
2351 
2352 	/* enable lpckdis_en */
2353 	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2354 	writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2355 
2356 	/* enable sr, pd */
2357 	if (dram->pd_idle == 0)
2358 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2359 	else
2360 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2361 	if (dram->sr_idle == 0)
2362 		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2363 	else
2364 		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2365 	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2366 }
2367 
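/*
 * Note on the grf_lp_con write in enable_low_power() above: Rockchip
 * GRF registers take a write-enable mask in the upper 16 bits, so
 * (0x7 << 16) | value updates only bits [2:0], and
 * (0x1 << (9 + 16)) | (0x1 << 9) sets bit 9 (lpckdis_en) without
 * disturbing the rest of the register.
 */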
2368 static void ddr_set_atags(struct dram_info *dram,
2369 			  struct rv1126_sdram_params *sdram_params)
2370 {
2371 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2372 	u32 dram_type = sdram_params->base.dramtype;
2373 	void __iomem *pctl_base = dram->pctl;
2374 	struct tag_serial t_serial;
2375 	struct tag_ddr_mem t_ddrmem;
2376 	struct tag_soc_info t_socinfo;
2377 	u64 cs_cap[2];
2378 	u32 cs_pst = 0;
2379 	u32 split, split_size;
2380 	u64 reduce_cap = 0;
2381 
2382 	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2383 	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2384 
2385 	memset(&t_serial, 0, sizeof(struct tag_serial));
2386 
2387 	t_serial.version = 0;
2388 	t_serial.enable = 1;
2389 	t_serial.addr = CONFIG_DEBUG_UART_BASE;
2390 	t_serial.baudrate = CONFIG_BAUDRATE;
2391 	t_serial.m_mode = SERIAL_M_MODE_M0;
2392 	t_serial.id = 2;
2393 
2394 	atags_destroy();
2395 	atags_set_tag(ATAG_SERIAL, &t_serial);
2396 
2397 	split = readl(&dram->ddrgrf->grf_ddrsplit_con);
2398 	memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2399 	if (cap_info->row_3_4) {
2400 		cs_cap[0] =  cs_cap[0] * 3 / 4;
2401 		cs_cap[1] =  cs_cap[1] * 3 / 4;
2402 	} else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
2403 		split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
2404 		reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
2405 	}
2406 	t_ddrmem.version = 0;
2407 	t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2408 	if (cs_cap[1]) {
2409 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2410 			6 + 2;
2411 	}
2412 
2413 	if (cs_cap[1] && cs_pst > 27) {
2414 		t_ddrmem.count = 2;
2415 		t_ddrmem.bank[1] = 1 << cs_pst;
2416 		t_ddrmem.bank[2] = cs_cap[0];
2417 		t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
2418 	} else {
2419 		t_ddrmem.count = 1;
2420 		t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
2421 	}
2422 
2423 	atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);
2424 
2425 	memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2426 	t_socinfo.version = 0x1;
2427 	t_socinfo.name = 0x1126;
2428 	t_socinfo.flags = SOC_FLAGS_TDBT;
2429 	atags_set_tag(ATAG_SOC_INFO, &t_socinfo);
2430 }
2431 
2432 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2433 {
2434 	u32 split;
2435 
2436 	if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2437 	     (1 << SPLIT_BYPASS_OFFSET)) != 0)
2438 		split = 0;
2439 	else
2440 		split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2441 			SPLIT_SIZE_MASK;
2442 
2443 	sdram_print_ddr_info(&sdram_params->ch.cap_info,
2444 			     &sdram_params->base, split);
2445 }
2446 
2447 static int modify_ddr34_bw_byte_map(u8 rg_result, struct rv1126_sdram_params *sdram_params)
2448 {
2449 	struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2450 	struct dq_map_info *map_info = (struct dq_map_info *)
2451 				       ((void *)common_info + index->dq_map_index.offset * 4);
2452 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2453 	u32 dramtype = sdram_params->base.dramtype;
2454 	u32 byte_map = 0;
2455 	u32 byte = 0;
2456 	u32 byte_map_shift;
2457 	int i;
2458 
2459 	if (dramtype == DDR3)
2460 		byte_map_shift = 24;
2461 	else if (dramtype == DDR4)
2462 		byte_map_shift = 0;
2463 	else
2464 		return -1;
2465 
2466 	for (i = 0; i < 4; i++) {
2467 		if ((rg_result & BIT(i)) == 0) {
2468 			byte_map |= byte << (i * 2);
2469 			byte++;
2470 		}
2471 	}
2472 	if (byte != 1 && byte != 2 && byte != 4) {
2473 		printascii("DTT result is abnormal: ");
2474 		printdec(byte);
2475 		printascii(" byte\n");
2476 		return -1;
2477 	}
2478 	cap_info->bw = byte / 2;
2479 	for (i = 0; i < 4; i++) {
2480 		if ((rg_result & BIT(i)) != 0) {
2481 			byte_map |= byte << (i * 2);
2482 			byte++;
2483 		}
2484 	}
2485 
2486 	if ((u8)byte_map != (u8)(map_info->byte_map[0] >> byte_map_shift)) {
2487 		clrsetbits_le32(&map_info->byte_map[0],
2488 				0xff << byte_map_shift, byte_map << byte_map_shift);
2489 		pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, dramtype);
2490 		return 1;
2491 	}
2492 
2493 	return 0;
2494 }
2495 
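/*
 * Worked example for modify_ddr34_bw_byte_map() above: rg_result =
 * 0x0c means byte lanes 2 and 3 failed gate training; the two working
 * lanes are packed first, giving byte = 2 (so cap_info->bw = 1,
 * 16-bit) and byte_map = 0xe4 (2-bit map codes 0,1,2,3 for lanes
 * 0..3). If that map differs from the one recorded in dq_map_info,
 * the controller register set is re-derived and 1 is returned, which
 * makes sdram_init_() bail out so sdram_init_detect() can re-run the
 * DDR3 init with the updated map.
 */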
2496 int sdram_init_(struct dram_info *dram, struct rv1126_sdram_params *sdram_params, u32 post_init)
2497 {
2498 	void __iomem *pctl_base = dram->pctl;
2499 	void __iomem *phy_base = dram->phy;
2500 	u32 ddr4_vref;
2501 	u32 mr_tmp, tmp;
2502 	int delay = 1000;
2503 
2504 	rkclk_configure_ddr(dram, sdram_params);
2505 
2506 	rkclk_ddr_reset(dram, 1, 1, 1, 1);
2507 	udelay(10);
2508 
2509 	rkclk_ddr_reset(dram, 1, 1, 1, 0);
2510 	phy_cfg(dram, sdram_params);
2511 
2512 	rkclk_ddr_reset(dram, 1, 1, 0, 0);
2513 	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2514 
2515 	rkclk_ddr_reset(dram, 1, 0, 0, 0);
2516 	pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2517 		 dram->sr_idle, dram->pd_idle);
2518 
2519 	if (sdram_params->ch.cap_info.bw == 2) {
2520 		/* a 32-bit interface uses pageclose */
2521 		setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2522 		/* pageclose = 1 with pageclose_timer = 0 errors out on LPDDR4 at 328 MHz */
2523 		clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2524 	} else {
2525 		clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2526 	}
2527 
2528 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2529 	u32 trefi;
2530 
2531 	tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2532 	trefi = (tmp >> 16) & 0xfff;
2533 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2534 	       pctl_base + DDR_PCTL2_RFSHTMG);
2535 #endif
2536 
2537 	/* set frequency_mode */
2538 	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2539 	/* set target_frequency to Frequency 0 */
2540 	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2541 
2542 	set_ds_odt(dram, sdram_params, 0);
2543 	sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2544 	set_ctl_address_map(dram, sdram_params);
2545 
2546 	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2547 
2548 	rkclk_ddr_reset(dram, 0, 0, 0, 0);
2549 
2550 	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) {
2551 		udelay(1);
2552 		if (delay-- <= 0) {
2553 			printascii("ERROR: timeout waiting for dfi_init_done!\n");
2554 			while (1)
2555 				;
2556 		}
2557 	}
2558 
2559 	if (sdram_params->base.dramtype == LPDDR3) {
2560 		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2561 	} else if (sdram_params->base.dramtype == LPDDR4 ||
2562 		   sdram_params->base.dramtype == LPDDR4X) {
2563 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2564 		/* MR11 */
2565 		pctl_write_mr(dram->pctl, 3, 11,
2566 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2567 			      LPDDR4);
2568 		/* MR12 */
2569 		pctl_write_mr(dram->pctl, 3, 12,
2570 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2571 			      LPDDR4);
2572 
2573 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2574 		/* MR22 */
2575 		pctl_write_mr(dram->pctl, 3, 22,
2576 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2577 			      LPDDR4);
2578 	}
2579 
2580 	if (sdram_params->base.dramtype == DDR3 && post_init == 0)
2581 		setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2582 	tmp = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) & 0xf;
2583 
2584 	if (tmp != 0) {
2585 		if (post_init != 0) {
2586 			printascii("DTT cs0 error\n");
2587 			return -1;
2588 		}
2589 		if (sdram_params->base.dramtype != DDR3 || tmp == 0xf)
2590 			return -1;
2591 	}
2592 
2593 	if (sdram_params->base.dramtype == DDR3 && post_init == 0) {
2594 		if (modify_ddr34_bw_byte_map((u8)tmp, sdram_params) != 0)
2595 			return -1;
2596 	}
2597 
2598 	if (sdram_params->base.dramtype == LPDDR4) {
2599 		mr_tmp = read_mr(dram, 1, 14, LPDDR4);
2600 
2601 		if (mr_tmp != 0x4d)
2602 			return -1;
2603 	}
2604 
2605 	if (sdram_params->base.dramtype == LPDDR4 ||
2606 	    sdram_params->base.dramtype == LPDDR4X) {
2607 		mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2608 		/* MR14 */
2609 		pctl_write_mr(dram->pctl, 3, 14,
2610 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2611 			      LPDDR4);
2612 	}
2613 	if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2614 		if (data_training(dram, 1, sdram_params, 0,
2615 				  READ_GATE_TRAINING) != 0) {
2616 			printascii("DTT cs1 error\n");
2617 			return -1;
2618 		}
2619 	}
2620 
2621 	if (sdram_params->base.dramtype == DDR4) {
2622 		ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2623 		pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2624 				  sdram_params->base.dramtype);
2625 	}
2626 
2627 	dram_all_config(dram, sdram_params);
2628 	enable_low_power(dram, sdram_params);
2629 
2630 	return 0;
2631 }
2632 
2633 static u64 dram_detect_cap(struct dram_info *dram,
2634 			   struct rv1126_sdram_params *sdram_params,
2635 			   unsigned char channel)
2636 {
2637 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2638 	void __iomem *pctl_base = dram->pctl;
2639 	void __iomem *phy_base = dram->phy;
2640 	u32 mr8;
2641 
2642 	u32 bktmp;
2643 	u32 coltmp;
2644 	u32 rowtmp;
2645 	u32 cs;
2646 	u32 dram_type = sdram_params->base.dramtype;
2647 	u32 pwrctl;
2648 	u32 i, dq_map;
2649 	u32 byte1 = 0, byte0 = 0;
2650 
2651 	if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2652 		if (dram_type != DDR4) {
2653 			if (dram_type == DDR3)
2654 				coltmp = 11;
2655 			else
2656 				coltmp = 12;
2657 			bktmp = 3;
2658 			if (dram_type == LPDDR2)
2659 				rowtmp = 15;
2660 			else
2661 				rowtmp = 16;
2662 
2663 			if (sdram_detect_col(cap_info, coltmp) != 0)
2664 				goto cap_err;
2665 
2666 			sdram_detect_bank(cap_info, coltmp, bktmp);
2667 			if (dram_type != LPDDR3)
2668 				sdram_detect_dbw(cap_info, dram_type);
2669 		} else {
2670 			coltmp = 10;
2671 			bktmp = 4;
2672 			rowtmp = 17;
2673 
2674 			cap_info->col = 10;
2675 			cap_info->bk = 2;
2676 			sdram_detect_bg(cap_info, coltmp);
2677 		}
2678 
2679 		if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2680 			goto cap_err;
2681 
2682 		sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2683 	} else {
2684 		cap_info->col = 10;
2685 		cap_info->bk = 3;
2686 		mr8 = read_mr(dram, 1, 8, dram_type);
2687 		cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2688 		mr8 = (mr8 >> 2) & 0xf;
2689 		if (mr8 <= 6) {
2690 			cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2691 		} else if (mr8 == 0xc) {
2692 			cap_info->cs0_row = 13;
2693 		} else {
2694 			printascii("Cap ERR: Failed to get capacity of LPDDR4/X from MR8\n");
2695 			goto cap_err;
2696 		}
2697 		if (cap_info->dbw == 0)
2698 			cap_info->cs0_row++;
2699 		cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2700 		if (cap_info->cs0_row >= 17) {
2701 			printascii("Cap ERR: ");
2702 			printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2703 			goto cap_err;
2704 			// cap_info->cs0_row = 16;
2705 			// cap_info->row_3_4 = 0;
2706 		}
2707 	}
2708 
2709 	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2710 	writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2711 
2712 	if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2713 		cs = 1;
2714 	else
2715 		cs = 0;
2716 	cap_info->rank = cs + 1;
2717 
2718 	setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2719 
2720 	if (dram_type != DDR3) {
2721 		if ((data_training_rg(dram, 0, dram_type) & 0xf) == 0) {
2722 			cap_info->bw = 2;
2723 		} else {
2724 			dq_map = readl(PHY_REG(phy_base, 0x4f));
2725 			for (i = 0; i < 4; i++) {
2726 				if (((dq_map >> (i * 2)) & 0x3) == 0)
2727 					byte0 = i;
2728 				if (((dq_map >> (i * 2)) & 0x3) == 1)
2729 					byte1 = i;
2730 			}
2731 			clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2732 					BIT(byte0) | BIT(byte1));
2733 			if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2734 				cap_info->bw = 1;
2735 			else
2736 				cap_info->bw = 0;
2737 		}
2738 	}
2739 	if (cap_info->bw > 0)
2740 		cap_info->dbw = 1;
2741 
2742 	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2743 
2744 	cap_info->cs0_high16bit_row = cap_info->cs0_row;
2745 	if (cs) {
2746 		cap_info->cs1_row = cap_info->cs0_row;
2747 		cap_info->cs1_high16bit_row = cap_info->cs0_row;
2748 	} else {
2749 		cap_info->cs1_row = 0;
2750 		cap_info->cs1_high16bit_row = 0;
2751 	}
2752 
2753 	if (dram_type == LPDDR3)
2754 		sdram_detect_dbw(cap_info, dram_type);
2755 
2756 	return 0;
2757 cap_err:
2758 	return -1;
2759 }
2760 
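/*
 * LPDDR4/X capacity decode in dram_detect_cap() above, illustrated:
 * MR8 OP[5:2] is the JEDEC density code and OP[7:6] the I/O width.
 * Density code 2 (4 Gb) gives cs0_row = 14 + (2 + 1) / 2 = 15 with
 * row_3_4 = 0; odd codes mark 3/4-row dies, e.g. code 1 (3 Gb) gives
 * 15 rows with row_3_4 = 1. Byte-mode (x8) parts get one extra row
 * bit, and anything needing 17 or more row bits is rejected as
 * unsupported on RV1126.
 */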
2761 static int dram_detect_cs1_row(struct dram_info *dram,
2762 			       struct rv1126_sdram_params *sdram_params,
2763 			       unsigned char channel)
2764 {
2765 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2766 	void __iomem *pctl_base = dram->pctl;
2767 	u32 ret = 0;
2768 	void __iomem *test_addr;
2769 	u32 row, bktmp, coltmp, bw;
2770 	u64 cs0_cap;
2771 	u32 byte_mask;
2772 	u32 cs_pst;
2773 	u32 cs_add = 0;
2774 	u32 max_row;
2775 
2776 	if (cap_info->rank == 2) {
2777 		cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2778 			6 + 2;
2779 		if (cs_pst < 28)
2780 			cs_add = 1;
2781 
2782 		cs0_cap = 1 << cs_pst;
2783 
2784 		if (sdram_params->base.dramtype == DDR4) {
2785 			if (cap_info->dbw == 0)
2786 				bktmp = cap_info->bk + 2;
2787 			else
2788 				bktmp = cap_info->bk + 1;
2789 		} else {
2790 			bktmp = cap_info->bk;
2791 		}
2792 		bw = cap_info->bw;
2793 		coltmp = cap_info->col;
2794 
2795 		if (bw == 2)
2796 			byte_mask = 0xFFFF;
2797 		else
2798 			byte_mask = 0xFF;
2799 
2800 		max_row = (cs_pst == 31) ? 30 : 31;
2801 
2802 		max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2803 
2804 		row = (cap_info->cs0_row > max_row) ? max_row :
2805 			cap_info->cs0_row;
2806 
2807 		for (; row > 12; row--) {
2808 			test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2809 				    (u32)cs0_cap +
2810 				    (1ul << (row + bktmp + coltmp +
2811 					     cs_add + bw - 1ul)));
2812 
2813 			writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2814 			writel(PATTERN, test_addr);
2815 
2816 			if (((readl(test_addr) & byte_mask) ==
2817 			     (PATTERN & byte_mask)) &&
2818 			    ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2819 			      byte_mask) == 0)) {
2820 				ret = row;
2821 				break;
2822 			}
2823 		}
2824 	}
2825 
2826 	return ret;
2827 }
2828 
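/*
 * dram_detect_cs1_row() above probes cs1 by writing PATTERN at an
 * address with only the candidate top row bit set, i.e. bit position
 * row + bk + col + bw + cs_add - 1 above the start of cs1. If the
 * pattern reads back (within byte_mask) while offset 0 of cs1 stays
 * clear, that row bit exists; otherwise the address wrapped and the
 * next smaller row count is tried, down to 13 rows.
 */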
2829 /* return: 0 = success, other = fail */
2830 static int sdram_init_detect(struct dram_info *dram,
2831 			     struct rv1126_sdram_params *sdram_params)
2832 {
2833 	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2834 	u32 ret;
2835 	u32 sys_reg = 0;
2836 	u32 sys_reg3 = 0;
2837 
2838 	if (sdram_init_(dram, sdram_params, 0)) {
2839 		if (sdram_params->base.dramtype == DDR3) {
2840 			if (sdram_init_(dram, sdram_params, 0))
2841 				return -1;
2842 		} else {
2843 			return -1;
2844 		}
2845 	}
2846 
2847 	if (sdram_params->base.dramtype == DDR3) {
2848 		writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2849 		if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2850 			return -1;
2851 	}
2852 
2853 	split_bypass(dram);
2854 	if (dram_detect_cap(dram, sdram_params, 0) != 0)
2855 		return -1;
2856 
2857 	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2858 				   sdram_params->base.dramtype);
2859 	ret = sdram_init_(dram, sdram_params, 1);
2860 	if (ret != 0)
2861 		goto out;
2862 
2863 	cap_info->cs1_row =
2864 		dram_detect_cs1_row(dram, sdram_params, 0);
2865 	if (cap_info->cs1_row) {
2866 		sys_reg = readl(&dram->pmugrf->os_reg[2]);
2867 		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2868 		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2869 				    sys_reg, sys_reg3, 0);
2870 		writel(sys_reg, &dram->pmugrf->os_reg[2]);
2871 		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2872 	}
2873 
2874 	sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2875 	split_setup(dram, sdram_params);
2876 out:
2877 	return ret;
2878 }
2879 
2880 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2881 {
2882 	u32 i;
2883 	u32 offset = 0;
2884 	struct ddr2_3_4_lp2_3_info *ddr_info;
2885 
2886 	if (!freq_mhz) {
2887 		ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2888 		if (ddr_info)
2889 			freq_mhz =
2890 				(ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2891 				DDR_FREQ_MASK;
2892 		else
2893 			freq_mhz = 0;
2894 	}
2895 
2896 	for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2897 		if (sdram_configs[i].base.ddr_freq == 0 ||
2898 		    freq_mhz < sdram_configs[i].base.ddr_freq)
2899 			break;
2900 	}
2901 	offset = i == 0 ? 0 : i - 1;
2902 
2903 	return &sdram_configs[offset];
2904 }
2905 
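/*
 * get_default_sdram_config() returns the highest-frequency entry in
 * sdram_configs[] whose ddr_freq does not exceed the request (or the
 * first entry if even that is too fast); e.g. a request that falls
 * between two table frequencies selects the lower one. A request of 0
 * falls back to the F0 frequency encoded in the drive/ODT info.
 */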
2906 static const u16 pctl_need_update_reg[] = {
2907 	DDR_PCTL2_RFSHTMG,
2908 	DDR_PCTL2_INIT3,
2909 	DDR_PCTL2_INIT4,
2910 	DDR_PCTL2_INIT6,
2911 	DDR_PCTL2_INIT7,
2912 	DDR_PCTL2_DRAMTMG0,
2913 	DDR_PCTL2_DRAMTMG1,
2914 	DDR_PCTL2_DRAMTMG2,
2915 	DDR_PCTL2_DRAMTMG3,
2916 	DDR_PCTL2_DRAMTMG4,
2917 	DDR_PCTL2_DRAMTMG5,
2918 	DDR_PCTL2_DRAMTMG6,
2919 	DDR_PCTL2_DRAMTMG7,
2920 	DDR_PCTL2_DRAMTMG8,
2921 	DDR_PCTL2_DRAMTMG9,
2922 	DDR_PCTL2_DRAMTMG12,
2923 	DDR_PCTL2_DRAMTMG13,
2924 	DDR_PCTL2_DRAMTMG14,
2925 	DDR_PCTL2_ZQCTL0,
2926 	DDR_PCTL2_DFITMG0,
2927 	DDR_PCTL2_ODTCFG
2928 };
2929 
2930 static const u16 phy_need_update_reg[] = {
2931 	0x14,
2932 	0x18,
2933 	0x1c
2934 };
2935 
2936 static void pre_set_rate(struct dram_info *dram,
2937 			 struct rv1126_sdram_params *sdram_params,
2938 			 u32 dst_fsp, u32 dst_fsp_lp4)
2939 {
2940 	u32 i, j, find;
2941 	void __iomem *pctl_base = dram->pctl;
2942 	void __iomem *phy_base = dram->phy;
2943 	u32 phy_offset;
2944 	u32 mr_tmp;
2945 	u32 dramtype = sdram_params->base.dramtype;
2946 
2947 	sw_set_req(dram);
2948 	/* pctl timing update */
2949 	for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2950 		for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2951 		     j++) {
2952 			if (sdram_params->pctl_regs.pctl[j][0] ==
2953 			    pctl_need_update_reg[i]) {
2954 				writel(sdram_params->pctl_regs.pctl[j][1],
2955 				       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2956 				       pctl_need_update_reg[i]);
2957 				find = j;
2958 				break;
2959 			}
2960 		}
2961 	}
2962 
2963 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2964 	u32 tmp, trefi;
2965 
2966 	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2967 	trefi = (tmp >> 16) & 0xfff;
2968 	writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2969 	       pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2970 #endif
2971 
2972 	sw_set_ack(dram);
2973 
2974 	/* phy timing update */
2975 	if (dst_fsp == 0)
2976 		phy_offset = 0;
2977 	else
2978 		phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2979 	/* cl cwl al update */
2980 	for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2981 		for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2982 		     j++) {
2983 			if (sdram_params->phy_regs.phy[j][0] ==
2984 			    phy_need_update_reg[i]) {
2985 				writel(sdram_params->phy_regs.phy[j][1],
2986 				       phy_base + phy_offset +
2987 				       phy_need_update_reg[i]);
2988 				find = j;
2989 				break;
2990 			}
2991 		}
2992 	}
2993 
2994 	set_ds_odt(dram, sdram_params, dst_fsp);
2995 	if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2996 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2997 			       DDR_PCTL2_INIT4);
2998 		/* MR13 */
2999 		pctl_write_mr(dram->pctl, 3, 13,
3000 			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3001 			       PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
3002 			      ((0x2 << 6) >> dst_fsp_lp4), dramtype);
3003 		writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3004 				      PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
3005 				      ((0x2 << 6) >> dst_fsp_lp4),
3006 				       PHY_REG(phy_base, 0x1b));
3007 		/* MR3 */
3008 		pctl_write_mr(dram->pctl, 3, 3,
3009 			      mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
3010 			      PCTL2_MR_MASK,
3011 			      dramtype);
3012 		writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
3013 		       PHY_REG(phy_base, 0x19));
3014 
3015 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3016 			       DDR_PCTL2_INIT3);
3017 		/* MR1 */
3018 		pctl_write_mr(dram->pctl, 3, 1,
3019 			      mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
3020 			      PCTL2_MR_MASK,
3021 			      dramtype);
3022 		writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
3023 		       PHY_REG(phy_base, 0x17));
3024 		/* MR2 */
3025 		pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
3026 			      dramtype);
3027 		writel(mr_tmp & PCTL2_MR_MASK,
3028 		       PHY_REG(phy_base, 0x18));
3029 
3030 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3031 			       DDR_PCTL2_INIT6);
3032 		/* MR11 */
3033 		pctl_write_mr(dram->pctl, 3, 11,
3034 			      mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3035 			      dramtype);
3036 		writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3037 		       PHY_REG(phy_base, 0x1a));
3038 		/* MR12 */
3039 		pctl_write_mr(dram->pctl, 3, 12,
3040 			      mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
3041 			      dramtype);
3042 
3043 		mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3044 			       DDR_PCTL2_INIT7);
3045 		/* MR22 */
3046 		pctl_write_mr(dram->pctl, 3, 22,
3047 			      mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3048 			      dramtype);
3049 		writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3050 		       PHY_REG(phy_base, 0x1d));
3051 		/* MR14 */
3052 		pctl_write_mr(dram->pctl, 3, 14,
3053 			      mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3054 			      dramtype);
3055 		writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3056 		       PHY_REG(phy_base, 0x1c));
3057 	}
3058 
3059 	update_noc_timing(dram, sdram_params);
3060 }
3061 
3062 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
3063 			   struct rv1126_sdram_params *sdram_params)
3064 {
3065 	void __iomem *pctl_base = dram->pctl;
3066 	void __iomem *phy_base = dram->phy;
3067 	struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
3068 	u32 temp, temp1;
3069 	struct ddr2_3_4_lp2_3_info *ddr_info;
3070 
3071 	ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
3072 
3073 	p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
3074 
3075 	if (sdram_params->base.dramtype == LPDDR4 ||
3076 	    sdram_params->base.dramtype == LPDDR4X) {
3077 		p_fsp_param->rd_odt_up_en = 0;
3078 		p_fsp_param->rd_odt_down_en = 1;
3079 	} else {
3080 		p_fsp_param->rd_odt_up_en =
3081 			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
3082 		p_fsp_param->rd_odt_down_en =
3083 			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
3084 	}
3085 
3086 	if (p_fsp_param->rd_odt_up_en)
3087 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
3088 	else if (p_fsp_param->rd_odt_down_en)
3089 		p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
3090 	else
3091 		p_fsp_param->rd_odt = 0;
3092 	p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
3093 	p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
3094 	p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
3095 	p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
3096 	p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
3097 
3098 	if (sdram_params->base.dramtype == DDR3) {
3099 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3100 			     DDR_PCTL2_INIT3);
3101 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3102 		p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
3103 		p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
3104 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3105 	} else if (sdram_params->base.dramtype == DDR4) {
3106 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3107 			     DDR_PCTL2_INIT3);
3108 		temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3109 		p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
3110 		p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
3111 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3112 	} else if (sdram_params->base.dramtype == LPDDR3) {
3113 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3114 			     DDR_PCTL2_INIT4);
3115 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3116 		p_fsp_param->ds_pdds = temp & 0xf;
3117 
3118 		p_fsp_param->dq_odt = lp3_odt_value;
3119 		p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3120 	} else if (sdram_params->base.dramtype == LPDDR4 ||
3121 		   sdram_params->base.dramtype == LPDDR4X) {
3122 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3123 			     DDR_PCTL2_INIT4);
3124 		temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3125 		p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
3126 
3127 		temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3128 			     DDR_PCTL2_INIT6);
3129 		temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
3130 		p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
3131 		p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
3132 
3133 		temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
3134 			   readl(PHY_REG(phy_base, 0x3ce)));
3135 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3136 			    readl(PHY_REG(phy_base, 0x3de)));
3137 		p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3138 		temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3139 			   readl(PHY_REG(phy_base, 0x3cf)));
3140 		temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3141 			    readl(PHY_REG(phy_base, 0x3df)));
3142 		p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3143 		p_fsp_param->vref_ca[0] |=
3144 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3145 		p_fsp_param->vref_ca[1] |=
3146 			(readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3147 
3148 		p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3149 					      3) & 0x1;
3150 	}
3151 
3152 	p_fsp_param->noc_timings.ddrtiminga0 =
3153 		sdram_params->ch.noc_timings.ddrtiminga0;
3154 	p_fsp_param->noc_timings.ddrtimingb0 =
3155 		sdram_params->ch.noc_timings.ddrtimingb0;
3156 	p_fsp_param->noc_timings.ddrtimingc0 =
3157 		sdram_params->ch.noc_timings.ddrtimingc0;
3158 	p_fsp_param->noc_timings.devtodev0 =
3159 		sdram_params->ch.noc_timings.devtodev0;
3160 	p_fsp_param->noc_timings.ddrmode =
3161 		sdram_params->ch.noc_timings.ddrmode;
3162 	p_fsp_param->noc_timings.ddr4timing =
3163 		sdram_params->ch.noc_timings.ddr4timing;
3164 	p_fsp_param->noc_timings.agingx0 =
3165 		sdram_params->ch.noc_timings.agingx0;
3166 	p_fsp_param->noc_timings.aging0 =
3167 		sdram_params->ch.noc_timings.aging0;
3168 	p_fsp_param->noc_timings.aging1 =
3169 		sdram_params->ch.noc_timings.aging1;
3170 	p_fsp_param->noc_timings.aging2 =
3171 		sdram_params->ch.noc_timings.aging2;
3172 	p_fsp_param->noc_timings.aging3 =
3173 		sdram_params->ch.noc_timings.aging3;
3174 
3175 	p_fsp_param->flag = FSP_FLAG;
3176 }
3177 
3178 #ifndef CONFIG_SPL_KERNEL_BOOT
3179 static void copy_fsp_param_to_ddr(void)
3180 {
3181 	memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3182 	       sizeof(fsp_param));
3183 }
3184 #endif
3185 
3186 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3187 			     struct sdram_cap_info *cap_info, u32 dram_type,
3188 			     u32 freq)
3189 {
3190 	u64 cs0_cap;
3191 	u32 die_cap;
3192 	u32 trfc_ns, trfc4_ns;
3193 	u32 trfc, txsnr;
3194 	u32 txs_abort_fast = 0;
3195 	u32 tmp;
3196 
3197 	cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3198 	die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3199 
3200 	switch (dram_type) {
3201 	case DDR3:
3202 		if (die_cap <= DIE_CAP_512MBIT)
3203 			trfc_ns = 90;
3204 		else if (die_cap <= DIE_CAP_1GBIT)
3205 			trfc_ns = 110;
3206 		else if (die_cap <= DIE_CAP_2GBIT)
3207 			trfc_ns = 160;
3208 		else if (die_cap <= DIE_CAP_4GBIT)
3209 			trfc_ns = 260;
3210 		else
3211 			trfc_ns = 350;
3212 		txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3213 		break;
3214 
3215 	case DDR4:
3216 		if (die_cap <= DIE_CAP_2GBIT) {
3217 			trfc_ns = 160;
3218 			trfc4_ns = 90;
3219 		} else if (die_cap <= DIE_CAP_4GBIT) {
3220 			trfc_ns = 260;
3221 			trfc4_ns = 110;
3222 		} else if (die_cap <= DIE_CAP_8GBIT) {
3223 			trfc_ns = 350;
3224 			trfc4_ns = 160;
3225 		} else {
3226 			trfc_ns = 550;
3227 			trfc4_ns = 260;
3228 		}
3229 		txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3230 		txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3231 		break;
3232 
3233 	case LPDDR3:
3234 		if (die_cap <= DIE_CAP_4GBIT)
3235 			trfc_ns = 130;
3236 		else
3237 			trfc_ns = 210;
3238 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3239 		break;
3240 
3241 	case LPDDR4:
3242 	case LPDDR4X:
3243 		if (die_cap <= DIE_CAP_2GBIT)
3244 			trfc_ns = 130;
3245 		else if (die_cap <= DIE_CAP_4GBIT)
3246 			trfc_ns = 180;
3247 		else if (die_cap <= DIE_CAP_8GBIT)
3248 			trfc_ns = 280;
3249 		else
3250 			trfc_ns = 380;
3251 		txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3252 		break;
3253 
3254 	default:
3255 		return;
3256 	}
3257 	trfc = (trfc_ns * freq + 999) / 1000;
3258 
3259 	for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3260 		switch (pctl_regs->pctl[i][0]) {
3261 		case DDR_PCTL2_RFSHTMG:
3262 			tmp = pctl_regs->pctl[i][1];
3263 			/* t_rfc_min */
3264 			tmp &= ~((u32)0x3ff);
3265 			tmp |= ((trfc + 1) / 2) & 0x3ff;
3266 			pctl_regs->pctl[i][1] = tmp;
3267 			break;
3268 
3269 		case DDR_PCTL2_DRAMTMG8:
3270 			if (dram_type == DDR3 || dram_type == DDR4) {
3271 				tmp = pctl_regs->pctl[i][1];
3272 				/* t_xs_x32 */
3273 				tmp &= ~((u32)0x7f);
3274 				tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;
3275 
3276 				if (dram_type == DDR4) {
3277 					/* t_xs_abort_x32 */
3278 					tmp &= ~((u32)(0x7f << 16));
3279 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
3280 					/* t_xs_fast_x32 */
3281 					tmp &= ~((u32)(0x7f << 24));
3282 					tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
3283 				}
3284 
3285 				pctl_regs->pctl[i][1] = tmp;
3286 			}
3287 			break;
3288 
3289 		case DDR_PCTL2_DRAMTMG14:
3290 			if (dram_type == LPDDR3 ||
3291 			    dram_type == LPDDR4 || dram_type == LPDDR4X) {
3292 				tmp = pctl_regs->pctl[i][1];
3293 				/* t_xsr */
3294 				tmp &= ~((u32)0xfff);
3295 				tmp |= ((txsnr + 1) / 2) & 0xfff;
3296 				pctl_regs->pctl[i][1] = tmp;
3297 			}
3298 			break;
3299 
3300 		default:
3301 			break;
3302 		}
3303 	}
3304 }
3305 
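/*
 * tRFC arithmetic in pctl_modify_trfc() above, worked through for a
 * 4 Gbit DDR3 die at 924 MHz: trfc_ns = 260, so
 * trfc = (260 * 924 + 999) / 1000 = 241 clocks, stored in
 * RFSHTMG.t_rfc_min as (241 + 1) / 2 = 121 (the field counts 2x
 * clocks), and txsnr = (270 * 924 + 999) / 1000 = 250 clocks feeds
 * DRAMTMG8.t_xs_x32 as (250 + 63) / 64 + 1 = 5.
 */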
3306 void ddr_set_rate(struct dram_info *dram,
3307 		  struct rv1126_sdram_params *sdram_params,
3308 		  u32 freq, u32 cur_freq, u32 dst_fsp,
3309 		  u32 dst_fsp_lp4, u32 training_en)
3310 {
3311 	u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3312 	u32 mr_tmp;
3313 	u32 lp_stat;
3314 	u32 dramtype = sdram_params->base.dramtype;
3315 	struct rv1126_sdram_params *sdram_params_new;
3316 	void __iomem *pctl_base = dram->pctl;
3317 	void __iomem *phy_base = dram->phy;
3318 	int delay = 1000;
3319 
3320 	lp_stat = low_power_update(dram, 0);
3321 	sdram_params_new = get_default_sdram_config(freq);
3322 	sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3323 	sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3324 
3325 	pctl_modify_trfc(&sdram_params_new->pctl_regs,
3326 			 &sdram_params->ch.cap_info, dramtype, freq);
3327 	pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3328 
3329 	while ((readl(pctl_base + DDR_PCTL2_STAT) &
3330 			 PCTL2_OPERATING_MODE_MASK) ==
3331 			 PCTL2_OPERATING_MODE_SR)
3332 		continue;
3333 
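	/*
	 * Editor's note: MR1 sits in the low half of INIT3. Per JEDEC,
	 * DDR3 MR1 bit 0 set means "DLL disable", while DDR4 MR1 bit 0 set
	 * means "DLL enable", which is why the test is inverted between the
	 * two types here and below for the current set point.
	 */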
	dest_dll_off = 0;
	dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			  DDR_PCTL2_INIT3);
	if ((dramtype == DDR3 && (dst_init3 & 1)) ||
	    (dramtype == DDR4 && !(dst_init3 & 1)))
		dest_dll_off = 1;

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			  DDR_PCTL2_INIT3);
	cur_init3 &= PCTL2_MR_MASK;
	cur_dll_off = 1;
	if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
	    (dramtype == DDR4 && (cur_init3 & 1)))
		cur_dll_off = 0;

	if (!cur_dll_off) {
		if (dramtype == DDR3)
			cur_init3 |= 1;
		else
			cur_init3 &= ~1;
		pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
	}

	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	enter_sr(dram, 1);

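	/*
	 * With the DRAM parked in self-refresh, the PHY bufferen signal is
	 * asserted and the controller is quiesced; each sw_set_req()/
	 * sw_set_ack() pair below is assumed to be the uMCTL2 quasi-dynamic
	 * register programming handshake.
	 */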
	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
	       PMUGRF_CON_DDRPHY_BUFFEREN_EN,
	       &dram->pmugrf->soc_con[0]);
	sw_set_req(dram);
	clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
		     PCTL2_DFI_INIT_COMPLETE_EN);
	sw_set_ack(dram);

	sw_set_req(dram);
	if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
		setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
	else
		clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);

	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
		     PCTL2_DIS_SRX_ZQCL);
	sw_set_ack(dram);

	writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);

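	/*
	 * Hold the PHY analog/digital blocks in reset while the PLLs are
	 * retuned; rkclk_set_dpll() is given freq/2, suggesting the
	 * controller core clock runs at half the DRAM clock (1:2 ratio).
	 */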
	clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
	rkclk_set_dpll(dram, freq * MHz / 2);
	phy_pll_set(dram, freq * MHz, 0);
	phy_pll_set(dram, freq * MHz, 1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);

	writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
			PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
			&dram->pmugrf->soc_con[0]);
	writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
	       &dram->cru->clkgate_con[21]);
	writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
					(0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
					(0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
			BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
	while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
	       PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) {
		udelay(1);
		if (delay-- <= 0) {
			printascii("ERROR: timeout waiting for DFI_INIT_COMPLETE\n");
			while (1)
				;
		}
	}

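	/*
	 * Select the new set point: bit 29 of MSTR and the low bits of
	 * MSTR2 are assumed to be the uMCTL2 frequency_mode and
	 * target_frequency fields, and PHY register 0xc is switched to the
	 * matching PHY set point.
	 */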
	sw_set_req(dram);
	setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
	clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
	sw_set_ack(dram);
	update_refresh_reg(dram);
	clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);

	enter_sr(dram, 0);

	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

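	/*
	 * Re-issue the mode registers for the new set point from the shadow
	 * INIT3/INIT4 (and, for DDR4, INIT6/INIT7) register values,
	 * including a DDR3/DDR4 DLL reset when the DLL remains enabled.
	 */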
	mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
	if (dramtype == LPDDR3) {
		pctl_write_mr(dram->pctl, 3, 1,
			      (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 3,
			      (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
			      PCTL2_MR_MASK,
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
	} else if ((dramtype == DDR3) || (dramtype == DDR4)) {
		pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
			      dramtype);
		if (!dest_dll_off) {
			pctl_write_mr(dram->pctl, 3, 0,
				      ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
				      PCTL2_MR_MASK) | DDR3_DLL_RESET,
				      dramtype);
			udelay(2);
		}
		pctl_write_mr(dram->pctl, 3, 0,
			      (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
			       PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
			      dramtype);
		pctl_write_mr(dram->pctl, 3, 2,
			      ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
			       PCTL2_MR_MASK), dramtype);
		if (dramtype == DDR4) {
			pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
				      dramtype);
			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT6);
			pctl_write_mr(dram->pctl, 3, 4,
				      (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
				       PCTL2_MR_MASK,
				      dramtype);
			pctl_write_mr(dram->pctl, 3, 5,
				      mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);

			mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				       DDR_PCTL2_INIT7);
			pctl_write_mr(dram->pctl, 3, 6,
				      mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
				      PCTL2_MR_MASK,
				      dramtype);
		}
	} else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
		pctl_write_mr(dram->pctl, 3, 13,
			      ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
			       PCTL2_MR_MASK) & (~(BIT(7)))) |
			      dst_fsp_lp4 << 7, dramtype);
	}
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
		     PCTL2_DIS_AUTO_REFRESH);
	update_refresh_reg(dram);

	/* training */
	high_freq_training(dram, sdram_params_new, dst_fsp);
	low_power_update(dram, lp_stat);

	save_fsp_param(dram, dst_fsp, sdram_params_new);
}

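/*
 * Train every frequency set point in turn (editor's summary): f1..f3 go
 * into FSPs 1..3 first, then f0, the final boot frequency, into FSP 0.
 * ddr_set_rate() saves per-FSP training results so later frequency
 * switches can reuse them without retraining.
 */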
static void ddr_set_rate_for_fsp(struct dram_info *dram,
				 struct rv1126_sdram_params *sdram_params)
{
	struct ddr2_3_4_lp2_3_info *ddr_info;
	u32 f0;
	u32 dramtype = sdram_params->base.dramtype;
#ifndef CONFIG_SPL_KERNEL_BOOT
	u32 f1, f2, f3;
#endif

	ddr_info = get_ddr_drv_odt_info(dramtype);
	if (!ddr_info)
		return;

	f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
	     DDR_FREQ_MASK;

#ifndef CONFIG_SPL_KERNEL_BOOT
	memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
	memset((void *)&fsp_param, 0, sizeof(fsp_param));

	f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
	     DDR_FREQ_MASK;
	f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
	     DDR_FREQ_MASK;
	f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
	     DDR_FREQ_MASK;
#endif

	if (get_wrlvl_val(dram, sdram_params))
		printascii("failed to get wrlvl value\n");

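	/*
	 * Walk the set points: f1 -> FSP 1, f2 -> FSP 2, f3 -> FSP 3, then
	 * finish on f0 in FSP 0. With CONFIG_SPL_KERNEL_BOOT only the final
	 * frequency is trained, presumably to shorten boot time (editor's
	 * assumption).
	 */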
#ifndef CONFIG_SPL_KERNEL_BOOT
	printascii("change to: ");
	printdec(f1);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f1,
		     sdram_params->base.ddr_freq, 1, 1, 1);
	printascii("change to: ");
	printdec(f2);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
	printascii("change to: ");
	printdec(f3);
	printascii("MHz\n");
	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
#endif
	printascii("change to: ");
	printdec(f0);
	printascii("MHz(final freq)\n");
#ifndef CONFIG_SPL_KERNEL_BOOT
	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
#else
	ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
#endif
}

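/*
 * Look up the UART settings from the loader-provided common_info blob;
 * index offsets are stored in 32-bit words, hence the "* 4" scaling when
 * resolving the global_info pointer (same pattern as sdram_init() below).
 */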
int get_uart_config(void)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	return gbl_info->uart_info;
}

/* return: 0 = success, other = fail */
int sdram_init(void)
{
	struct rv1126_sdram_params *sdram_params;
	int ret = 0;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct global_info *gbl_info;

	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
	dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;

#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
	printascii("extended temp support\n");
#endif
	if (index->version_info != 2 ||
	    (index->global_index.size != sizeof(struct global_info) / 4) ||
	    (index->ddr3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->ddr4_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp3_index.size !=
		sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
	    (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
	    (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
	    index->global_index.offset == 0 ||
	    index->ddr3_index.offset == 0 ||
	    index->ddr4_index.offset == 0 ||
	    index->lp3_index.offset == 0 ||
	    index->lp4_index.offset == 0 ||
	    index->lp4x_index.offset == 0) {
		printascii("common info error\n");
		goto error;
	}

	gbl_info = (struct global_info *)((void *)common_info +
		index->global_index.offset * 4);

	dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
	dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);

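	/*
	 * Editor's note: detection starts from the first entry of
	 * sdram_configs; when dram type 8 is selected the LPDDR4 table is
	 * reused with the type forced to LPDDR4X. For DDR3/DDR4, bit 10 of
	 * the first pctl table entry (assumed to be MSTR.en_2t_timing_mode)
	 * is set or cleared according to the board's 2T configuration.
	 */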
	sdram_params = &sdram_configs[0];
	#if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
	for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
		sdram_configs[j].base.dramtype = LPDDR4X;
	#endif
	if (sdram_params->base.dramtype == DDR3 ||
	    sdram_params->base.dramtype == DDR4) {
		if (DDR_2T_INFO(gbl_info->info_2t))
			sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
		else
			sdram_params->pctl_regs.pctl[0][1] &=
				~(0x1 << 10);
	}
	ret = sdram_init_detect(&dram_info, sdram_params);
	if (ret) {
		sdram_print_dram_type(sdram_params->base.dramtype);
		printascii(", ");
		printdec(sdram_params->base.ddr_freq);
		printascii("MHz\n");
		goto error;
	}
	print_ddr_info(sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
				  (u8)sdram_params->ch.cap_info.rank);
#endif

	ddr_set_rate_for_fsp(&dram_info, sdram_params);
#ifndef CONFIG_SPL_KERNEL_BOOT
	copy_fsp_param_to_ddr();
#endif

	ddr_set_atags(&dram_info, sdram_params);
#if defined(CONFIG_CMD_DDR_TEST_TOOL)
	save_rw_trn_result_to_ddr(&rw_trn_result);
#endif

	printascii("out\n");

	return ret;
error:
	printascii("error\n");
	return -1;
}
#endif /* CONFIG_TPL_BUILD */