1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
4 */
5
6 #include <common.h>
7 #include <debug_uart.h>
8 #include <dm.h>
9 #include <ram.h>
10 #include <syscon.h>
11 #include <asm/io.h>
12 #include <asm/arch/clock.h>
13 #include <asm/arch/hardware.h>
14 #include <asm/arch/rk_atags.h>
15 #include <asm/arch/cru_rv1126.h>
16 #include <asm/arch/grf_rv1126.h>
17 #include <asm/arch/sdram_common.h>
18 #include <asm/arch/sdram_rv1126.h>
19
20 /* define training flag */
21 #define CA_TRAINING (0x1 << 0)
22 #define READ_GATE_TRAINING (0x1 << 1)
23 #define WRITE_LEVELING (0x1 << 2)
24 #define WRITE_TRAINING (0x1 << 3)
25 #define READ_TRAINING (0x1 << 4)
26 #define FULL_TRAINING (0xff)
27
28 /* #define DDR4_READ_GATE_PREAMBLE_MODE */
29 #ifndef DDR4_READ_GATE_PREAMBLE_MODE
30 /* DDR4 read gate normal mode conflicts with 1nCK preamble */
31 #define DDR4_READ_GATE_2NCK_PREAMBLE
32 #endif
33
34 #define SKEW_RX_SIGNAL (0)
35 #define SKEW_TX_SIGNAL (1)
36 #define SKEW_CA_SIGNAL (2)
37
38 #define DESKEW_MDF_ABS_VAL (0)
39 #define DESKEW_MDF_DIFF_VAL (1)
40
41 #ifdef CONFIG_TPL_BUILD
42 #ifndef CONFIG_TPL_TINY_FRAMEWORK
43 #error please defined CONFIG_TPL_TINY_FRAMEWORK for RV1126 !!!
44 #endif
45 #endif
46
47 #ifdef CONFIG_TPL_BUILD
48
49 struct dram_info {
50 void __iomem *pctl;
51 void __iomem *phy;
52 struct rv1126_cru *cru;
53 struct msch_regs *msch;
54 struct rv1126_ddrgrf *ddrgrf;
55 struct rv1126_grf *grf;
56 struct ram_info info;
57 struct rv1126_pmugrf *pmugrf;
58 u32 sr_idle;
59 u32 pd_idle;
60 };
61
62 #define GRF_BASE_ADDR 0xfe000000
63 #define PMU_GRF_BASE_ADDR 0xfe020000
64 #define DDR_GRF_BASE_ADDR 0xfe030000
65 #define BUS_SGRF_BASE_ADDR 0xfe0a0000
66 #define SERVER_MSCH_BASE_ADDR 0xfe800000
67 #define CRU_BASE_ADDR 0xff490000
68 #define DDR_PHY_BASE_ADDR 0xff4a0000
69 #define UPCTL2_BASE_ADDR 0xffa50000
70
71 #define SGRF_SOC_CON2 0x8
72 #define SGRF_SOC_CON12 0x30
73 #define SGRF_SOC_CON13 0x34
74
75 struct dram_info dram_info;
76
77 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 3)
78 struct rv1126_sdram_params sdram_configs[] = {
79 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-328.inc"
80 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-396.inc"
81 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-528.inc"
82 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-664.inc"
83 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-784.inc"
84 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-924.inc"
85 #include "sdram_inc/rv1126/sdram-rv1126-ddr3-detect-1056.inc"
86 };
87 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 0)
88 struct rv1126_sdram_params sdram_configs[] = {
89 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-328.inc"
90 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-396.inc"
91 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-528.inc"
92 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-664.inc"
93 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-784.inc"
94 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-924.inc"
95 #include "sdram_inc/rv1126/sdram-rv1126-ddr4-detect-1056.inc"
96 };
97 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 6)
98 struct rv1126_sdram_params sdram_configs[] = {
99 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-328.inc"
100 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-396.inc"
101 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-528.inc"
102 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-664.inc"
103 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-784.inc"
104 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-924.inc"
105 #include "sdram_inc/rv1126/sdram-rv1126-lpddr3-detect-1056.inc"
106 };
107 #elif (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 7) || (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
108 struct rv1126_sdram_params sdram_configs[] = {
109 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-328.inc"
110 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-396.inc"
111 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-528.inc"
112 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-664.inc"
113 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-784.inc"
114 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-924.inc"
115 #include "sdram_inc/rv1126/sdram-rv1126-lpddr4-detect-1056.inc"
116 };
117 #endif
118
119 u32 common_info[] = {
120 #include "sdram_inc/rv1126/sdram-rv1126-loader_params.inc"
121 };
122
123 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
124 static struct rw_trn_result rw_trn_result;
125 #endif
126
127 static struct rv1126_fsp_param fsp_param[MAX_IDX];
128
129 static u8 lp3_odt_value;
130
131 static s8 wrlvl_result[2][4];
132
133 /* DDR configuration 0-9 */
134 u16 ddr_cfg_2_rbc[] = {
135 ((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
136 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
137 ((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
138 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
139 ((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
140 ((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
141 ((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
142 ((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
143 ((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
144 ((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2) /* 9 */
145 };
146
147 /* DDR configuration 10-21 */
148 u8 ddr4_cfg_2_rbc[] = {
149 ((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
150 ((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
151 ((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
152 ((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
153 ((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
154 ((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
155 ((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
156 ((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
157 ((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
158 ((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
159 ((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
160 ((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0) /* 21 */
161 };
162
163 /* DDR configuration 22-28 */
164 u16 ddr_cfg_2_rbc_p2[] = {
165 ((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
166 ((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
167 ((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
168 ((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
169 ((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
170 ((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
171 ((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3) /* 28 */
172 };
173
174 u8 d4_rbc_2_d3_rbc[][2] = {
175 {10, 0},
176 {11, 2},
177 {12, 23},
178 {13, 1},
179 {14, 28},
180 {15, 24},
181 {16, 27},
182 {17, 7},
183 {18, 6},
184 {19, 25},
185 {20, 26},
186 {21, 3}
187 };
188
189 u32 addrmap[29][9] = {
190 {24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
191 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
192 {23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
193 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
194 {23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
195 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
196 {22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
197 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
198 {24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
199 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
200 {6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
201 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
202 {7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
203 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
204 {8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
205 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
206 {22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
207 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
208 {23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
209 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */
210
211 {24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
212 0x08080808, 0x00000f0f, 0x0801}, /* 10 */
213 {23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
214 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
215 {24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
216 0x07070707, 0x00000f07, 0x0700}, /* 12 */
217 {23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
218 0x07070707, 0x00000f0f, 0x0700}, /* 13 */
219 {24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
220 0x07070707, 0x00000f07, 0x3f01}, /* 14 */
221 {23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
222 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
223 {23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
224 0x06060606, 0x00000f06, 0x3f00}, /* 16 */
225 {8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
226 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
227 {7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
228 0x08080808, 0x00000f0f, 0x0700}, /* 18 */
229 {7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
230 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */
231
232 {6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
233 0x07070707, 0x00000f07, 0x3f00}, /* 20 */
234 {23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
235 0x06060606, 0x00000f06, 0x0600}, /* 21 */
236 {21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
237 0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */
238
239 {24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
240 0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
241 {23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
242 0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
243 {7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
244 0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
245 {6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
246 0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
247 {23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
248 0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
249 {24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
250 0x07070707, 0x00000f07, 0x3f3f} /* 28 */
251 };
252
253 static u8 dq_sel[22][3] = {
254 {0x0, 0x17, 0x22},
255 {0x1, 0x18, 0x23},
256 {0x2, 0x19, 0x24},
257 {0x3, 0x1a, 0x25},
258 {0x4, 0x1b, 0x26},
259 {0x5, 0x1c, 0x27},
260 {0x6, 0x1d, 0x28},
261 {0x7, 0x1e, 0x29},
262 {0x8, 0x16, 0x21},
263 {0x9, 0x1f, 0x2a},
264 {0xa, 0x20, 0x2b},
265 {0x10, 0x1, 0xc},
266 {0x11, 0x2, 0xd},
267 {0x12, 0x3, 0xe},
268 {0x13, 0x4, 0xf},
269 {0x14, 0x5, 0x10},
270 {0x15, 0x6, 0x11},
271 {0x16, 0x7, 0x12},
272 {0x17, 0x8, 0x13},
273 {0x18, 0x0, 0xb},
274 {0x19, 0x9, 0x14},
275 {0x1a, 0xa, 0x15}
276 };
277
278 static u16 grp_addr[4] = {
279 ADD_GROUP_CS0_A,
280 ADD_GROUP_CS0_B,
281 ADD_GROUP_CS1_A,
282 ADD_GROUP_CS1_B
283 };
284
285 static u8 wrlvl_result_offset[2][4] = {
286 {0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
287 {0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
288 };
289
290 static u16 dqs_dq_skew_adr[16] = {
291 0x170 + 0, /* SKEW_UPDATE_RX_CS0_DQS0 */
292 0x170 + 0xb, /* SKEW_UPDATE_RX_CS0_DQS1 */
293 0x1d0 + 0, /* SKEW_UPDATE_RX_CS0_DQS2 */
294 0x1d0 + 0xb, /* SKEW_UPDATE_RX_CS0_DQS3 */
295 0x1a0 + 0, /* SKEW_UPDATE_RX_CS1_DQS0 */
296 0x1a0 + 0xb, /* SKEW_UPDATE_RX_CS1_DQS1 */
297 0x200 + 0, /* SKEW_UPDATE_RX_CS1_DQS2 */
298 0x200 + 0xb, /* SKEW_UPDATE_RX_CS1_DQS3 */
299 0x170 + 0x16, /* SKEW_UPDATE_TX_CS0_DQS0 */
300 0x170 + 0x21, /* SKEW_UPDATE_TX_CS0_DQS1 */
301 0x1d0 + 0x16, /* SKEW_UPDATE_TX_CS0_DQS2 */
302 0x1d0 + 0x21, /* SKEW_UPDATE_TX_CS0_DQS3 */
303 0x1a0 + 0x16, /* SKEW_UPDATE_TX_CS1_DQS0 */
304 0x1a0 + 0x21, /* SKEW_UPDATE_TX_CS1_DQS1 */
305 0x200 + 0x16, /* SKEW_UPDATE_TX_CS1_DQS2 */
306 0x200 + 0x21, /* SKEW_UPDATE_TX_CS1_DQS3 */
307 };
308
rkclk_ddr_reset(struct dram_info * dram,u32 ctl_srstn,u32 ctl_psrstn,u32 phy_srstn,u32 phy_psrstn)309 static void rkclk_ddr_reset(struct dram_info *dram,
310 u32 ctl_srstn, u32 ctl_psrstn,
311 u32 phy_srstn, u32 phy_psrstn)
312 {
313 writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
314 UPCTL2_ASRSTN_REQ(ctl_srstn),
315 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);
316
317 writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
318 &dram->cru->softrst_con[12]);
319 }
320
rkclk_set_dpll(struct dram_info * dram,unsigned int hz)321 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
322 {
323 unsigned int refdiv, postdiv1, postdiv2, fbdiv;
324 int delay = 1000;
325 u32 mhz = hz / MHz;
326 struct global_info *gbl_info;
327 struct sdram_head_info_index_v2 *index =
328 (struct sdram_head_info_index_v2 *)common_info;
329 u32 ssmod_info;
330 u32 dsmpd = 1;
331
332 gbl_info = (struct global_info *)((void *)common_info +
333 index->global_index.offset * 4);
334 ssmod_info = gbl_info->info_2t;
335 refdiv = 1;
336 if (mhz <= 100) {
337 postdiv1 = 6;
338 postdiv2 = 4;
339 } else if (mhz <= 150) {
340 postdiv1 = 4;
341 postdiv2 = 4;
342 } else if (mhz <= 200) {
343 postdiv1 = 6;
344 postdiv2 = 2;
345 } else if (mhz <= 300) {
346 postdiv1 = 4;
347 postdiv2 = 2;
348 } else if (mhz <= 400) {
349 postdiv1 = 6;
350 postdiv2 = 1;
351 } else {
352 postdiv1 = 4;
353 postdiv2 = 1;
354 }
355 fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
356
357 writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);
358
359 writel(0x1f000000, &dram->cru->clksel_con[64]);
360 writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
361 /* enable ssmod */
362 if (PLL_SSMOD_SPREAD(ssmod_info)) {
363 dsmpd = 0;
364 clrsetbits_le32(&dram->cru->pll[1].con2,
365 0xffffff << 0, 0x0 << 0);
366 writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
367 SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
368 SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
369 SSMOD_RESET(0) |
370 SSMOD_DIS_SSCG(0) |
371 SSMOD_BP(0),
372 &dram->cru->pll[1].con3);
373 }
374 writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
375 &dram->cru->pll[1].con1);
376
377 while (delay > 0) {
378 udelay(1);
379 if (LOCK(readl(&dram->cru->pll[1].con1)))
380 break;
381 delay--;
382 }
383 if (delay <= 0)
384 printascii("ERROR: DPLL lock timeout!\n");
385
386 writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
387 }
388
rkclk_configure_ddr(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)389 static void rkclk_configure_ddr(struct dram_info *dram,
390 struct rv1126_sdram_params *sdram_params)
391 {
392 /* for ddr phy need freq / 2 */
393 rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
394 }
395
396 static unsigned int
calculate_ddrconfig(struct rv1126_sdram_params * sdram_params)397 calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
398 {
399 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
400 u32 cs, bw, die_bw, col, row, bank;
401 u32 cs1_row;
402 u32 i, tmp;
403 u32 ddrconf = -1;
404 u32 row_3_4;
405
406 cs = cap_info->rank;
407 bw = cap_info->bw;
408 die_bw = cap_info->dbw;
409 col = cap_info->col;
410 row = cap_info->cs0_row;
411 cs1_row = cap_info->cs1_row;
412 bank = cap_info->bk;
413 row_3_4 = cap_info->row_3_4;
414
415 if (sdram_params->base.dramtype == DDR4) {
416 if (cs == 2 && row == cs1_row && !row_3_4) {
417 tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
418 die_bw;
419 for (i = 17; i < 21; i++) {
420 if (((tmp & 0xf) ==
421 (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
422 ((tmp & 0x70) <=
423 (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
424 ddrconf = i;
425 goto out;
426 }
427 }
428 }
429
430 tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
431 for (i = 10; i < 21; i++) {
432 if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
433 ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
434 ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
435 ddrconf = i;
436 goto out;
437 }
438 }
439 } else {
440 if (cs == 2 && row == cs1_row && bank == 3) {
441 for (i = 5; i < 8; i++) {
442 if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
443 0x7)) &&
444 ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
445 (0x7 << 5))) {
446 ddrconf = i;
447 goto out;
448 }
449 }
450 }
451
452 tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
453 ((bw + col - 10) << 0);
454 if (bank == 3)
455 tmp |= (1 << 3);
456
457 for (i = 0; i < 9; i++)
458 if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
459 ((tmp & (7 << 5)) <=
460 (ddr_cfg_2_rbc[i] & (7 << 5))) &&
461 ((tmp & (1 << 8)) <=
462 (ddr_cfg_2_rbc[i] & (1 << 8)))) {
463 ddrconf = i;
464 goto out;
465 }
466
467 for (i = 0; i < 7; i++)
468 if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
469 ((tmp & (7 << 5)) <=
470 (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
471 ((tmp & (1 << 8)) <=
472 (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
473 ddrconf = i + 22;
474 goto out;
475 }
476
477 if (cs == 1 && bank == 3 && row <= 17 &&
478 (col + bw) == 12)
479 ddrconf = 23;
480 }
481
482 out:
483 if (ddrconf > 28)
484 printascii("calculate ddrconfig error\n");
485
486 if (sdram_params->base.dramtype == DDR4) {
487 for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
488 if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
489 if (ddrconf == 21 && row > 16)
490 printascii("warn:ddrconf21 row > 16\n");
491 else
492 ddrconf = d4_rbc_2_d3_rbc[i][1];
493 break;
494 }
495 }
496 }
497
498 return ddrconf;
499 }
500
sw_set_req(struct dram_info * dram)501 static void sw_set_req(struct dram_info *dram)
502 {
503 void __iomem *pctl_base = dram->pctl;
504
505 /* clear sw_done=0 */
506 writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
507 }
508
sw_set_ack(struct dram_info * dram)509 static void sw_set_ack(struct dram_info *dram)
510 {
511 void __iomem *pctl_base = dram->pctl;
512
513 /* set sw_done=1 */
514 writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
515 while (1) {
516 /* wait programming done */
517 if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
518 PCTL2_SW_DONE_ACK)
519 break;
520 }
521 }
522
set_ctl_address_map(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)523 static void set_ctl_address_map(struct dram_info *dram,
524 struct rv1126_sdram_params *sdram_params)
525 {
526 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
527 void __iomem *pctl_base = dram->pctl;
528 u32 ddrconf = cap_info->ddrconfig;
529 u32 i, row;
530
531 row = cap_info->cs0_row;
532 if (sdram_params->base.dramtype == DDR4) {
533 for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc) ; i++) {
534 if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
535 ddrconf = d4_rbc_2_d3_rbc[i][0];
536 break;
537 }
538 }
539 }
540
541 if (ddrconf >= ARRAY_SIZE(addrmap)) {
542 printascii("set ctl address map fail\n");
543 return;
544 }
545
546 sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
547 &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);
548
549 /* unused row set to 0xf */
550 for (i = 17; i >= row; i--)
551 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
552 ((i - 12) * 8 / 32) * 4,
553 0xf << ((i - 12) * 8 % 32));
554
555 if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
556 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
557 if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
558 setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
559
560 if (cap_info->rank == 1)
561 clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
562 }
563
phy_pll_set(struct dram_info * dram,u32 freq,u32 wait)564 static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
565 {
566 void __iomem *phy_base = dram->phy;
567 u32 fbdiv, prediv, postdiv, postdiv_en;
568 int delay = 1000;
569
570 if (wait) {
571 clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
572 while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK)) {
573 udelay(1);
574 if (delay-- <= 0) {
575 printascii("ERROR: phy pll lock timeout!\n");
576 while (1)
577 ;
578 }
579 }
580 } else {
581 freq /= MHz;
582 prediv = 1;
583 if (freq <= 200) {
584 fbdiv = 16;
585 postdiv = 2;
586 postdiv_en = 1;
587 } else if (freq <= 456) {
588 fbdiv = 8;
589 postdiv = 1;
590 postdiv_en = 1;
591 } else {
592 fbdiv = 4;
593 postdiv = 0;
594 postdiv_en = 0;
595 }
596 writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
597 clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
598 (fbdiv >> 8) & 1);
599 clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
600 postdiv_en << PHY_POSTDIV_EN_SHIFT);
601
602 clrsetbits_le32(PHY_REG(phy_base, 0x52),
603 PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
604 clrsetbits_le32(PHY_REG(phy_base, 0x53),
605 PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
606 postdiv << PHY_POSTDIV_SHIFT);
607 }
608 }
609
610 static const u16 d3_phy_drv_2_ohm[][2] = {
611 {PHY_DDR3_RON_455ohm, 455},
612 {PHY_DDR3_RON_230ohm, 230},
613 {PHY_DDR3_RON_153ohm, 153},
614 {PHY_DDR3_RON_115ohm, 115},
615 {PHY_DDR3_RON_91ohm, 91},
616 {PHY_DDR3_RON_76ohm, 76},
617 {PHY_DDR3_RON_65ohm, 65},
618 {PHY_DDR3_RON_57ohm, 57},
619 {PHY_DDR3_RON_51ohm, 51},
620 {PHY_DDR3_RON_46ohm, 46},
621 {PHY_DDR3_RON_41ohm, 41},
622 {PHY_DDR3_RON_38ohm, 38},
623 {PHY_DDR3_RON_35ohm, 35},
624 {PHY_DDR3_RON_32ohm, 32},
625 {PHY_DDR3_RON_30ohm, 30},
626 {PHY_DDR3_RON_28ohm, 28},
627 {PHY_DDR3_RON_27ohm, 27},
628 {PHY_DDR3_RON_25ohm, 25},
629 {PHY_DDR3_RON_24ohm, 24},
630 {PHY_DDR3_RON_23ohm, 23},
631 {PHY_DDR3_RON_22ohm, 22},
632 {PHY_DDR3_RON_21ohm, 21},
633 {PHY_DDR3_RON_20ohm, 20}
634 };
635
636 static u16 d3_phy_odt_2_ohm[][2] = {
637 {PHY_DDR3_RTT_DISABLE, 0},
638 {PHY_DDR3_RTT_561ohm, 561},
639 {PHY_DDR3_RTT_282ohm, 282},
640 {PHY_DDR3_RTT_188ohm, 188},
641 {PHY_DDR3_RTT_141ohm, 141},
642 {PHY_DDR3_RTT_113ohm, 113},
643 {PHY_DDR3_RTT_94ohm, 94},
644 {PHY_DDR3_RTT_81ohm, 81},
645 {PHY_DDR3_RTT_72ohm, 72},
646 {PHY_DDR3_RTT_64ohm, 64},
647 {PHY_DDR3_RTT_58ohm, 58},
648 {PHY_DDR3_RTT_52ohm, 52},
649 {PHY_DDR3_RTT_48ohm, 48},
650 {PHY_DDR3_RTT_44ohm, 44},
651 {PHY_DDR3_RTT_41ohm, 41},
652 {PHY_DDR3_RTT_38ohm, 38},
653 {PHY_DDR3_RTT_37ohm, 37},
654 {PHY_DDR3_RTT_34ohm, 34},
655 {PHY_DDR3_RTT_32ohm, 32},
656 {PHY_DDR3_RTT_31ohm, 31},
657 {PHY_DDR3_RTT_29ohm, 29},
658 {PHY_DDR3_RTT_28ohm, 28},
659 {PHY_DDR3_RTT_27ohm, 27},
660 {PHY_DDR3_RTT_25ohm, 25}
661 };
662
663 static u16 d4lp3_phy_drv_2_ohm[][2] = {
664 {PHY_DDR4_LPDDR3_RON_482ohm, 482},
665 {PHY_DDR4_LPDDR3_RON_244ohm, 244},
666 {PHY_DDR4_LPDDR3_RON_162ohm, 162},
667 {PHY_DDR4_LPDDR3_RON_122ohm, 122},
668 {PHY_DDR4_LPDDR3_RON_97ohm, 97},
669 {PHY_DDR4_LPDDR3_RON_81ohm, 81},
670 {PHY_DDR4_LPDDR3_RON_69ohm, 69},
671 {PHY_DDR4_LPDDR3_RON_61ohm, 61},
672 {PHY_DDR4_LPDDR3_RON_54ohm, 54},
673 {PHY_DDR4_LPDDR3_RON_48ohm, 48},
674 {PHY_DDR4_LPDDR3_RON_44ohm, 44},
675 {PHY_DDR4_LPDDR3_RON_40ohm, 40},
676 {PHY_DDR4_LPDDR3_RON_37ohm, 37},
677 {PHY_DDR4_LPDDR3_RON_34ohm, 34},
678 {PHY_DDR4_LPDDR3_RON_32ohm, 32},
679 {PHY_DDR4_LPDDR3_RON_30ohm, 30},
680 {PHY_DDR4_LPDDR3_RON_28ohm, 28},
681 {PHY_DDR4_LPDDR3_RON_27ohm, 27},
682 {PHY_DDR4_LPDDR3_RON_25ohm, 25},
683 {PHY_DDR4_LPDDR3_RON_24ohm, 24},
684 {PHY_DDR4_LPDDR3_RON_23ohm, 23},
685 {PHY_DDR4_LPDDR3_RON_22ohm, 22},
686 {PHY_DDR4_LPDDR3_RON_21ohm, 21}
687 };
688
689 static u16 d4lp3_phy_odt_2_ohm[][2] = {
690 {PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
691 {PHY_DDR4_LPDDR3_RTT_586ohm, 586},
692 {PHY_DDR4_LPDDR3_RTT_294ohm, 294},
693 {PHY_DDR4_LPDDR3_RTT_196ohm, 196},
694 {PHY_DDR4_LPDDR3_RTT_148ohm, 148},
695 {PHY_DDR4_LPDDR3_RTT_118ohm, 118},
696 {PHY_DDR4_LPDDR3_RTT_99ohm, 99},
697 {PHY_DDR4_LPDDR3_RTT_85ohm, 58},
698 {PHY_DDR4_LPDDR3_RTT_76ohm, 76},
699 {PHY_DDR4_LPDDR3_RTT_67ohm, 67},
700 {PHY_DDR4_LPDDR3_RTT_60ohm, 60},
701 {PHY_DDR4_LPDDR3_RTT_55ohm, 55},
702 {PHY_DDR4_LPDDR3_RTT_50ohm, 50},
703 {PHY_DDR4_LPDDR3_RTT_46ohm, 46},
704 {PHY_DDR4_LPDDR3_RTT_43ohm, 43},
705 {PHY_DDR4_LPDDR3_RTT_40ohm, 40},
706 {PHY_DDR4_LPDDR3_RTT_38ohm, 38},
707 {PHY_DDR4_LPDDR3_RTT_36ohm, 36},
708 {PHY_DDR4_LPDDR3_RTT_34ohm, 34},
709 {PHY_DDR4_LPDDR3_RTT_32ohm, 32},
710 {PHY_DDR4_LPDDR3_RTT_31ohm, 31},
711 {PHY_DDR4_LPDDR3_RTT_29ohm, 29},
712 {PHY_DDR4_LPDDR3_RTT_28ohm, 28},
713 {PHY_DDR4_LPDDR3_RTT_27ohm, 27}
714 };
715
716 static u16 lp4_phy_drv_2_ohm[][2] = {
717 {PHY_LPDDR4_RON_501ohm, 501},
718 {PHY_LPDDR4_RON_253ohm, 253},
719 {PHY_LPDDR4_RON_168ohm, 168},
720 {PHY_LPDDR4_RON_126ohm, 126},
721 {PHY_LPDDR4_RON_101ohm, 101},
722 {PHY_LPDDR4_RON_84ohm, 84},
723 {PHY_LPDDR4_RON_72ohm, 72},
724 {PHY_LPDDR4_RON_63ohm, 63},
725 {PHY_LPDDR4_RON_56ohm, 56},
726 {PHY_LPDDR4_RON_50ohm, 50},
727 {PHY_LPDDR4_RON_46ohm, 46},
728 {PHY_LPDDR4_RON_42ohm, 42},
729 {PHY_LPDDR4_RON_38ohm, 38},
730 {PHY_LPDDR4_RON_36ohm, 36},
731 {PHY_LPDDR4_RON_33ohm, 33},
732 {PHY_LPDDR4_RON_31ohm, 31},
733 {PHY_LPDDR4_RON_29ohm, 29},
734 {PHY_LPDDR4_RON_28ohm, 28},
735 {PHY_LPDDR4_RON_26ohm, 26},
736 {PHY_LPDDR4_RON_25ohm, 25},
737 {PHY_LPDDR4_RON_24ohm, 24},
738 {PHY_LPDDR4_RON_23ohm, 23},
739 {PHY_LPDDR4_RON_22ohm, 22}
740 };
741
742 static u16 lp4_phy_odt_2_ohm[][2] = {
743 {PHY_LPDDR4_RTT_DISABLE, 0},
744 {PHY_LPDDR4_RTT_604ohm, 604},
745 {PHY_LPDDR4_RTT_303ohm, 303},
746 {PHY_LPDDR4_RTT_202ohm, 202},
747 {PHY_LPDDR4_RTT_152ohm, 152},
748 {PHY_LPDDR4_RTT_122ohm, 122},
749 {PHY_LPDDR4_RTT_101ohm, 101},
750 {PHY_LPDDR4_RTT_87ohm, 87},
751 {PHY_LPDDR4_RTT_78ohm, 78},
752 {PHY_LPDDR4_RTT_69ohm, 69},
753 {PHY_LPDDR4_RTT_62ohm, 62},
754 {PHY_LPDDR4_RTT_56ohm, 56},
755 {PHY_LPDDR4_RTT_52ohm, 52},
756 {PHY_LPDDR4_RTT_48ohm, 48},
757 {PHY_LPDDR4_RTT_44ohm, 44},
758 {PHY_LPDDR4_RTT_41ohm, 41},
759 {PHY_LPDDR4_RTT_39ohm, 39},
760 {PHY_LPDDR4_RTT_37ohm, 37},
761 {PHY_LPDDR4_RTT_35ohm, 35},
762 {PHY_LPDDR4_RTT_33ohm, 33},
763 {PHY_LPDDR4_RTT_32ohm, 32},
764 {PHY_LPDDR4_RTT_30ohm, 30},
765 {PHY_LPDDR4_RTT_29ohm, 29},
766 {PHY_LPDDR4_RTT_27ohm, 27}
767 };
768
lp4_odt_calc(u32 odt_ohm)769 static u32 lp4_odt_calc(u32 odt_ohm)
770 {
771 u32 odt;
772
773 if (odt_ohm == 0)
774 odt = LPDDR4_DQODT_DIS;
775 else if (odt_ohm <= 40)
776 odt = LPDDR4_DQODT_40;
777 else if (odt_ohm <= 48)
778 odt = LPDDR4_DQODT_48;
779 else if (odt_ohm <= 60)
780 odt = LPDDR4_DQODT_60;
781 else if (odt_ohm <= 80)
782 odt = LPDDR4_DQODT_80;
783 else if (odt_ohm <= 120)
784 odt = LPDDR4_DQODT_120;
785 else
786 odt = LPDDR4_DQODT_240;
787
788 return odt;
789 }
790
get_ddr_drv_odt_info(u32 dramtype)791 static void *get_ddr_drv_odt_info(u32 dramtype)
792 {
793 struct sdram_head_info_index_v2 *index =
794 (struct sdram_head_info_index_v2 *)common_info;
795 void *ddr_info = 0;
796
797 if (dramtype == DDR4)
798 ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
799 else if (dramtype == DDR3)
800 ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
801 else if (dramtype == LPDDR3)
802 ddr_info = (void *)common_info + index->lp3_index.offset * 4;
803 else if (dramtype == LPDDR4)
804 ddr_info = (void *)common_info + index->lp4_index.offset * 4;
805 else if (dramtype == LPDDR4X)
806 ddr_info = (void *)common_info + index->lp4x_index.offset * 4;
807 else
808 printascii("unsupported dram type\n");
809 return ddr_info;
810 }
811
set_lp4_vref(struct dram_info * dram,struct lp4_info * lp4_info,u32 freq_mhz,u32 dst_fsp,u32 dramtype)812 static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
813 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
814 {
815 void __iomem *pctl_base = dram->pctl;
816 u32 ca_vref, dq_vref;
817
818 if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
819 ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
820 else
821 ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);
822
823 if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
824 dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
825 else
826 dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);
827
828 if (dramtype == LPDDR4) {
829 if (ca_vref < 100)
830 ca_vref = 100;
831 if (ca_vref > 420)
832 ca_vref = 420;
833
834 if (ca_vref <= 300)
835 ca_vref = (0 << 6) | (ca_vref - 100) / 4;
836 else
837 ca_vref = (1 << 6) | (ca_vref - 220) / 4;
838
839 if (dq_vref < 100)
840 dq_vref = 100;
841 if (dq_vref > 420)
842 dq_vref = 420;
843
844 if (dq_vref <= 300)
845 dq_vref = (0 << 6) | (dq_vref - 100) / 4;
846 else
847 dq_vref = (1 << 6) | (dq_vref - 220) / 4;
848 } else {
849 ca_vref = ca_vref * 11 / 6;
850 if (ca_vref < 150)
851 ca_vref = 150;
852 if (ca_vref > 629)
853 ca_vref = 629;
854
855 if (ca_vref <= 449)
856 ca_vref = (0 << 6) | (ca_vref - 150) / 4;
857 else
858 ca_vref = (1 << 6) | (ca_vref - 329) / 4;
859
860 if (dq_vref < 150)
861 dq_vref = 150;
862 if (dq_vref > 629)
863 dq_vref = 629;
864
865 if (dq_vref <= 449)
866 dq_vref = (0 << 6) | (dq_vref - 150) / 6;
867 else
868 dq_vref = (1 << 6) | (dq_vref - 329) / 6;
869 }
870 sw_set_req(dram);
871 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
872 DDR_PCTL2_INIT6,
873 PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
874 ca_vref << PCTL2_LPDDR4_MR12_SHIFT);
875
876 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
877 DDR_PCTL2_INIT7,
878 PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
879 dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
880 sw_set_ack(dram);
881 }
882
set_ds_odt(struct dram_info * dram,struct rv1126_sdram_params * sdram_params,u32 dst_fsp)883 static void set_ds_odt(struct dram_info *dram,
884 struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
885 {
886 void __iomem *phy_base = dram->phy;
887 void __iomem *pctl_base = dram->pctl;
888 u32 dramtype = sdram_params->base.dramtype;
889 struct ddr2_3_4_lp2_3_info *ddr_info;
890 struct lp4_info *lp4_info;
891 u32 i, j, tmp;
892 const u16 (*p_drv)[2];
893 const u16 (*p_odt)[2];
894 u32 drv_info, sr_info;
895 u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
896 u32 phy_odt_ohm, dram_odt_ohm;
897 u32 lp4_pu_cal, phy_lp4_drv_pd_en;
898 u32 phy_odt_up_en, phy_odt_dn_en;
899 u32 sr_dq, sr_clk;
900 u32 freq = sdram_params->base.ddr_freq;
901 u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
902 u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
903 u32 phy_dq_drv = 0;
904 u32 phy_odt_up = 0, phy_odt_dn = 0;
905
906 ddr_info = get_ddr_drv_odt_info(dramtype);
907 lp4_info = (void *)ddr_info;
908
909 if (!ddr_info)
910 return;
911
912 /* dram odt en freq control phy drv, dram odt and phy sr */
913 if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
914 drv_info = ddr_info->drv_when_odtoff;
915 dram_odt_ohm = 0;
916 sr_info = ddr_info->sr_when_odtoff;
917 phy_lp4_drv_pd_en =
918 PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
919 } else {
920 drv_info = ddr_info->drv_when_odten;
921 dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
922 sr_info = ddr_info->sr_when_odten;
923 phy_lp4_drv_pd_en =
924 PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
925 }
926 phy_dq_drv_ohm =
927 DRV_INFO_PHY_DQ_DRV(drv_info);
928 phy_clk_drv_ohm =
929 DRV_INFO_PHY_CLK_DRV(drv_info);
930 phy_ca_drv_ohm =
931 DRV_INFO_PHY_CA_DRV(drv_info);
932
933 sr_dq = DQ_SR_INFO(sr_info);
934 sr_clk = CLK_SR_INFO(sr_info);
935
936 /* phy odt en freq control dram drv and phy odt */
937 if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
938 dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
939 lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
940 phy_odt_ohm = 0;
941 phy_odt_up_en = 0;
942 phy_odt_dn_en = 0;
943 } else {
944 dram_drv_ohm =
945 DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
946 phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
947 phy_odt_up_en =
948 ODT_INFO_PULLUP_EN(ddr_info->odt_info);
949 phy_odt_dn_en =
950 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
951 lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
952 }
953
954 if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
955 if (phy_odt_ohm) {
956 phy_odt_up_en = 0;
957 phy_odt_dn_en = 1;
958 }
959 if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
960 dram_caodt_ohm = 0;
961 else
962 dram_caodt_ohm =
963 ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
964 }
965
966 if (dramtype == DDR3) {
967 p_drv = d3_phy_drv_2_ohm;
968 p_odt = d3_phy_odt_2_ohm;
969 } else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
970 p_drv = lp4_phy_drv_2_ohm;
971 p_odt = lp4_phy_odt_2_ohm;
972 } else {
973 p_drv = d4lp3_phy_drv_2_ohm;
974 p_odt = d4lp3_phy_odt_2_ohm;
975 }
976
977 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
978 if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
979 phy_dq_drv = **(p_drv + i);
980 break;
981 }
982 if (i == 0)
983 break;
984 }
985 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
986 if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
987 phy_clk_drv = **(p_drv + i);
988 break;
989 }
990 if (i == 0)
991 break;
992 }
993 for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
994 if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
995 phy_ca_drv = **(p_drv + i);
996 break;
997 }
998 if (i == 0)
999 break;
1000 }
1001 if (!phy_odt_ohm)
1002 phy_odt = 0;
1003 else
1004 for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
1005 if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
1006 phy_odt = **(p_odt + i);
1007 break;
1008 }
1009 if (i == 0)
1010 break;
1011 }
1012
1013 if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
1014 if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
1015 vref_inner = 0x80;
1016 else if (phy_odt_up_en)
1017 vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
1018 (dram_drv_ohm + phy_odt_ohm);
1019 else
1020 vref_inner = phy_odt_ohm * 128 /
1021 (phy_odt_ohm + dram_drv_ohm);
1022
1023 if (dramtype != DDR3 && dram_odt_ohm)
1024 vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
1025 (phy_dq_drv_ohm + dram_odt_ohm);
1026 else
1027 vref_out = 0x80;
1028 } else {
1029 /* for lp4 and lp4x*/
1030 if (phy_odt_ohm)
1031 vref_inner =
1032 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
1033 256) / 1000;
1034 else
1035 vref_inner =
1036 (PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
1037 256) / 1000;
1038
1039 vref_out = 0x80;
1040 }
1041
1042 /* default ZQCALIB bypass mode */
1043 clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
1044 clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
1045 clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
1046 clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
1047 if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1048 clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
1049 clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
1050 } else {
1051 clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
1052 clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
1053 }
1054 /* clk / cmd slew rate */
1055 clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);
1056
1057 phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
1058 if (phy_odt_up_en)
1059 phy_odt_up = phy_odt;
1060 if (phy_odt_dn_en)
1061 phy_odt_dn = phy_odt;
1062
1063 for (i = 0; i < 4; i++) {
1064 j = 0x110 + i * 0x10;
1065 clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
1066 clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
1067 clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
1068 clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
1069 writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));
1070
1071 clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
1072 1 << 3, phy_lp4_drv_pd_en << 3);
1073 if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1074 clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10), BIT(5));
1075 /* dq slew rate */
1076 clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
1077 0x1f, sr_dq);
1078 }
1079
1080 /* reg_rx_vref_value_update */
1081 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1082 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1083
1084 /* RAM VREF */
1085 writel(vref_out, PHY_REG(phy_base, 0x105));
1086 if (dramtype == LPDDR3)
1087 udelay(100);
1088
1089 if (dramtype == LPDDR4 || dramtype == LPDDR4X)
1090 set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);
1091
1092 if (dramtype == DDR3 || dramtype == DDR4) {
1093 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1094 DDR_PCTL2_INIT3);
1095 mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
1096 } else {
1097 mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1098 DDR_PCTL2_INIT4);
1099 mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
1100 }
1101
1102 if (dramtype == DDR3) {
1103 mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
1104 if (dram_drv_ohm == 34)
1105 mr1_mr3 |= DDR3_DS_34;
1106
1107 if (dram_odt_ohm == 0)
1108 mr1_mr3 |= DDR3_RTT_NOM_DIS;
1109 else if (dram_odt_ohm <= 40)
1110 mr1_mr3 |= DDR3_RTT_NOM_40;
1111 else if (dram_odt_ohm <= 60)
1112 mr1_mr3 |= DDR3_RTT_NOM_60;
1113 else
1114 mr1_mr3 |= DDR3_RTT_NOM_120;
1115
1116 } else if (dramtype == DDR4) {
1117 mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
1118 if (dram_drv_ohm == 48)
1119 mr1_mr3 |= DDR4_DS_48;
1120
1121 if (dram_odt_ohm == 0)
1122 mr1_mr3 |= DDR4_RTT_NOM_DIS;
1123 else if (dram_odt_ohm <= 34)
1124 mr1_mr3 |= DDR4_RTT_NOM_34;
1125 else if (dram_odt_ohm <= 40)
1126 mr1_mr3 |= DDR4_RTT_NOM_40;
1127 else if (dram_odt_ohm <= 48)
1128 mr1_mr3 |= DDR4_RTT_NOM_48;
1129 else if (dram_odt_ohm <= 60)
1130 mr1_mr3 |= DDR4_RTT_NOM_60;
1131 else
1132 mr1_mr3 |= DDR4_RTT_NOM_120;
1133
1134 } else if (dramtype == LPDDR3) {
1135 if (dram_drv_ohm <= 34)
1136 mr1_mr3 |= LPDDR3_DS_34;
1137 else if (dram_drv_ohm <= 40)
1138 mr1_mr3 |= LPDDR3_DS_40;
1139 else if (dram_drv_ohm <= 48)
1140 mr1_mr3 |= LPDDR3_DS_48;
1141 else if (dram_drv_ohm <= 60)
1142 mr1_mr3 |= LPDDR3_DS_60;
1143 else if (dram_drv_ohm <= 80)
1144 mr1_mr3 |= LPDDR3_DS_80;
1145
1146 if (dram_odt_ohm == 0)
1147 lp3_odt_value = LPDDR3_ODT_DIS;
1148 else if (dram_odt_ohm <= 60)
1149 lp3_odt_value = LPDDR3_ODT_60;
1150 else if (dram_odt_ohm <= 120)
1151 lp3_odt_value = LPDDR3_ODT_120;
1152 else
1153 lp3_odt_value = LPDDR3_ODT_240;
1154 } else {/* for lpddr4 and lpddr4x */
1155 /* MR3 for lp4 PU-CAL and PDDS */
1156 mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
1157 mr1_mr3 |= lp4_pu_cal;
1158
1159 tmp = lp4_odt_calc(dram_drv_ohm);
1160 if (!tmp)
1161 tmp = LPDDR4_PDDS_240;
1162 mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);
1163
1164 /* MR11 for lp4 ca odt, dq odt set */
1165 mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1166 DDR_PCTL2_INIT6);
1167 mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;
1168
1169 mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);
1170
1171 tmp = lp4_odt_calc(dram_odt_ohm);
1172 mr11 |= (tmp << LPDDR4_DQODT_SHIFT);
1173
1174 tmp = lp4_odt_calc(dram_caodt_ohm);
1175 mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
1176 sw_set_req(dram);
1177 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1178 DDR_PCTL2_INIT6,
1179 PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
1180 mr11 << PCTL2_LPDDR4_MR11_SHIFT);
1181 sw_set_ack(dram);
1182
1183 /* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
1184 mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1185 DDR_PCTL2_INIT7);
1186 mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
1187 mr22 &= ~LPDDR4_SOC_ODT_MASK;
1188
1189 tmp = lp4_odt_calc(phy_odt_ohm);
1190 mr22 |= tmp;
1191 mr22 = mr22 |
1192 (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
1193 LPDDR4_ODTE_CK_SHIFT) |
1194 (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
1195 LPDDR4_ODTE_CS_SHIFT) |
1196 (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
1197 LPDDR4_ODTD_CA_SHIFT);
1198
1199 sw_set_req(dram);
1200 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1201 DDR_PCTL2_INIT7,
1202 PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
1203 mr22 << PCTL2_LPDDR4_MR22_SHIFT);
1204 sw_set_ack(dram);
1205 }
1206
1207 if (dramtype == DDR4 || dramtype == DDR3) {
1208 sw_set_req(dram);
1209 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1210 DDR_PCTL2_INIT3,
1211 PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
1212 mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
1213 sw_set_ack(dram);
1214 } else {
1215 sw_set_req(dram);
1216 clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
1217 DDR_PCTL2_INIT4,
1218 PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
1219 mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
1220 sw_set_ack(dram);
1221 }
1222 }
1223
sdram_cmd_dq_path_remap(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)1224 static int sdram_cmd_dq_path_remap(struct dram_info *dram,
1225 struct rv1126_sdram_params *sdram_params)
1226 {
1227 void __iomem *phy_base = dram->phy;
1228 u32 dramtype = sdram_params->base.dramtype;
1229 struct sdram_head_info_index_v2 *index =
1230 (struct sdram_head_info_index_v2 *)common_info;
1231 struct dq_map_info *map_info;
1232
1233 map_info = (struct dq_map_info *)((void *)common_info +
1234 index->dq_map_index.offset * 4);
1235
1236 if (dramtype == LPDDR4X)
1237 dramtype = LPDDR4;
1238
1239 if (dramtype <= LPDDR4)
1240 writel((map_info->byte_map[dramtype / 4] >>
1241 ((dramtype % 4) * 8)) & 0xff,
1242 PHY_REG(phy_base, 0x4f));
1243
1244 return 0;
1245 }
1246
phy_cfg(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)1247 static void phy_cfg(struct dram_info *dram,
1248 struct rv1126_sdram_params *sdram_params)
1249 {
1250 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
1251 void __iomem *phy_base = dram->phy;
1252 u32 i, dq_map, tmp;
1253 u32 byte1 = 0, byte0 = 0;
1254
1255 sdram_cmd_dq_path_remap(dram, sdram_params);
1256
1257 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
1258 for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
1259 writel(sdram_params->phy_regs.phy[i][1],
1260 phy_base + sdram_params->phy_regs.phy[i][0]);
1261 }
1262
1263 clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
1264 dq_map = readl(PHY_REG(phy_base, 0x4f));
1265 for (i = 0; i < 4; i++) {
1266 if (((dq_map >> (i * 2)) & 0x3) == 0) {
1267 byte0 = i;
1268 break;
1269 }
1270 }
1271 for (i = 0; i < 4; i++) {
1272 if (((dq_map >> (i * 2)) & 0x3) == 1) {
1273 byte1 = i;
1274 break;
1275 }
1276 }
1277
1278 tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
1279 if (cap_info->bw == 2)
1280 tmp |= 0xf;
1281 else if (cap_info->bw == 1)
1282 tmp |= ((1 << byte0) | (1 << byte1));
1283 else
1284 tmp |= (1 << byte0);
1285
1286 writel(tmp, PHY_REG(phy_base, 0xf));
1287
1288 /* lpddr4 odt control by phy, enable cs0 odt */
1289 if (sdram_params->base.dramtype == LPDDR4 ||
1290 sdram_params->base.dramtype == LPDDR4X)
1291 clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
1292 (1 << 6) | (1 << 4));
1293 /* for ca training ca vref choose range1 */
1294 setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
1295 setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
1296 /* for wr training PHY_0x7c[5], choose range0 */
1297 clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
1298 }
1299
update_refresh_reg(struct dram_info * dram)1300 static int update_refresh_reg(struct dram_info *dram)
1301 {
1302 void __iomem *pctl_base = dram->pctl;
1303 u32 ret;
1304
1305 ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
1306 writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
1307
1308 return 0;
1309 }
1310
1311 /*
1312 * rank = 1: cs0
1313 * rank = 2: cs1
1314 */
read_mr(struct dram_info * dram,u32 rank,u32 byte,u32 mr_num,u32 dramtype)1315 u32 read_mr(struct dram_info *dram, u32 rank, u32 byte, u32 mr_num, u32 dramtype)
1316 {
1317 u32 ret;
1318 u32 i, temp;
1319 void __iomem *pctl_base = dram->pctl;
1320 struct sdram_head_info_index_v2 *index;
1321 struct dq_map_info *map_info;
1322
1323 pctl_read_mr(pctl_base, rank, mr_num);
1324
1325 if (dramtype != LPDDR4 && dramtype != LPDDR4X) {
1326 temp = (readl(&dram->ddrgrf->ddr_grf_status[0]) >> (byte * 8)) & 0xff;
1327
1328 if (byte == 0) {
1329 index = (struct sdram_head_info_index_v2 *)common_info;
1330 map_info = (struct dq_map_info *)((void *)common_info +
1331 index->dq_map_index.offset * 4);
1332 ret = 0;
1333 for (i = 0; i < 8; i++)
1334 ret |= ((temp >> i) & 0x1) <<
1335 ((map_info->lp3_dq0_7_map >> (i * 4)) & 0xf);
1336 } else {
1337 ret = temp;
1338 }
1339 } else {
1340 ret = (readl(&dram->ddrgrf->ddr_grf_status[1]) >> (byte * 8)) & 0xff;
1341 }
1342
1343 return ret;
1344 }
1345
enter_sr(struct dram_info * dram,u32 en)1346 static void enter_sr(struct dram_info *dram, u32 en)
1347 {
1348 void __iomem *pctl_base = dram->pctl;
1349
1350 if (en) {
1351 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1352 while (1) {
1353 if (((readl(pctl_base + DDR_PCTL2_STAT) &
1354 PCTL2_SELFREF_TYPE_MASK) ==
1355 PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
1356 ((readl(pctl_base + DDR_PCTL2_STAT) &
1357 PCTL2_OPERATING_MODE_MASK) ==
1358 PCTL2_OPERATING_MODE_SR))
1359 break;
1360 }
1361 } else {
1362 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
1363 while ((readl(pctl_base + DDR_PCTL2_STAT) &
1364 PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
1365 continue;
1366 }
1367 }
1368
record_dq_prebit(struct dram_info * dram)1369 void record_dq_prebit(struct dram_info *dram)
1370 {
1371 u32 group, i, tmp;
1372 void __iomem *phy_base = dram->phy;
1373
1374 for (group = 0; group < 4; group++) {
1375 for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
1376 /* l_loop_invdelaysel */
1377 writel(dq_sel[i][0], PHY_REG(phy_base,
1378 grp_addr[group] + 0x2c));
1379 tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
1380 writel(tmp, PHY_REG(phy_base,
1381 grp_addr[group] + dq_sel[i][1]));
1382
1383 /* r_loop_invdelaysel */
1384 writel(dq_sel[i][0], PHY_REG(phy_base,
1385 grp_addr[group] + 0x2d));
1386 tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
1387 writel(tmp, PHY_REG(phy_base,
1388 grp_addr[group] + dq_sel[i][2]));
1389 }
1390 }
1391 }
1392
update_dq_rx_prebit(struct dram_info * dram)1393 static void update_dq_rx_prebit(struct dram_info *dram)
1394 {
1395 void __iomem *phy_base = dram->phy;
1396
1397 clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
1398 BIT(4));
1399 udelay(1);
1400 clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
1401 }
1402
update_dq_tx_prebit(struct dram_info * dram)1403 static void update_dq_tx_prebit(struct dram_info *dram)
1404 {
1405 void __iomem *phy_base = dram->phy;
1406
1407 clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1408 setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
1409 setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1410 udelay(1);
1411 clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
1412 }
1413
update_ca_prebit(struct dram_info * dram)1414 static void update_ca_prebit(struct dram_info *dram)
1415 {
1416 void __iomem *phy_base = dram->phy;
1417
1418 clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
1419 setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1420 udelay(1);
1421 clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
1422 }
1423
1424 /*
1425 * dir: 0: de-skew = delta_*
1426 * 1: de-skew = reg val - delta_*
1427 * delta_dir: value for differential signal: clk/
1428 * delta_sig: value for single signal: ca/cmd
1429 */
modify_ca_deskew(struct dram_info * dram,u32 dir,int delta_dif,int delta_sig,u32 cs,u32 dramtype)1430 static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
1431 int delta_sig, u32 cs, u32 dramtype)
1432 {
1433 void __iomem *phy_base = dram->phy;
1434 u32 i, cs_en, tmp;
1435 u32 dfi_lp_stat = 0;
1436
1437 if (cs == 0)
1438 cs_en = 1;
1439 else if (cs == 2)
1440 cs_en = 2;
1441 else
1442 cs_en = 3;
1443
1444 if ((dramtype == LPDDR4 || dramtype == LPDDR4X) &&
1445 ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
1446 dfi_lp_stat = 1;
1447 setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1448 }
1449 enter_sr(dram, 1);
1450
1451 for (i = 0; i < 0x20; i++) {
1452 if (dir == DESKEW_MDF_ABS_VAL)
1453 tmp = delta_sig;
1454 else
1455 tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
1456 delta_sig;
1457 writel(tmp, PHY_REG(phy_base, 0x150 + i));
1458 }
1459
1460 if (dir == DESKEW_MDF_ABS_VAL)
1461 tmp = delta_dif;
1462 else
1463 tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
1464 delta_sig + delta_dif;
1465 writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
1466 writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
1467 if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1468 writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
1469 writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));
1470
1471 clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
1472 update_ca_prebit(dram);
1473 }
1474 enter_sr(dram, 0);
1475
1476 if (dfi_lp_stat)
1477 clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
1478
1479 }
1480
get_min_value(struct dram_info * dram,u32 signal,u32 rank)1481 static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
1482 {
1483 u32 i, j, offset = 0;
1484 u32 min = 0x3f;
1485 void __iomem *phy_base = dram->phy;
1486 u32 byte_en;
1487
1488 if (signal == SKEW_TX_SIGNAL)
1489 offset = 8;
1490
1491 if (signal == SKEW_CA_SIGNAL) {
1492 for (i = 0; i < 0x20; i++)
1493 min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
1494 } else {
1495 byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1496 for (j = offset; j < offset + rank * 4; j++) {
1497 if (!((byte_en >> (j % 4)) & 1))
1498 continue;
1499 for (i = 0; i < 11; i++)
1500 min = MIN(min,
1501 readl(PHY_REG(phy_base,
1502 dqs_dq_skew_adr[j] +
1503 i)));
1504 }
1505 }
1506
1507 return min;
1508 }
1509
low_power_update(struct dram_info * dram,u32 en)1510 static u32 low_power_update(struct dram_info *dram, u32 en)
1511 {
1512 void __iomem *pctl_base = dram->pctl;
1513 u32 lp_stat = 0;
1514
1515 if (en) {
1516 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
1517 } else {
1518 lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
1519 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
1520 }
1521
1522 return lp_stat;
1523 }
1524
1525 /*
1526 * signal:
1527 * dir: 0: de-skew = delta_*
1528 * 1: de-skew = reg val - delta_*
1529 * delta_dir: value for differential signal: dqs
1530 * delta_sig: value for single signal: dq/dm
1531 */
modify_dq_deskew(struct dram_info * dram,u32 signal,u32 dir,int delta_dif,int delta_sig,u32 rank)1532 static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
1533 int delta_dif, int delta_sig, u32 rank)
1534 {
1535 void __iomem *phy_base = dram->phy;
1536 u32 i, j, tmp, offset;
1537 u32 byte_en;
1538
1539 byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
1540
1541 if (signal == SKEW_RX_SIGNAL)
1542 offset = 0;
1543 else
1544 offset = 8;
1545
1546 for (j = offset; j < (offset + rank * 4); j++) {
1547 if (!((byte_en >> (j % 4)) & 1))
1548 continue;
1549 for (i = 0; i < 0x9; i++) {
1550 if (dir == DESKEW_MDF_ABS_VAL)
1551 tmp = delta_sig;
1552 else
1553 tmp = delta_sig + readl(PHY_REG(phy_base,
1554 dqs_dq_skew_adr[j] +
1555 i));
1556 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
1557 }
1558 if (dir == DESKEW_MDF_ABS_VAL)
1559 tmp = delta_dif;
1560 else
1561 tmp = delta_dif + readl(PHY_REG(phy_base,
1562 dqs_dq_skew_adr[j] + 9));
1563 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
1564 writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
1565 }
1566 if (signal == SKEW_RX_SIGNAL)
1567 update_dq_rx_prebit(dram);
1568 else
1569 update_dq_tx_prebit(dram);
1570 }
1571
data_training_rg(struct dram_info * dram,u32 cs,u32 dramtype)1572 static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
1573 {
1574 void __iomem *phy_base = dram->phy;
1575 u32 ret;
1576 u32 dis_auto_zq = 0;
1577 u32 odt_val_up, odt_val_dn;
1578 u32 i, j;
1579 #if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1580 void __iomem *pctl_base = dram->pctl;
1581 u32 mr4_d4 = 0;
1582 #endif
1583
1584 odt_val_dn = readl(PHY_REG(phy_base, 0x110));
1585 odt_val_up = readl(PHY_REG(phy_base, 0x111));
1586
1587 if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1588 for (i = 0; i < 4; i++) {
1589 j = 0x110 + i * 0x10;
1590 writel(PHY_DDR4_LPDDR3_RTT_294ohm,
1591 PHY_REG(phy_base, j));
1592 writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
1593 PHY_REG(phy_base, j + 0x1));
1594 }
1595 }
1596 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1597 /* use normal read mode for data training */
1598 clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1599
1600 if (dramtype == DDR4) {
1601 #if defined(DDR4_READ_GATE_PREAMBLE_MODE)
1602 setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));
1603 #elif defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1604 mr4_d4 = readl(pctl_base + DDR_PCTL2_INIT6) >> PCTL2_DDR4_MR4_SHIFT & PCTL2_MR_MASK;
1605 /* 2nCK Read Preamble */
1606 pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4 | BIT(11), DDR4);
1607 #endif
1608 }
1609
1610 /* choose training cs */
1611 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
1612 /* enable gate training */
1613 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
1614 udelay(50);
1615 ret = readl(PHY_REG(phy_base, 0x91));
1616 /* disable gate training */
1617 clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
1618 clrbits_le32(PHY_REG(phy_base, 2), 0x30);
1619 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1620
1621 #if defined(DDR4_READ_GATE_2NCK_PREAMBLE)
1622 if (dramtype == DDR4) {
1623 pctl_write_mr(pctl_base, BIT(cs), 4, mr4_d4, DDR4);
1624 }
1625 #endif
1626
1627 ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);
1628
1629 if (dramtype != LPDDR4 || dramtype != LPDDR4X) {
1630 for (i = 0; i < 4; i++) {
1631 j = 0x110 + i * 0x10;
1632 writel(odt_val_dn, PHY_REG(phy_base, j));
1633 writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
1634 }
1635 }
1636 return ret;
1637 }
1638
data_training_wl(struct dram_info * dram,u32 cs,u32 dramtype,u32 rank)1639 static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
1640 u32 rank)
1641 {
1642 void __iomem *pctl_base = dram->pctl;
1643 void __iomem *phy_base = dram->phy;
1644 u32 dis_auto_zq = 0;
1645 u32 tmp;
1646 u32 cur_fsp;
1647 u32 timeout_us = 1000;
1648
1649 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1650
1651 clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1652
1653 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1654 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
1655 0xffff;
1656 writel(tmp & 0xff, PHY_REG(phy_base, 0x3));
1657
1658 /* disable another cs's output */
1659 if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1660 pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
1661 dramtype);
1662 if (dramtype == DDR3 || dramtype == DDR4)
1663 writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1664 else
1665 writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
1666
1667 /* choose cs */
1668 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1669 ((0x2 >> cs) << 6) | (0 << 2));
1670 /* enable write leveling */
1671 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1672 ((0x2 >> cs) << 6) | (1 << 2));
1673
1674 while (1) {
1675 if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
1676 (readl(PHY_REG(phy_base, 0xf)) & 0xf))
1677 break;
1678
1679 udelay(1);
1680 if (timeout_us-- == 0) {
1681 printascii("error: write leveling timeout\n");
1682 while (1)
1683 ;
1684 }
1685 }
1686
1687 /* disable write leveling */
1688 clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
1689 ((0x2 >> cs) << 6) | (0 << 2));
1690 clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);
1691
1692 /* enable another cs's output */
1693 if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
1694 pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
1695 dramtype);
1696
1697 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1698
1699 return 0;
1700 }
1701
1702 char pattern[32] = {
1703 0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
1704 0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
1705 0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
1706 0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
1707 };
1708
data_training_rd(struct dram_info * dram,u32 cs,u32 dramtype,u32 mhz)1709 static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
1710 u32 mhz)
1711 {
1712 void __iomem *pctl_base = dram->pctl;
1713 void __iomem *phy_base = dram->phy;
1714 u32 trefi_1x, trfc_1x;
1715 u32 dis_auto_zq = 0;
1716 u32 timeout_us = 1000;
1717 u32 dqs_default;
1718 u32 cur_fsp;
1719 u32 vref_inner;
1720 u32 i;
1721 struct sdram_head_info_index_v2 *index =
1722 (struct sdram_head_info_index_v2 *)common_info;
1723 struct dq_map_info *map_info;
1724
1725 vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
1726 if (dramtype == DDR3 && vref_inner == 0x80) {
1727 for (i = 0; i < 4; i++)
1728 writel(vref_inner - 0xa,
1729 PHY_REG(phy_base, 0x118 + i * 0x10));
1730
1731 /* reg_rx_vref_value_update */
1732 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1733 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1734 }
1735
1736 map_info = (struct dq_map_info *)((void *)common_info +
1737 index->dq_map_index.offset * 4);
1738 /* only 1cs a time, 0:cs0 1 cs1 */
1739 if (cs > 1)
1740 return -1;
1741
1742 dqs_default = 0xf;
1743 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1744
1745 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1746 /* config refresh timing */
1747 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1748 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1749 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1750 DDR_PCTL2_RFSHTMG) & 0x3ff;
1751 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1752 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1753 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1754 /* reg_phy_trfc */
1755 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1756 /* reg_max_refi_cnt */
1757 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1758
1759 /* choose training cs */
1760 clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);
1761
1762 /* set dq map for ddr4 */
1763 if (dramtype == DDR4) {
1764 setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
1765 for (i = 0; i < 4; i++) {
1766 writel((map_info->ddr4_dq_map[cs * 2] >>
1767 ((i % 4) * 8)) & 0xff,
1768 PHY_REG(phy_base, 0x238 + i));
1769 writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
1770 ((i % 4) * 8)) & 0xff,
1771 PHY_REG(phy_base, 0x2b8 + i));
1772 }
1773 }
1774
1775 /* cha_l reg_l_rd_train_dqs_default[5:0] */
1776 clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
1777 /* cha_h reg_h_rd_train_dqs_default[5:0] */
1778 clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
1779 /* chb_l reg_l_rd_train_dqs_default[5:0] */
1780 clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
1781 /* chb_h reg_h_rd_train_dqs_default[5:0] */
1782 clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);
1783
1784 /* Choose the read train auto mode */
1785 clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
1786 /* Enable the auto train of the read train */
1787 clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);
1788
1789 /* Wait the train done. */
1790 while (1) {
1791 if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
1792 break;
1793
1794 udelay(1);
1795 if (timeout_us-- == 0) {
1796 printascii("error: read training timeout\n");
1797 return -1;
1798 }
1799 }
1800
1801 /* Check the read train state */
1802 if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
1803 (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
1804 printascii("error: read training error\n");
1805 return -1;
1806 }
1807
1808 /* Exit the Read Training by setting */
1809 clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));
1810
1811 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1812
1813 if (dramtype == DDR3 && vref_inner == 0x80) {
1814 for (i = 0; i < 4; i++)
1815 writel(vref_inner,
1816 PHY_REG(phy_base, 0x118 + i * 0x10));
1817
1818 /* reg_rx_vref_value_update */
1819 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1820 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
1821 }
1822
1823 return 0;
1824 }
1825
data_training_wr(struct dram_info * dram,u32 cs,u32 dramtype,u32 mhz,u32 dst_fsp)1826 static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1827 u32 mhz, u32 dst_fsp)
1828 {
1829 void __iomem *pctl_base = dram->pctl;
1830 void __iomem *phy_base = dram->phy;
1831 u32 trefi_1x, trfc_1x;
1832 u32 dis_auto_zq = 0;
1833 u32 timeout_us = 1000;
1834 u32 cur_fsp;
1835 u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1836
1837 if (dramtype == LPDDR3 && mhz <= 400) {
1838 phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1839 offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1840 cl = readl(PHY_REG(phy_base, offset));
1841 cwl = readl(PHY_REG(phy_base, offset + 2));
1842
1843 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1844 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1845 pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1846 }
1847
1848 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1849
1850 /* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1851 clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1852 /* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1853 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1854 /* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1855 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1856 /* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1857 clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1858 /* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1859 clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1860
1861 /* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1862 clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1863
1864 /* config refresh timing */
1865 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1866 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1867 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1868 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1869 DDR_PCTL2_RFSHTMG) & 0x3ff;
1870 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1871 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1872 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1873 /* reg_phy_trfc */
1874 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1875 /* reg_max_refi_cnt */
1876 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1877
1878 /* choose training cs */
1879 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1880
1881 /* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
1882 /* 0: Use the write-leveling value. */
1883 /* 1: use reg0x233 0x237 0x2b3 0x2b7 */
1884 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1885
1886 /* PHY_0x7a [0] reg_dq_wr_train_auto */
1887 setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1888
1889 /* PHY_0x7a [1] reg_dq_wr_train_en */
1890 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1891
1892 send_a_refresh(dram->pctl, 0x3);
1893
1894 while (1) {
1895 if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1896 break;
1897
1898 udelay(1);
1899 if (timeout_us-- == 0) {
1900 printascii("error: write training timeout\n");
1901 while (1)
1902 ;
1903 }
1904 }
1905
1906 /* Check the write train state */
1907 if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1908 printascii("error: write training error\n");
1909 return -1;
1910 }
1911
1912 /* PHY_0x7a [1] reg_dq_wr_train_en */
1913 clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1914
1915 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1916
1917 /* save LPDDR4/LPDDR4X write vref to fsp_param for dfs */
1918 if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
1919 fsp_param[dst_fsp].vref_dq[cs] =
1920 ((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1921 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1922 /* add range info */
1923 fsp_param[dst_fsp].vref_dq[cs] |=
1924 ((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1925 }
1926
1927 if (dramtype == LPDDR3 && mhz <= 400) {
1928 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1929 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1930 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1931 DDR_PCTL2_INIT3);
1932 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1933 dramtype);
1934 }
1935
1936 return 0;
1937 }
1938
data_training(struct dram_info * dram,u32 cs,struct rv1126_sdram_params * sdram_params,u32 dst_fsp,u32 training_flag)1939 static int data_training(struct dram_info *dram, u32 cs,
1940 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1941 u32 training_flag)
1942 {
1943 u32 ret = 0;
1944
1945 if (training_flag == FULL_TRAINING)
1946 training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1947 WRITE_TRAINING | READ_TRAINING;
1948
1949 if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1950 ret = data_training_wl(dram, cs,
1951 sdram_params->base.dramtype,
1952 sdram_params->ch.cap_info.rank);
1953 if (ret != 0)
1954 goto out;
1955 }
1956
1957 if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1958 ret = data_training_rg(dram, cs,
1959 sdram_params->base.dramtype);
1960 if (ret != 0)
1961 goto out;
1962 }
1963
1964 if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1965 ret = data_training_rd(dram, cs,
1966 sdram_params->base.dramtype,
1967 sdram_params->base.ddr_freq);
1968 if (ret != 0)
1969 goto out;
1970 }
1971
1972 if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1973 ret = data_training_wr(dram, cs,
1974 sdram_params->base.dramtype,
1975 sdram_params->base.ddr_freq, dst_fsp);
1976 if (ret != 0)
1977 goto out;
1978 }
1979
1980 out:
1981 return ret;
1982 }
1983
get_wrlvl_val(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)1984 static int get_wrlvl_val(struct dram_info *dram,
1985 struct rv1126_sdram_params *sdram_params)
1986 {
1987 int i, j, clk_skew;
1988 void __iomem *phy_base = dram->phy;
1989 u32 lp_stat;
1990 int ret;
1991
1992 lp_stat = low_power_update(dram, 0);
1993
1994 clk_skew = 0x1f;
1995 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1996 sdram_params->base.dramtype);
1997
1998 ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1999 if (sdram_params->ch.cap_info.rank == 2)
2000 ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
2001
2002 for (j = 0; j < 2; j++)
2003 for (i = 0; i < 4; i++)
2004 wrlvl_result[j][i] =
2005 (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
2006 clk_skew;
2007
2008 low_power_update(dram, lp_stat);
2009
2010 return ret;
2011 }
2012
2013 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
init_rw_trn_result_struct(struct rw_trn_result * result,void __iomem * phy_base,u8 cs_num)2014 static void init_rw_trn_result_struct(struct rw_trn_result *result,
2015 void __iomem *phy_base, u8 cs_num)
2016 {
2017 int i;
2018
2019 result->cs_num = cs_num;
2020 result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
2021 PHY_DQ_WIDTH_MASK;
2022 for (i = 0; i < FSP_NUM; i++)
2023 result->fsp_mhz[i] = 0;
2024 }
2025
save_rw_trn_min_max(void __iomem * phy_base,struct cs_rw_trn_result * rd_result,struct cs_rw_trn_result * wr_result,u8 byte_en)2026 static void save_rw_trn_min_max(void __iomem *phy_base,
2027 struct cs_rw_trn_result *rd_result,
2028 struct cs_rw_trn_result *wr_result,
2029 u8 byte_en)
2030 {
2031 u16 phy_ofs;
2032 u8 dqs;
2033 u8 dq;
2034
2035 for (dqs = 0; dqs < BYTE_NUM; dqs++) {
2036 if ((byte_en & BIT(dqs)) == 0)
2037 continue;
2038
2039 /* Channel A or B (low or high 16 bit) */
2040 phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
2041 /* low or high 8 bit */
2042 phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
2043 for (dq = 0; dq < 8; dq++) {
2044 rd_result->dqs[dqs].dq_min[dq] =
2045 readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
2046 rd_result->dqs[dqs].dq_max[dq] =
2047 readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
2048 wr_result->dqs[dqs].dq_min[dq] =
2049 readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
2050 wr_result->dqs[dqs].dq_max[dq] =
2051 readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
2052 }
2053 }
2054 }
2055
save_rw_trn_deskew(void __iomem * phy_base,struct fsp_rw_trn_result * result,u8 cs_num,int min_val,bool rw)2056 static void save_rw_trn_deskew(void __iomem *phy_base,
2057 struct fsp_rw_trn_result *result, u8 cs_num,
2058 int min_val, bool rw)
2059 {
2060 u16 phy_ofs;
2061 u8 cs;
2062 u8 dq;
2063
2064 result->min_val = min_val;
2065
2066 for (cs = 0; cs < cs_num; cs++) {
2067 phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2068 phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2069 for (dq = 0; dq < 8; dq++) {
2070 result->cs[cs].dqs[0].dq_deskew[dq] =
2071 readb(PHY_REG(phy_base, phy_ofs + dq));
2072 result->cs[cs].dqs[1].dq_deskew[dq] =
2073 readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2074 result->cs[cs].dqs[2].dq_deskew[dq] =
2075 readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2076 result->cs[cs].dqs[3].dq_deskew[dq] =
2077 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2078 }
2079
2080 result->cs[cs].dqs[0].dqs_deskew =
2081 readb(PHY_REG(phy_base, phy_ofs + 0x8));
2082 result->cs[cs].dqs[1].dqs_deskew =
2083 readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2084 result->cs[cs].dqs[2].dqs_deskew =
2085 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2086 result->cs[cs].dqs[3].dqs_deskew =
2087 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2088 }
2089 }
2090
save_rw_trn_result_to_ddr(struct rw_trn_result * result)2091 static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2092 {
2093 result->flag = DDR_DQ_EYE_FLAG;
2094 memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2095 }
2096 #endif
2097
high_freq_training(struct dram_info * dram,struct rv1126_sdram_params * sdram_params,u32 fsp)2098 static int high_freq_training(struct dram_info *dram,
2099 struct rv1126_sdram_params *sdram_params,
2100 u32 fsp)
2101 {
2102 u32 i, j;
2103 void __iomem *phy_base = dram->phy;
2104 u32 dramtype = sdram_params->base.dramtype;
2105 int min_val;
2106 int dqs_skew, clk_skew, ca_skew;
2107 u8 byte_en;
2108 int ret;
2109
2110 byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2111 dqs_skew = 0;
2112 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2113 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2114 if ((byte_en & BIT(i)) != 0)
2115 dqs_skew += wrlvl_result[j][i];
2116 }
2117 }
2118 dqs_skew = dqs_skew /
2119 (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2120
2121 clk_skew = 0x20 - dqs_skew;
2122 dqs_skew = 0x20;
2123
2124 if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
2125 min_val = 0xff;
2126 for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2127 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2128 if ((byte_en & BIT(i)) != 0)
2129 min_val = MIN(wrlvl_result[j][i], min_val);
2130 }
2131
2132 if (min_val < 0) {
2133 clk_skew = -min_val;
2134 ca_skew = -min_val;
2135 } else {
2136 clk_skew = 0;
2137 ca_skew = 0;
2138 }
2139 } else if (dramtype == LPDDR3) {
2140 ca_skew = clk_skew - 4;
2141 } else {
2142 ca_skew = clk_skew;
2143 }
2144 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2145 dramtype);
2146
2147 writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2148 writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2149 writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2150 writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2151 ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2152 READ_TRAINING | WRITE_TRAINING);
2153 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2154 rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2155 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2156 &rw_trn_result.wr_fsp[fsp].cs[0],
2157 rw_trn_result.byte_en);
2158 #endif
2159 if (sdram_params->ch.cap_info.rank == 2) {
2160 writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2161 writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2162 writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2163 writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2164 ret |= data_training(dram, 1, sdram_params, fsp,
2165 READ_GATE_TRAINING | READ_TRAINING |
2166 WRITE_TRAINING);
2167 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2168 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2169 &rw_trn_result.wr_fsp[fsp].cs[1],
2170 rw_trn_result.byte_en);
2171 #endif
2172 }
2173 if (ret)
2174 goto out;
2175
2176 record_dq_prebit(dram);
2177
2178 min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2179 sdram_params->ch.cap_info.rank) * -1;
2180 modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2181 min_val, min_val, sdram_params->ch.cap_info.rank);
2182 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2183 save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2184 rw_trn_result.cs_num, (u8)(min_val * (-1)),
2185 SKEW_RX_SIGNAL);
2186 #endif
2187
2188 min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2189 sdram_params->ch.cap_info.rank),
2190 get_min_value(dram, SKEW_CA_SIGNAL,
2191 sdram_params->ch.cap_info.rank)) * -1;
2192
2193 /* clk = 0, rx all skew -7, tx - min_value */
2194 modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2195 dramtype);
2196
2197 modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2198 min_val, min_val, sdram_params->ch.cap_info.rank);
2199 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
2200 save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2201 rw_trn_result.cs_num, (u8)(min_val * (-1)),
2202 SKEW_TX_SIGNAL);
2203 #endif
2204
2205 ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2206 if (sdram_params->ch.cap_info.rank == 2)
2207 ret |= data_training(dram, 1, sdram_params, 0,
2208 READ_GATE_TRAINING);
2209 out:
2210 return ret;
2211 }
2212
set_ddrconfig(struct dram_info * dram,u32 ddrconfig)2213 static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2214 {
2215 writel(ddrconfig, &dram->msch->deviceconf);
2216 clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2217 }
2218
update_noc_timing(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)2219 static void update_noc_timing(struct dram_info *dram,
2220 struct rv1126_sdram_params *sdram_params)
2221 {
2222 void __iomem *pctl_base = dram->pctl;
2223 u32 bw, bl;
2224
2225 bw = 8 << sdram_params->ch.cap_info.bw;
2226 bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2227
2228 /* update the noc timing related to data bus width */
2229 if ((bw / 8 * bl) <= 16)
2230 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2231 else if ((bw / 8 * bl) == 32)
2232 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2233 else if ((bw / 8 * bl) == 64)
2234 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2235 else
2236 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2237
2238 sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2239 (bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2240
2241 if (sdram_params->base.dramtype == LPDDR4 ||
2242 sdram_params->base.dramtype == LPDDR4X) {
2243 sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2244 (bw == 16) ? 0x1 : 0x2;
2245 sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2246 3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2247 }
2248
2249 writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2250 &dram->msch->ddrtiminga0);
2251 writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2252 &dram->msch->ddrtimingb0);
2253 writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2254 &dram->msch->ddrtimingc0);
2255 writel(sdram_params->ch.noc_timings.devtodev0.d32,
2256 &dram->msch->devtodev0);
2257 writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2258 writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2259 &dram->msch->ddr4timing);
2260 }
2261
split_setup(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)2262 static int split_setup(struct dram_info *dram,
2263 struct rv1126_sdram_params *sdram_params)
2264 {
2265 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2266 u32 dramtype = sdram_params->base.dramtype;
2267 u32 split_size, split_mode;
2268 u64 cs_cap[2], cap;
2269
2270 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2271 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2272
2273 /* The ddr split only support 1 rank and less than 4GB capacity. */
2274 if ((cs_cap[1]) || (cs_cap[0] >= 0x100000000ULL))
2275 goto out;
2276
2277 /* only support the larger cap is in low 16bit */
2278 if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2279 cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2280 cap_info->cs0_high16bit_row));
2281 } else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2282 (cap_info->rank == 2)) {
2283 if (!cap_info->cs1_high16bit_row)
2284 cap = cs_cap[0];
2285 else
2286 cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2287 cap_info->cs1_high16bit_row));
2288 } else {
2289 goto out;
2290 }
2291 split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2292 if (cap_info->bw == 2)
2293 split_mode = SPLIT_MODE_32_L16_VALID;
2294 else
2295 split_mode = SPLIT_MODE_16_L8_VALID;
2296
2297 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2298 (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2299 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2300 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2301 (split_mode << SPLIT_MODE_OFFSET) |
2302 (0x0 << SPLIT_BYPASS_OFFSET) |
2303 (split_size << SPLIT_SIZE_OFFSET));
2304
2305 rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2306 MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2307 0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2308
2309 out:
2310 return 0;
2311 }
2312
split_bypass(struct dram_info * dram)2313 static void split_bypass(struct dram_info *dram)
2314 {
2315 if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2316 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2317 return;
2318
2319 /* bypass split */
2320 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2321 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2322 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2323 (0x1 << SPLIT_BYPASS_OFFSET) |
2324 (0x0 << SPLIT_SIZE_OFFSET));
2325 }
2326
dram_all_config(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)2327 static void dram_all_config(struct dram_info *dram,
2328 struct rv1126_sdram_params *sdram_params)
2329 {
2330 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2331 u32 dram_type = sdram_params->base.dramtype;
2332 void __iomem *pctl_base = dram->pctl;
2333 u32 sys_reg2 = 0;
2334 u32 sys_reg3 = 0;
2335 u64 cs_cap[2];
2336 u32 cs_pst;
2337
2338 set_ddrconfig(dram, cap_info->ddrconfig);
2339 sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2340 &sys_reg3, 0);
2341 writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2342 writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2343
2344 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2345 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2346
2347 if (cap_info->rank == 2) {
2348 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2349 6 + 2;
2350 if (cs_pst > 28)
2351 cs_cap[0] = 1llu << cs_pst;
2352 }
2353
2354 writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2355 (((cs_cap[0] >> 20) / 64) & 0xff),
2356 &dram->msch->devicesize);
2357 update_noc_timing(dram, sdram_params);
2358 }
2359
enable_low_power(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)2360 static void enable_low_power(struct dram_info *dram,
2361 struct rv1126_sdram_params *sdram_params)
2362 {
2363 void __iomem *pctl_base = dram->pctl;
2364 u32 grf_lp_con;
2365
2366 writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2367
2368 if (sdram_params->base.dramtype == DDR4)
2369 grf_lp_con = (0x7 << 16) | (1 << 1);
2370 else if (sdram_params->base.dramtype == DDR3)
2371 grf_lp_con = (0x7 << 16) | (1 << 0);
2372 else
2373 grf_lp_con = (0x7 << 16) | (1 << 2);
2374
2375 /* en lpckdis_en */
2376 grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2377 writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2378
2379 /* enable sr, pd */
2380 if (dram->pd_idle == 0)
2381 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2382 else
2383 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2384 if (dram->sr_idle == 0)
2385 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2386 else
2387 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2388 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2389 }
2390
ddr_set_atags(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)2391 static void ddr_set_atags(struct dram_info *dram,
2392 struct rv1126_sdram_params *sdram_params)
2393 {
2394 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2395 u32 dram_type = sdram_params->base.dramtype;
2396 void __iomem *pctl_base = dram->pctl;
2397 struct tag_serial t_serial;
2398 struct tag_ddr_mem t_ddrmem;
2399 struct tag_soc_info t_socinfo;
2400 u64 cs_cap[2];
2401 u32 cs_pst = 0;
2402 u32 split, split_size;
2403 u64 reduce_cap = 0;
2404
2405 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2406 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2407
2408 memset(&t_serial, 0, sizeof(struct tag_serial));
2409
2410 t_serial.version = 0;
2411 t_serial.enable = 1;
2412 t_serial.addr = CONFIG_DEBUG_UART_BASE;
2413 t_serial.baudrate = CONFIG_BAUDRATE;
2414 t_serial.m_mode = SERIAL_M_MODE_M0;
2415 t_serial.id = 2;
2416
2417 atags_destroy();
2418 atags_set_tag(ATAG_SERIAL, &t_serial);
2419
2420 split = readl(&dram->ddrgrf->grf_ddrsplit_con);
2421 memset(&t_ddrmem, 0, sizeof(struct tag_ddr_mem));
2422 if (cap_info->row_3_4) {
2423 cs_cap[0] = cs_cap[0] * 3 / 4;
2424 cs_cap[1] = cs_cap[1] * 3 / 4;
2425 } else if (!(split & (1 << SPLIT_BYPASS_OFFSET))) {
2426 split_size = (split >> SPLIT_SIZE_OFFSET) & SPLIT_SIZE_MASK;
2427 reduce_cap = (cs_cap[0] + cs_cap[1] - (split_size << 24)) / 2;
2428 }
2429 t_ddrmem.version = 0;
2430 t_ddrmem.bank[0] = CONFIG_SYS_SDRAM_BASE;
2431 if (cs_cap[1]) {
2432 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2433 6 + 2;
2434 }
2435
2436 if (cs_cap[1] && cs_pst > 27) {
2437 t_ddrmem.count = 2;
2438 t_ddrmem.bank[1] = 1 << cs_pst;
2439 t_ddrmem.bank[2] = cs_cap[0];
2440 t_ddrmem.bank[3] = cs_cap[1] - reduce_cap;
2441 } else {
2442 t_ddrmem.count = 1;
2443 t_ddrmem.bank[1] = (u64)cs_cap[0] + (u64)cs_cap[1] - reduce_cap;
2444 }
2445
2446 atags_set_tag(ATAG_DDR_MEM, &t_ddrmem);
2447
2448 memset(&t_socinfo, 0, sizeof(struct tag_soc_info));
2449 t_socinfo.version = 0x1;
2450 t_socinfo.name = 0x1126;
2451 t_socinfo.flags = SOC_FLAGS_TDBT;
2452 atags_set_tag(ATAG_SOC_INFO, &t_socinfo);
2453 }
2454
print_ddr_info(struct rv1126_sdram_params * sdram_params)2455 static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2456 {
2457 u32 split;
2458
2459 if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2460 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2461 split = 0;
2462 else
2463 split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2464 SPLIT_SIZE_MASK;
2465
2466 sdram_print_ddr_info(&sdram_params->ch.cap_info,
2467 &sdram_params->base, split);
2468 }
2469
check_lp4_rzqi_value(struct dram_info * dram,u32 cs,u32 byte,u32 zq,u32 dramtype)2470 static int check_lp4_rzqi_value(struct dram_info *dram, u32 cs, u32 byte, u32 zq, u32 dramtype)
2471 {
2472 u32 rzqi;
2473
2474 rzqi = (read_mr(dram, BIT(cs), byte, 0, dramtype) >> 3) & 0x3;
2475 if (rzqi == 0x1 || rzqi == 0x2) {
2476 printascii("WARNING: ZQ");
2477 printdec(zq);
2478 printascii(" may ");
2479 if (rzqi == 0x1)
2480 printascii("connect to VSSQ or float!\n");
2481 else
2482 printascii("short to VDDQ!\n");
2483
2484 return -1;
2485 }
2486
2487 return 0;
2488 }
2489
check_lp4_rzqi(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)2490 static int check_lp4_rzqi(struct dram_info *dram, struct rv1126_sdram_params *sdram_params)
2491 {
2492 u32 cs, byte;
2493 u32 dramtype = sdram_params->base.dramtype;
2494 struct sdram_cap_info *cap_info;
2495 int ret = 0;
2496
2497 if (dramtype != LPDDR4 && dramtype != LPDDR4X)
2498 return 0;
2499
2500 cap_info = &sdram_params->ch.cap_info;
2501 if (cap_info->dbw == 0) {
2502 cs = cap_info->rank - 1;
2503 for (byte = 0; byte < 2; byte++) {
2504 if (check_lp4_rzqi_value(dram, cs, byte, byte, dramtype))
2505 ret = -1;
2506 }
2507 } else {
2508 byte = 0;
2509 for (cs = 0; cs < cap_info->rank; cs++) {
2510 if (check_lp4_rzqi_value(dram, cs, byte, cs, dramtype))
2511 ret = -1;
2512 }
2513 }
2514
2515 return ret;
2516 }
2517
modify_ddr34_bw_byte_map(u8 rg_result,struct rv1126_sdram_params * sdram_params)2518 int modify_ddr34_bw_byte_map(u8 rg_result, struct rv1126_sdram_params *sdram_params)
2519 {
2520 struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2521 struct dq_map_info *map_info = (struct dq_map_info *)
2522 ((void *)common_info + index->dq_map_index.offset * 4);
2523 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2524 u32 dramtype = sdram_params->base.dramtype;
2525 u32 byte_map = 0;
2526 u32 byte = 0;
2527 u32 byte_map_shift;
2528 int i;
2529
2530 if (dramtype == DDR3)
2531 byte_map_shift = 24;
2532 else if (dramtype == DDR4)
2533 byte_map_shift = 0;
2534 else
2535 return -1;
2536
2537 for (i = 0; i < 4; i++) {
2538 if ((rg_result & BIT(i)) == 0) {
2539 byte_map |= byte << (i * 2);
2540 byte++;
2541 }
2542 }
2543 if (byte != 1 && byte != 2 && byte != 4) {
2544 printascii("DTT result is abnormal: ");
2545 printdec(byte);
2546 printascii("byte\n");
2547 return -1;
2548 }
2549 cap_info->bw = byte / 2;
2550 for (i = 0; i < 4; i++) {
2551 if ((rg_result & BIT(i)) != 0) {
2552 byte_map |= byte << (i * 2);
2553 byte++;
2554 }
2555 }
2556
2557 if ((u8)byte_map != (u8)(map_info->byte_map[0] >> byte_map_shift)) {
2558 clrsetbits_le32(&map_info->byte_map[0],
2559 0xff << byte_map_shift, byte_map << byte_map_shift);
2560 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info, dramtype);
2561 return 1;
2562 }
2563
2564 return 0;
2565 }
2566
sdram_init_(struct dram_info * dram,struct rv1126_sdram_params * sdram_params,u32 post_init)2567 int sdram_init_(struct dram_info *dram, struct rv1126_sdram_params *sdram_params, u32 post_init)
2568 {
2569 void __iomem *pctl_base = dram->pctl;
2570 void __iomem *phy_base = dram->phy;
2571 u32 ddr4_vref;
2572 u32 mr_tmp, tmp;
2573 int delay = 3000;
2574
2575 rkclk_configure_ddr(dram, sdram_params);
2576
2577 rkclk_ddr_reset(dram, 1, 1, 1, 1);
2578 udelay(10);
2579
2580 rkclk_ddr_reset(dram, 1, 1, 1, 0);
2581 phy_cfg(dram, sdram_params);
2582
2583 rkclk_ddr_reset(dram, 1, 1, 0, 0);
2584 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2585
2586 rkclk_ddr_reset(dram, 1, 0, 0, 0);
2587 pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2588 dram->sr_idle, dram->pd_idle);
2589
2590 if (sdram_params->ch.cap_info.bw == 2) {
2591 /* 32bit interface use pageclose */
2592 setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2593 /* pageclose = 1, pageclose_timer = 0 will err in lp4 328MHz */
2594 clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2595 } else {
2596 clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2597 }
2598
2599 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2600 u32 trefi;
2601
2602 tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2603 trefi = (tmp >> 16) & 0xfff;
2604 writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2605 pctl_base + DDR_PCTL2_RFSHTMG);
2606 #endif
2607
2608 /* set frequency_mode */
2609 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2610 /* set target_frequency to Frequency 0 */
2611 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2612
2613 set_ds_odt(dram, sdram_params, 0);
2614 sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2615 set_ctl_address_map(dram, sdram_params);
2616
2617 setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2618
2619 rkclk_ddr_reset(dram, 0, 0, 0, 0);
2620
2621 while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0) {
2622 udelay(1);
2623 if (delay-- <= 0) {
2624 printascii("ERROR: Cannot wait dfi_init_done!\n");
2625 while (1)
2626 ;
2627 }
2628 }
2629
2630 if (sdram_params->base.dramtype == LPDDR3) {
2631 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2632 } else if (sdram_params->base.dramtype == LPDDR4 ||
2633 sdram_params->base.dramtype == LPDDR4X) {
2634 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2635 /* MR11 */
2636 pctl_write_mr(dram->pctl, 3, 11,
2637 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2638 LPDDR4);
2639 /* MR12 */
2640 pctl_write_mr(dram->pctl, 3, 12,
2641 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2642 LPDDR4);
2643
2644 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2645 /* MR22 */
2646 pctl_write_mr(dram->pctl, 3, 22,
2647 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2648 LPDDR4);
2649 } else if (sdram_params->base.dramtype == DDR4) {
2650 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7) >> PCTL2_DDR4_MR6_SHIFT & PCTL2_MR_MASK;
2651 pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
2652 pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp | BIT(7), DDR4);
2653 pctl_write_mr(dram->pctl, 0x3, 6, mr_tmp, DDR4);
2654 }
2655
2656 if (sdram_params->base.dramtype == DDR3 && post_init == 0)
2657 setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2658 tmp = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) & 0xf;
2659
2660 if (tmp != 0) {
2661 if (post_init != 0) {
2662 printascii("DTT cs0 error\n");
2663 return -1;
2664 }
2665 if (sdram_params->base.dramtype != DDR3 || tmp == 0xf)
2666 return -1;
2667 }
2668
2669 if (sdram_params->base.dramtype == DDR3 && post_init == 0) {
2670 if (modify_ddr34_bw_byte_map((u8)tmp, sdram_params) != 0)
2671 return -1;
2672 }
2673
2674 if (sdram_params->base.dramtype == LPDDR4) {
2675 mr_tmp = read_mr(dram, 1, 0, 14, LPDDR4);
2676
2677 if (mr_tmp != 0x4d)
2678 return -1;
2679 }
2680
2681 if (sdram_params->base.dramtype == LPDDR4 ||
2682 sdram_params->base.dramtype == LPDDR4X) {
2683 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2684 /* MR14 */
2685 pctl_write_mr(dram->pctl, 3, 14,
2686 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2687 LPDDR4);
2688 }
2689 if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2690 if (data_training(dram, 1, sdram_params, 0,
2691 READ_GATE_TRAINING) != 0) {
2692 printascii("DTT cs1 error\n");
2693 return -1;
2694 }
2695 }
2696
2697 if (sdram_params->base.dramtype == DDR4) {
2698 ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2699 pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2700 sdram_params->base.dramtype);
2701 }
2702
2703 dram_all_config(dram, sdram_params);
2704 enable_low_power(dram, sdram_params);
2705
2706 return 0;
2707 }
2708
dram_detect_cap(struct dram_info * dram,struct rv1126_sdram_params * sdram_params,unsigned char channel)2709 static u64 dram_detect_cap(struct dram_info *dram,
2710 struct rv1126_sdram_params *sdram_params,
2711 unsigned char channel)
2712 {
2713 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2714 void __iomem *pctl_base = dram->pctl;
2715 void __iomem *phy_base = dram->phy;
2716 u32 mr8;
2717
2718 u32 bktmp;
2719 u32 coltmp;
2720 u32 rowtmp;
2721 u32 cs;
2722 u32 dram_type = sdram_params->base.dramtype;
2723 u32 pwrctl;
2724 u32 i, dq_map;
2725 u32 byte1 = 0, byte0 = 0;
2726
2727 if (dram_type != LPDDR4 && dram_type != LPDDR4X) {
2728 if (dram_type != DDR4) {
2729 if (dram_type == DDR3)
2730 coltmp = 11;
2731 else
2732 coltmp = 12;
2733 bktmp = 3;
2734 if (dram_type == LPDDR2)
2735 rowtmp = 15;
2736 else
2737 rowtmp = 16;
2738
2739 if (sdram_detect_col(cap_info, coltmp) != 0)
2740 goto cap_err;
2741
2742 sdram_detect_bank(cap_info, pctl_base, coltmp, bktmp);
2743 if (dram_type != LPDDR3)
2744 sdram_detect_dbw(cap_info, dram_type);
2745 } else {
2746 coltmp = 10;
2747 bktmp = 4;
2748 rowtmp = 17;
2749
2750 cap_info->col = 10;
2751 cap_info->bk = 2;
2752 sdram_detect_bg(cap_info, pctl_base, coltmp);
2753 }
2754
2755 if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2756 goto cap_err;
2757
2758 sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2759 } else {
2760 cap_info->col = 10;
2761 cap_info->bk = 3;
2762 mr8 = read_mr(dram, 1, 0, 8, dram_type);
2763 cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2764 mr8 = (mr8 >> 2) & 0xf;
2765 if (mr8 >= 0 && mr8 <= 6) {
2766 cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2767 } else if (mr8 == 0xc) {
2768 cap_info->cs0_row = 13;
2769 } else {
2770 printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2771 goto cap_err;
2772 }
2773 if (cap_info->dbw == 0)
2774 cap_info->cs0_row++;
2775 cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2776 if (cap_info->cs0_row >= 17) {
2777 printascii("Cap ERR: ");
2778 printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2779 goto cap_err;
2780 // cap_info->cs0_row = 16;
2781 // cap_info->row_3_4 = 0;
2782 }
2783 }
2784
2785 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2786 writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2787
2788 if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2789 cs = 1;
2790 else
2791 cs = 0;
2792 cap_info->rank = cs + 1;
2793
2794 setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2795
2796 if (dram_type != DDR3) {
2797 if ((data_training_rg(dram, 0, dram_type) & 0xf) == 0) {
2798 cap_info->bw = 2;
2799 } else {
2800 dq_map = readl(PHY_REG(phy_base, 0x4f));
2801 for (i = 0; i < 4; i++) {
2802 if (((dq_map >> (i * 2)) & 0x3) == 0)
2803 byte0 = i;
2804 if (((dq_map >> (i * 2)) & 0x3) == 1)
2805 byte1 = i;
2806 }
2807 clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2808 BIT(byte0) | BIT(byte1));
2809 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2810 cap_info->bw = 1;
2811 else
2812 cap_info->bw = 0;
2813 }
2814 }
2815
2816 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2817
2818 cap_info->cs0_high16bit_row = cap_info->cs0_row;
2819 if (cs) {
2820 cap_info->cs1_row = cap_info->cs0_row;
2821 cap_info->cs1_high16bit_row = cap_info->cs0_row;
2822 } else {
2823 cap_info->cs1_row = 0;
2824 cap_info->cs1_high16bit_row = 0;
2825 }
2826
2827 if (dram_type == LPDDR3)
2828 sdram_detect_dbw(cap_info, dram_type);
2829
2830 return 0;
2831 cap_err:
2832 return -1;
2833 }
2834
dram_detect_cs1_row(struct dram_info * dram,struct rv1126_sdram_params * sdram_params,unsigned char channel)2835 static int dram_detect_cs1_row(struct dram_info *dram,
2836 struct rv1126_sdram_params *sdram_params,
2837 unsigned char channel)
2838 {
2839 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2840 void __iomem *pctl_base = dram->pctl;
2841 u32 ret = 0;
2842 void __iomem *test_addr;
2843 u32 row, bktmp, coltmp, bw;
2844 u64 cs0_cap;
2845 u32 byte_mask;
2846 u32 cs_pst;
2847 u32 cs_add = 0;
2848 u32 max_row;
2849
2850 if (cap_info->rank == 2) {
2851 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2852 6 + 2;
2853 if (cs_pst < 28)
2854 cs_add = 1;
2855
2856 cs0_cap = 1 << cs_pst;
2857
2858 if (sdram_params->base.dramtype == DDR4) {
2859 if (cap_info->dbw == 0)
2860 bktmp = cap_info->bk + 2;
2861 else
2862 bktmp = cap_info->bk + 1;
2863 } else {
2864 bktmp = cap_info->bk;
2865 }
2866 bw = cap_info->bw;
2867 coltmp = cap_info->col;
2868
2869 if (bw == 2)
2870 byte_mask = 0xFFFF;
2871 else
2872 byte_mask = 0xFF;
2873
2874 max_row = (cs_pst == 31) ? 30 : 31;
2875
2876 max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2877
2878 row = (cap_info->cs0_row > max_row) ? max_row :
2879 cap_info->cs0_row;
2880
2881 for (; row > 12; row--) {
2882 test_addr = (void __iomem *)(CONFIG_SYS_SDRAM_BASE +
2883 (u32)cs0_cap +
2884 (1ul << (row + bktmp + coltmp +
2885 cs_add + bw - 1ul)));
2886
2887 writel(0, CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap);
2888 writel(PATTERN, test_addr);
2889
2890 if (((readl(test_addr) & byte_mask) ==
2891 (PATTERN & byte_mask)) &&
2892 ((readl(CONFIG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2893 byte_mask) == 0)) {
2894 ret = row;
2895 break;
2896 }
2897 }
2898 }
2899
2900 return ret;
2901 }
2902
2903 /* return: 0 = success, other = fail */
sdram_init_detect(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)2904 static int sdram_init_detect(struct dram_info *dram,
2905 struct rv1126_sdram_params *sdram_params)
2906 {
2907 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2908 u32 ret;
2909 u32 sys_reg = 0;
2910 u32 sys_reg3 = 0;
2911
2912 if (sdram_init_(dram, sdram_params, 0)) {
2913 if (sdram_params->base.dramtype == DDR3) {
2914 if (sdram_init_(dram, sdram_params, 0))
2915 return -1;
2916 } else {
2917 return -1;
2918 }
2919 }
2920
2921 if (sdram_params->base.dramtype == DDR3) {
2922 writel(PATTERN, CONFIG_SYS_SDRAM_BASE);
2923 if (readl(CONFIG_SYS_SDRAM_BASE) != PATTERN)
2924 return -1;
2925 }
2926
2927 split_bypass(dram);
2928 if (dram_detect_cap(dram, sdram_params, 0) != 0)
2929 return -1;
2930
2931 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2932 sdram_params->base.dramtype);
2933 ret = sdram_init_(dram, sdram_params, 1);
2934 if (ret != 0)
2935 goto out;
2936
2937 cap_info->cs1_row =
2938 dram_detect_cs1_row(dram, sdram_params, 0);
2939 if (cap_info->cs1_row) {
2940 sys_reg = readl(&dram->pmugrf->os_reg[2]);
2941 sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2942 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2943 sys_reg, sys_reg3, 0);
2944 writel(sys_reg, &dram->pmugrf->os_reg[2]);
2945 writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2946 }
2947
2948 sdram_detect_high_row(cap_info, sdram_params->base.dramtype);
2949 split_setup(dram, sdram_params);
2950 out:
2951 return ret;
2952 }
2953
get_default_sdram_config(u32 freq_mhz)2954 struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2955 {
2956 u32 i;
2957 u32 offset = 0;
2958 struct ddr2_3_4_lp2_3_info *ddr_info;
2959
2960 if (!freq_mhz) {
2961 ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2962 if (ddr_info)
2963 freq_mhz =
2964 (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2965 DDR_FREQ_MASK;
2966 else
2967 freq_mhz = 0;
2968 }
2969
2970 for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2971 if (sdram_configs[i].base.ddr_freq == 0 ||
2972 freq_mhz < sdram_configs[i].base.ddr_freq)
2973 break;
2974 }
2975 offset = i == 0 ? 0 : i - 1;
2976
2977 return &sdram_configs[offset];
2978 }
2979
2980 static const u16 pctl_need_update_reg[] = {
2981 DDR_PCTL2_RFSHTMG,
2982 DDR_PCTL2_INIT3,
2983 DDR_PCTL2_INIT4,
2984 DDR_PCTL2_INIT6,
2985 DDR_PCTL2_INIT7,
2986 DDR_PCTL2_DRAMTMG0,
2987 DDR_PCTL2_DRAMTMG1,
2988 DDR_PCTL2_DRAMTMG2,
2989 DDR_PCTL2_DRAMTMG3,
2990 DDR_PCTL2_DRAMTMG4,
2991 DDR_PCTL2_DRAMTMG5,
2992 DDR_PCTL2_DRAMTMG6,
2993 DDR_PCTL2_DRAMTMG7,
2994 DDR_PCTL2_DRAMTMG8,
2995 DDR_PCTL2_DRAMTMG9,
2996 DDR_PCTL2_DRAMTMG12,
2997 DDR_PCTL2_DRAMTMG13,
2998 DDR_PCTL2_DRAMTMG14,
2999 DDR_PCTL2_ZQCTL0,
3000 DDR_PCTL2_DFITMG0,
3001 DDR_PCTL2_ODTCFG
3002 };
3003
3004 static const u16 phy_need_update_reg[] = {
3005 0x14,
3006 0x18,
3007 0x1c
3008 };
3009
pre_set_rate(struct dram_info * dram,struct rv1126_sdram_params * sdram_params,u32 dst_fsp,u32 dst_fsp_lp4)3010 static void pre_set_rate(struct dram_info *dram,
3011 struct rv1126_sdram_params *sdram_params,
3012 u32 dst_fsp, u32 dst_fsp_lp4)
3013 {
3014 u32 i, j, find;
3015 void __iomem *pctl_base = dram->pctl;
3016 void __iomem *phy_base = dram->phy;
3017 u32 phy_offset;
3018 u32 mr_tmp;
3019 u32 dramtype = sdram_params->base.dramtype;
3020
3021 sw_set_req(dram);
3022 /* DDRCTL timing update */
3023 for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
3024 for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
3025 j++) {
3026 if (sdram_params->pctl_regs.pctl[j][0] ==
3027 pctl_need_update_reg[i]) {
3028 writel(sdram_params->pctl_regs.pctl[j][1],
3029 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3030 pctl_need_update_reg[i]);
3031 find = j;
3032 break;
3033 }
3034 }
3035 }
3036
3037 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3038 u32 tmp, trefi;
3039
3040 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
3041 trefi = (tmp >> 16) & 0xfff;
3042 writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
3043 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
3044 #endif
3045
3046 sw_set_ack(dram);
3047
3048 /* phy timing update */
3049 if (dst_fsp == 0)
3050 phy_offset = 0;
3051 else
3052 phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
3053 /* cl cwl al update */
3054 for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
3055 for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
3056 j++) {
3057 if (sdram_params->phy_regs.phy[j][0] ==
3058 phy_need_update_reg[i]) {
3059 writel(sdram_params->phy_regs.phy[j][1],
3060 phy_base + phy_offset +
3061 phy_need_update_reg[i]);
3062 find = j;
3063 break;
3064 }
3065 }
3066 }
3067
3068 set_ds_odt(dram, sdram_params, dst_fsp);
3069 if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
3070 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3071 DDR_PCTL2_INIT4);
3072 /* MR13 */
3073 pctl_write_mr(dram->pctl, 3, 13,
3074 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3075 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
3076 ((0x2 << 6) >> dst_fsp_lp4), dramtype);
3077 writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3078 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
3079 ((0x2 << 6) >> dst_fsp_lp4),
3080 PHY_REG(phy_base, 0x1b));
3081 /* MR3 */
3082 pctl_write_mr(dram->pctl, 3, 3,
3083 mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
3084 PCTL2_MR_MASK,
3085 dramtype);
3086 writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
3087 PHY_REG(phy_base, 0x19));
3088
3089 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3090 DDR_PCTL2_INIT3);
3091 /* MR1 */
3092 pctl_write_mr(dram->pctl, 3, 1,
3093 mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
3094 PCTL2_MR_MASK,
3095 dramtype);
3096 writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
3097 PHY_REG(phy_base, 0x17));
3098 /* MR2 */
3099 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
3100 dramtype);
3101 writel(mr_tmp & PCTL2_MR_MASK,
3102 PHY_REG(phy_base, 0x18));
3103
3104 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3105 DDR_PCTL2_INIT6);
3106 /* MR11 */
3107 pctl_write_mr(dram->pctl, 3, 11,
3108 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3109 dramtype);
3110 writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
3111 PHY_REG(phy_base, 0x1a));
3112 /* MR12 */
3113 pctl_write_mr(dram->pctl, 3, 12,
3114 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
3115 dramtype);
3116
3117 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3118 DDR_PCTL2_INIT7);
3119 /* MR22 */
3120 pctl_write_mr(dram->pctl, 3, 22,
3121 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3122 dramtype);
3123 writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
3124 PHY_REG(phy_base, 0x1d));
3125 /* MR14 */
3126 pctl_write_mr(dram->pctl, 3, 14,
3127 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3128 dramtype);
3129 writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
3130 PHY_REG(phy_base, 0x1c));
3131 }
3132
3133 update_noc_timing(dram, sdram_params);
3134 }
3135
save_fsp_param(struct dram_info * dram,u32 dst_fsp,struct rv1126_sdram_params * sdram_params)3136 static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
3137 struct rv1126_sdram_params *sdram_params)
3138 {
3139 void __iomem *pctl_base = dram->pctl;
3140 void __iomem *phy_base = dram->phy;
3141 struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
3142 u32 temp, temp1;
3143 struct ddr2_3_4_lp2_3_info *ddr_info;
3144
3145 ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
3146
3147 p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
3148
3149 if (sdram_params->base.dramtype == LPDDR4 ||
3150 sdram_params->base.dramtype == LPDDR4X) {
3151 p_fsp_param->rd_odt_up_en = 0;
3152 p_fsp_param->rd_odt_down_en = 1;
3153 } else {
3154 p_fsp_param->rd_odt_up_en =
3155 ODT_INFO_PULLUP_EN(ddr_info->odt_info);
3156 p_fsp_param->rd_odt_down_en =
3157 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
3158 }
3159
3160 if (p_fsp_param->rd_odt_up_en)
3161 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
3162 else if (p_fsp_param->rd_odt_down_en)
3163 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
3164 else
3165 p_fsp_param->rd_odt = 0;
3166 p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
3167 p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
3168 p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
3169 p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
3170 p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
3171
3172 if (sdram_params->base.dramtype == DDR3) {
3173 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3174 DDR_PCTL2_INIT3);
3175 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3176 p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
3177 p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
3178 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3179 } else if (sdram_params->base.dramtype == DDR4) {
3180 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3181 DDR_PCTL2_INIT3);
3182 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
3183 p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
3184 p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
3185 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3186 } else if (sdram_params->base.dramtype == LPDDR3) {
3187 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3188 DDR_PCTL2_INIT4);
3189 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3190 p_fsp_param->ds_pdds = temp & 0xf;
3191
3192 p_fsp_param->dq_odt = lp3_odt_value;
3193 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
3194 } else if (sdram_params->base.dramtype == LPDDR4 ||
3195 sdram_params->base.dramtype == LPDDR4X) {
3196 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3197 DDR_PCTL2_INIT4);
3198 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
3199 p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
3200
3201 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3202 DDR_PCTL2_INIT6);
3203 temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
3204 p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
3205 p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
3206
3207 temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
3208 readl(PHY_REG(phy_base, 0x3ce)));
3209 temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
3210 readl(PHY_REG(phy_base, 0x3de)));
3211 p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
3212 temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
3213 readl(PHY_REG(phy_base, 0x3cf)));
3214 temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
3215 readl(PHY_REG(phy_base, 0x3df)));
3216 p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
3217 p_fsp_param->vref_ca[0] |=
3218 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3219 p_fsp_param->vref_ca[1] |=
3220 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
3221
3222 p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
3223 3) & 0x1;
3224 }
3225
3226 p_fsp_param->noc_timings.ddrtiminga0 =
3227 sdram_params->ch.noc_timings.ddrtiminga0;
3228 p_fsp_param->noc_timings.ddrtimingb0 =
3229 sdram_params->ch.noc_timings.ddrtimingb0;
3230 p_fsp_param->noc_timings.ddrtimingc0 =
3231 sdram_params->ch.noc_timings.ddrtimingc0;
3232 p_fsp_param->noc_timings.devtodev0 =
3233 sdram_params->ch.noc_timings.devtodev0;
3234 p_fsp_param->noc_timings.ddrmode =
3235 sdram_params->ch.noc_timings.ddrmode;
3236 p_fsp_param->noc_timings.ddr4timing =
3237 sdram_params->ch.noc_timings.ddr4timing;
3238 p_fsp_param->noc_timings.agingx0 =
3239 sdram_params->ch.noc_timings.agingx0;
3240 p_fsp_param->noc_timings.aging0 =
3241 sdram_params->ch.noc_timings.aging0;
3242 p_fsp_param->noc_timings.aging1 =
3243 sdram_params->ch.noc_timings.aging1;
3244 p_fsp_param->noc_timings.aging2 =
3245 sdram_params->ch.noc_timings.aging2;
3246 p_fsp_param->noc_timings.aging3 =
3247 sdram_params->ch.noc_timings.aging3;
3248
3249 p_fsp_param->flag = FSP_FLAG;
3250 }
3251
3252 #ifndef CONFIG_SPL_KERNEL_BOOT
copy_fsp_param_to_ddr(void)3253 static void copy_fsp_param_to_ddr(void)
3254 {
3255 memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3256 sizeof(fsp_param));
3257 }
3258 #endif
3259
pctl_modify_trfc(struct ddr_pctl_regs * pctl_regs,struct sdram_cap_info * cap_info,u32 dram_type,u32 freq)3260 static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3261 struct sdram_cap_info *cap_info, u32 dram_type,
3262 u32 freq)
3263 {
3264 u64 cs0_cap;
3265 u32 die_cap;
3266 u32 trfc_ns, trfc4_ns;
3267 u32 trfc, txsnr;
3268 u32 txs_abort_fast = 0;
3269 u32 tmp;
3270
3271 cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3272 die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3273
3274 switch (dram_type) {
3275 case DDR3:
3276 if (die_cap <= DIE_CAP_512MBIT)
3277 trfc_ns = 90;
3278 else if (die_cap <= DIE_CAP_1GBIT)
3279 trfc_ns = 110;
3280 else if (die_cap <= DIE_CAP_2GBIT)
3281 trfc_ns = 160;
3282 else if (die_cap <= DIE_CAP_4GBIT)
3283 trfc_ns = 260;
3284 else
3285 trfc_ns = 350;
3286 txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3287 break;
3288
3289 case DDR4:
3290 if (die_cap <= DIE_CAP_2GBIT) {
3291 trfc_ns = 160;
3292 trfc4_ns = 90;
3293 } else if (die_cap <= DIE_CAP_4GBIT) {
3294 trfc_ns = 260;
3295 trfc4_ns = 110;
3296 } else if (die_cap <= DIE_CAP_8GBIT) {
3297 trfc_ns = 350;
3298 trfc4_ns = 160;
3299 } else {
3300 trfc_ns = 550;
3301 trfc4_ns = 260;
3302 }
3303 txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3304 txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3305 break;
3306
3307 case LPDDR3:
3308 if (die_cap <= DIE_CAP_4GBIT)
3309 trfc_ns = 130;
3310 else
3311 trfc_ns = 210;
3312 txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3313 break;
3314
3315 case LPDDR4:
3316 case LPDDR4X:
3317 if (die_cap <= DIE_CAP_2GBIT)
3318 trfc_ns = 130;
3319 else if (die_cap <= DIE_CAP_4GBIT)
3320 trfc_ns = 180;
3321 else if (die_cap <= DIE_CAP_8GBIT)
3322 trfc_ns = 280;
3323 else
3324 trfc_ns = 380;
3325 txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3326 break;
3327
3328 default:
3329 return;
3330 }
3331 trfc = (trfc_ns * freq + 999) / 1000;
3332
3333 for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3334 switch (pctl_regs->pctl[i][0]) {
3335 case DDR_PCTL2_RFSHTMG:
3336 tmp = pctl_regs->pctl[i][1];
3337 /* t_rfc_min */
3338 tmp &= ~((u32)0x3ff);
3339 tmp |= ((trfc + 1) / 2) & 0x3ff;
3340 pctl_regs->pctl[i][1] = tmp;
3341 break;
3342
3343 case DDR_PCTL2_DRAMTMG8:
3344 if (dram_type == DDR3 || dram_type == DDR4) {
3345 tmp = pctl_regs->pctl[i][1];
3346 /* t_xs_x32 */
3347 tmp &= ~((u32)0x7f);
3348 tmp |= ((txsnr + 63) / 64 + 1) & 0x7f;
3349
3350 if (dram_type == DDR4) {
3351 /* t_xs_abort_x32 */
3352 tmp &= ~((u32)(0x7f << 16));
3353 tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 16;
3354 /* t_xs_fast_x32 */
3355 tmp &= ~((u32)(0x7f << 24));
3356 tmp |= (((txs_abort_fast + 63) / 64 + 1) & 0x7f) << 24;
3357 }
3358
3359 pctl_regs->pctl[i][1] = tmp;
3360 }
3361 break;
3362
3363 case DDR_PCTL2_DRAMTMG14:
3364 if (dram_type == LPDDR3 ||
3365 dram_type == LPDDR4 || dram_type == LPDDR4X) {
3366 tmp = pctl_regs->pctl[i][1];
3367 /* t_xsr */
3368 tmp &= ~((u32)0xfff);
3369 tmp |= ((txsnr + 1) / 2) & 0xfff;
3370 pctl_regs->pctl[i][1] = tmp;
3371 }
3372 break;
3373
3374 default:
3375 break;
3376 }
3377 }
3378 }
3379
ddr_set_rate(struct dram_info * dram,struct rv1126_sdram_params * sdram_params,u32 freq,u32 cur_freq,u32 dst_fsp,u32 dst_fsp_lp4,u32 training_en)3380 void ddr_set_rate(struct dram_info *dram,
3381 struct rv1126_sdram_params *sdram_params,
3382 u32 freq, u32 cur_freq, u32 dst_fsp,
3383 u32 dst_fsp_lp4, u32 training_en)
3384 {
3385 u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3386 u32 mr_tmp;
3387 u32 lp_stat;
3388 u32 dramtype = sdram_params->base.dramtype;
3389 struct rv1126_sdram_params *sdram_params_new;
3390 void __iomem *pctl_base = dram->pctl;
3391 void __iomem *phy_base = dram->phy;
3392 int delay = 1000;
3393
3394 lp_stat = low_power_update(dram, 0);
3395 sdram_params_new = get_default_sdram_config(freq);
3396 sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3397 sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3398
3399 pctl_modify_trfc(&sdram_params_new->pctl_regs,
3400 &sdram_params->ch.cap_info, dramtype, freq);
3401 pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3402
3403 while ((readl(pctl_base + DDR_PCTL2_STAT) &
3404 PCTL2_OPERATING_MODE_MASK) ==
3405 PCTL2_OPERATING_MODE_SR)
3406 continue;
3407
3408 dest_dll_off = 0;
3409 dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3410 DDR_PCTL2_INIT3);
3411 if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3412 (dramtype == DDR4 && !(dst_init3 & 1)))
3413 dest_dll_off = 1;
3414
3415 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3416 cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3417 DDR_PCTL2_INIT3);
3418 cur_init3 &= PCTL2_MR_MASK;
3419 cur_dll_off = 1;
3420 if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3421 (dramtype == DDR4 && (cur_init3 & 1)))
3422 cur_dll_off = 0;
3423
3424 if (!cur_dll_off) {
3425 if (dramtype == DDR3)
3426 cur_init3 |= 1;
3427 else
3428 cur_init3 &= ~1;
3429 pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3430 }
3431
3432 setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3433 PCTL2_DIS_AUTO_REFRESH);
3434 update_refresh_reg(dram);
3435
3436 enter_sr(dram, 1);
3437
3438 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3439 PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3440 &dram->pmugrf->soc_con[0]);
3441 sw_set_req(dram);
3442 clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3443 PCTL2_DFI_INIT_COMPLETE_EN);
3444 sw_set_ack(dram);
3445
3446 sw_set_req(dram);
3447 if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3448 setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3449 else
3450 clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3451
3452 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3453 PCTL2_DIS_SRX_ZQCL);
3454 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3455 PCTL2_DIS_SRX_ZQCL);
3456 sw_set_ack(dram);
3457
3458 writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3459 &dram->cru->clkgate_con[21]);
3460 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3461 (0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3462 (0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3463 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3464
3465 clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3466 rkclk_set_dpll(dram, freq * MHz / 2);
3467 phy_pll_set(dram, freq * MHz, 0);
3468 phy_pll_set(dram, freq * MHz, 1);
3469 setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3470
3471 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3472 PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3473 &dram->pmugrf->soc_con[0]);
3474 writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3475 &dram->cru->clkgate_con[21]);
3476 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3477 (0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3478 (0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3479 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3480 while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3481 PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE) {
3482 udelay(1);
3483 if (delay-- <= 0) {
3484 printascii("ERROR: Cannot wait DFI_INIT_COMPLETE\n");
3485 while (1)
3486 ;
3487 }
3488 }
3489
3490 sw_set_req(dram);
3491 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3492 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3493 sw_set_ack(dram);
3494 update_refresh_reg(dram);
3495 clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3496
3497 enter_sr(dram, 0);
3498
3499 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3500 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3501
3502 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3503 if (dramtype == LPDDR3) {
3504 pctl_write_mr(dram->pctl, 3, 1,
3505 (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3506 PCTL2_MR_MASK,
3507 dramtype);
3508 pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3509 dramtype);
3510 pctl_write_mr(dram->pctl, 3, 3,
3511 (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3512 PCTL2_MR_MASK,
3513 dramtype);
3514 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3515 } else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3516 pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3517 dramtype);
3518 if (!dest_dll_off) {
3519 pctl_write_mr(dram->pctl, 3, 0,
3520 ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3521 PCTL2_MR_MASK) | DDR3_DLL_RESET,
3522 dramtype);
3523 udelay(2);
3524 }
3525 pctl_write_mr(dram->pctl, 3, 0,
3526 (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
3527 PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
3528 dramtype);
3529 pctl_write_mr(dram->pctl, 3, 2,
3530 ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3531 PCTL2_MR_MASK), dramtype);
3532 if (dramtype == DDR4) {
3533 pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3534 dramtype);
3535 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3536 DDR_PCTL2_INIT6);
3537 pctl_write_mr(dram->pctl, 3, 4,
3538 (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3539 PCTL2_MR_MASK,
3540 dramtype);
3541 pctl_write_mr(dram->pctl, 3, 5,
3542 mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
3543 PCTL2_MR_MASK,
3544 dramtype);
3545
3546 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3547 DDR_PCTL2_INIT7);
3548 /* updata ddr4 vrefdq */
3549 pctl_write_mr(dram->pctl, 3, 6,
3550 (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
3551 PCTL2_MR_MASK, dramtype);
3552 pctl_write_mr(dram->pctl, 3, 6,
3553 (mr_tmp | (0x1 << 7)) >> PCTL2_DDR4_MR6_SHIFT &
3554 PCTL2_MR_MASK, dramtype);
3555 pctl_write_mr(dram->pctl, 3, 6,
3556 mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
3557 PCTL2_MR_MASK,
3558 dramtype);
3559 }
3560 } else if (dramtype == LPDDR4 || dramtype == LPDDR4X) {
3561 pctl_write_mr(dram->pctl, 3, 13,
3562 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3563 PCTL2_MR_MASK) & (~(BIT(7)))) |
3564 dst_fsp_lp4 << 7, dramtype);
3565 }
3566 clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3567 PCTL2_DIS_AUTO_REFRESH);
3568 update_refresh_reg(dram);
3569
3570 /* training */
3571 high_freq_training(dram, sdram_params_new, dst_fsp);
3572 low_power_update(dram, lp_stat);
3573
3574 save_fsp_param(dram, dst_fsp, sdram_params_new);
3575 }
3576
ddr_set_rate_for_fsp(struct dram_info * dram,struct rv1126_sdram_params * sdram_params)3577 static void ddr_set_rate_for_fsp(struct dram_info *dram,
3578 struct rv1126_sdram_params *sdram_params)
3579 {
3580 struct ddr2_3_4_lp2_3_info *ddr_info;
3581 u32 f0;
3582 u32 dramtype = sdram_params->base.dramtype;
3583 #ifndef CONFIG_SPL_KERNEL_BOOT
3584 u32 f1, f2, f3;
3585 #endif
3586
3587 ddr_info = get_ddr_drv_odt_info(dramtype);
3588 if (!ddr_info)
3589 return;
3590
3591 f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3592 DDR_FREQ_MASK;
3593
3594 #ifndef CONFIG_SPL_KERNEL_BOOT
3595 memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3596 memset((void *)&fsp_param, 0, sizeof(fsp_param));
3597
3598 f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3599 DDR_FREQ_MASK;
3600 f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3601 DDR_FREQ_MASK;
3602 f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3603 DDR_FREQ_MASK;
3604 #endif
3605
3606 if (get_wrlvl_val(dram, sdram_params))
3607 printascii("get wrlvl value fail\n");
3608
3609 #ifndef CONFIG_SPL_KERNEL_BOOT
3610 printascii("change to: ");
3611 printdec(f1);
3612 printascii("MHz\n");
3613 ddr_set_rate(&dram_info, sdram_params, f1,
3614 sdram_params->base.ddr_freq, 1, 1, 1);
3615 printascii("change to: ");
3616 printdec(f2);
3617 printascii("MHz\n");
3618 ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
3619 printascii("change to: ");
3620 printdec(f3);
3621 printascii("MHz\n");
3622 ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
3623 #endif
3624 printascii("change to: ");
3625 printdec(f0);
3626 printascii("MHz(final freq)\n");
3627 #ifndef CONFIG_SPL_KERNEL_BOOT
3628 ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3629 #else
3630 ddr_set_rate(&dram_info, sdram_params, f0, sdram_params->base.ddr_freq, 1, 1, 1);
3631 #endif
3632 }
3633
get_uart_config(void)3634 int get_uart_config(void)
3635 {
3636 struct sdram_head_info_index_v2 *index =
3637 (struct sdram_head_info_index_v2 *)common_info;
3638 struct global_info *gbl_info;
3639
3640 gbl_info = (struct global_info *)((void *)common_info +
3641 index->global_index.offset * 4);
3642
3643 return gbl_info->uart_info;
3644 }
3645
3646 /* return: 0 = success, other = fail */
sdram_init(void)3647 int sdram_init(void)
3648 {
3649 struct rv1126_sdram_params *sdram_params;
3650 int ret = 0;
3651 struct sdram_head_info_index_v2 *index =
3652 (struct sdram_head_info_index_v2 *)common_info;
3653 struct global_info *gbl_info;
3654
3655 dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3656 dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3657 dram_info.grf = (void *)GRF_BASE_ADDR;
3658 dram_info.cru = (void *)CRU_BASE_ADDR;
3659 dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3660 dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3661 dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3662
3663 #ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3664 printascii("extended temp support\n");
3665 #endif
3666 if (index->version_info != 3 ||
3667 (index->global_index.size != sizeof(struct global_info) / 4) ||
3668 (index->ddr3_index.size !=
3669 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3670 (index->ddr4_index.size !=
3671 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3672 (index->lp3_index.size !=
3673 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3674 (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3675 (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3676 index->global_index.offset == 0 ||
3677 index->ddr3_index.offset == 0 ||
3678 index->ddr4_index.offset == 0 ||
3679 index->lp3_index.offset == 0 ||
3680 index->lp4_index.offset == 0 ||
3681 index->lp4x_index.offset == 0) {
3682 printascii("common info error\n");
3683 goto error;
3684 }
3685
3686 gbl_info = (struct global_info *)((void *)common_info +
3687 index->global_index.offset * 4);
3688
3689 dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3690 dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3691
3692 sdram_params = &sdram_configs[0];
3693 #if (CONFIG_ROCKCHIP_TPL_INIT_DRAM_TYPE == 8)
3694 for (int j = 0; j < ARRAY_SIZE(sdram_configs); j++)
3695 sdram_configs[j].base.dramtype = LPDDR4X;
3696 #endif
3697 if (sdram_params->base.dramtype == DDR3 ||
3698 sdram_params->base.dramtype == DDR4) {
3699 if (DDR_2T_INFO(gbl_info->info_2t))
3700 sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3701 else
3702 sdram_params->pctl_regs.pctl[0][1] &=
3703 ~(0x1 << 10);
3704 }
3705 ret = sdram_init_detect(&dram_info, sdram_params);
3706 if (ret) {
3707 sdram_print_dram_type(sdram_params->base.dramtype);
3708 printascii(", ");
3709 printdec(sdram_params->base.ddr_freq);
3710 printascii("MHz\n");
3711 goto error;
3712 }
3713 print_ddr_info(sdram_params);
3714
3715 if (check_lp4_rzqi(&dram_info, sdram_params))
3716 printascii("Please check the soldering and hardware design of DRAM ZQ.\n"
3717 "ZQ error may lead to instability at high frequancies!\n");
3718
3719 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3720 init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3721 (u8)sdram_params->ch.cap_info.rank);
3722 #endif
3723
3724 ddr_set_rate_for_fsp(&dram_info, sdram_params);
3725 #ifndef CONFIG_SPL_KERNEL_BOOT
3726 copy_fsp_param_to_ddr();
3727 #endif
3728
3729 ddr_set_atags(&dram_info, sdram_params);
3730 #if defined(CONFIG_CMD_DDR_TEST_TOOL)
3731 save_rw_trn_result_to_ddr(&rw_trn_result);
3732 #endif
3733
3734 printascii("out\n");
3735
3736 return ret;
3737 error:
3738 printascii("error\n");
3739 return (-1);
3740 }
3741 #endif /* CONFIG_TPL_BUILD */
3742