1 /*
2 * (C) Copyright 2016-2017 Rockchip Inc.
3 *
4 * SPDX-License-Identifier: GPL-2.0
5 *
6 * Adapted from coreboot.
7 */
8
9 #include <common.h>
10 #include <clk.h>
11 #include <dm.h>
12 #include <dt-structs.h>
13 #include <ram.h>
14 #include <regmap.h>
15 #include <syscon.h>
16 #include <asm/io.h>
17 #include <asm/arch/clock.h>
18 #include <asm/arch/sdram.h>
19 #include <asm/arch/sdram_rk3399.h>
20 #include <asm/arch/cru_rk3399.h>
21 #include <asm/arch/grf_rk3399.h>
22 #include <asm/arch/pmu_rk3399.h>
23 #include <asm/arch/hardware.h>
24 #include <linux/err.h>
25 #include <time.h>
26
27 DECLARE_GLOBAL_DATA_PTR;
28
29 struct chan_info {
30 struct rk3399_ddr_pctl_regs *pctl;
31 struct rk3399_ddr_pi_regs *pi;
32 struct rk3399_ddr_publ_regs *publ;
33 struct msch_regs *msch;
34 };
35
36 struct dram_info {
37 #ifdef CONFIG_TPL_BUILD
38 struct chan_info chan[2];
39 struct clk ddr_clk;
40 struct rk3399_cru *cru;
41 struct rk3399_grf_regs *grf;
42 struct rk3399_pmu_regs *pmu;
43 struct rk3399_pmucru *pmucru;
44 struct rk3399_pmusgrf_regs *pmusgrf;
45 struct rk3399_ddr_cic_regs *cic;
46 #endif
47 struct ram_info info;
48 struct rk3399_pmugrf_regs *pmugrf;
49 };
50
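/*
 * These masks follow the Rockchip write-mask convention: bits [31:16] act
 * as a write-enable mask for the corresponding bits [15:0], so each macro
 * pairs the enable bit with the hold value it sets.
 */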
51 #define PRESET_SGRF_HOLD(n) ((0x1 << (6 + 16)) | ((n) << 6))
52 #define PRESET_GPIO0_HOLD(n) ((0x1 << (7 + 16)) | ((n) << 7))
53 #define PRESET_GPIO1_HOLD(n) ((0x1 << (8 + 16)) | ((n) << 8))
54
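/* Denali PHY pad drive-strength / ODT select encodings (values in ohms) */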
55 #define PHY_DRV_ODT_HI_Z 0x0
56 #define PHY_DRV_ODT_240 0x1
57 #define PHY_DRV_ODT_120 0x8
58 #define PHY_DRV_ODT_80 0x9
59 #define PHY_DRV_ODT_60 0xc
60 #define PHY_DRV_ODT_48 0xd
61 #define PHY_DRV_ODT_40 0xe
62 #define PHY_DRV_ODT_34_3 0xf
63
64 #ifdef CONFIG_TPL_BUILD
65
66 struct rockchip_dmc_plat {
67 #if CONFIG_IS_ENABLED(OF_PLATDATA)
68 struct dtd_rockchip_rk3399_dmc dtplat;
69 #else
70 struct rk3399_sdram_params sdram_params;
71 #endif
72 struct regmap *map;
73 };
74
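/* saved PWRUP_SREFRESH_EXIT bit per channel, restored once init completes */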
75 u32 g_pwrup_srefresh_exit[2];
76
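/* register images for the LPDDR4 DFS frequency points (400MHz and 800MHz) */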
77 struct rk3399_sdram_params dfs_configs[] = {
78 #include "sdram-rk3399-lpddr4-400.inc"
79 #include "sdram-rk3399-lpddr4-800.inc"
80 };
81
82 #define CRU_SFTRST_DDR_CTRL(ch, n) ((0x1 << (8 + 16 + (ch) * 4)) | \
83 ((n) << (8 + (ch) * 4)))
84 #define CRU_SFTRST_DDR_PHY(ch, n) ((0x1 << (9 + 16 + (ch) * 4)) | \
85 ((n) << (9 + (ch) * 4)))
static void rkclk_ddr_reset(struct rk3399_cru *cru, u32 channel, u32 ctl,
			    u32 phy)
88 {
89 channel &= 0x1;
90 ctl &= 0x1;
91 phy &= 0x1;
92 writel(CRU_SFTRST_DDR_CTRL(channel, ctl) |
93 CRU_SFTRST_DDR_PHY(channel, phy),
94 &cru->softrst_con[4]);
95 }
96
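/*
 * Assert both the controller and PHY resets for the channel, then release
 * the PHY reset followed by the controller reset.
 */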
static void phy_pctrl_reset(struct rk3399_cru *cru,
			    u32 channel)
99 {
100 rkclk_ddr_reset(cru, channel, 1, 1);
101 udelay(10);
102 rkclk_ddr_reset(cru, channel, 1, 0);
103 udelay(10);
104 rkclk_ddr_reset(cru, channel, 0, 0);
105 udelay(10);
106 }
107
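/*
 * Select the PHY software master (DLL bypass) mode for frequencies at or
 * below 125MHz and clear it for higher frequencies.
 */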
static void phy_dll_bypass_set(struct rk3399_ddr_publ_regs *ddr_publ_regs,
			       u32 freq)
110 {
111 u32 *denali_phy = ddr_publ_regs->denali_phy;
112
	/* Per the IP spec, only frequencies <= 125MHz can enter DLL bypass mode */
114 if (freq <= 125) {
115 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
116 setbits_le32(&denali_phy[86], (0x3 << 2) << 8);
117 setbits_le32(&denali_phy[214], (0x3 << 2) << 8);
118 setbits_le32(&denali_phy[342], (0x3 << 2) << 8);
119 setbits_le32(&denali_phy[470], (0x3 << 2) << 8);
120
121 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
122 setbits_le32(&denali_phy[547], (0x3 << 2) << 16);
123 setbits_le32(&denali_phy[675], (0x3 << 2) << 16);
124 setbits_le32(&denali_phy[803], (0x3 << 2) << 16);
125 } else {
126 /* phy_sw_master_mode_X PHY_86/214/342/470 4bits offset_8 */
127 clrbits_le32(&denali_phy[86], (0x3 << 2) << 8);
128 clrbits_le32(&denali_phy[214], (0x3 << 2) << 8);
129 clrbits_le32(&denali_phy[342], (0x3 << 2) << 8);
130 clrbits_le32(&denali_phy[470], (0x3 << 2) << 8);
131
132 /* phy_adrctl_sw_master_mode PHY_547/675/803 4bits offset_16 */
133 clrbits_le32(&denali_phy[547], (0x3 << 2) << 16);
134 clrbits_le32(&denali_phy[675], (0x3 << 2) << 16);
135 clrbits_le32(&denali_phy[803], (0x3 << 2) << 16);
136 }
137 }
138
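/*
 * Program the column/bank/row address "diff" fields and the CS map into the
 * controller and PI registers based on the detected channel geometry.
 */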
static void set_memory_map(const struct chan_info *chan, u32 channel,
			   const struct rk3399_sdram_params *sdram_params)
141 {
142 const struct rk3399_sdram_channel *sdram_ch =
143 &sdram_params->ch[channel];
144 u32 *denali_ctl = chan->pctl->denali_ctl;
145 u32 *denali_pi = chan->pi->denali_pi;
146 u32 cs_map;
147 u32 reduc;
148 u32 row;
149
150 /* Get row number from ddrconfig setting */
151 if (sdram_ch->cap_info.ddrconfig < 2 ||
152 sdram_ch->cap_info.ddrconfig == 4)
153 row = 16;
154 else if (sdram_ch->cap_info.ddrconfig == 3 ||
155 sdram_ch->cap_info.ddrconfig == 5)
156 row = 14;
157 else
158 row = 15;
159
160 cs_map = (sdram_ch->cap_info.rank > 1) ? 3 : 1;
161 reduc = (sdram_ch->cap_info.bw == 2) ? 0 : 1;
162
163 /* Set the dram configuration to ctrl */
164 clrsetbits_le32(&denali_ctl[191], 0xF, (12 - sdram_ch->cap_info.col));
165 clrsetbits_le32(&denali_ctl[190], (0x3 << 16) | (0x7 << 24),
166 ((3 - sdram_ch->cap_info.bk) << 16) |
167 ((16 - row) << 24));
168
169 clrsetbits_le32(&denali_ctl[196], 0x3 | (1 << 16),
170 cs_map | (reduc << 16));
171
172 /* PI_199 PI_COL_DIFF:RW:0:4 */
173 clrsetbits_le32(&denali_pi[199], 0xF, (12 - sdram_ch->cap_info.col));
174
175 /* PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2 */
176 clrsetbits_le32(&denali_pi[155], (0x3 << 16) | (0x7 << 24),
177 ((3 - sdram_ch->cap_info.bk) << 16) |
178 ((16 - row) << 24));
179 if (sdram_params->base.dramtype == LPDDR4) {
180 if (cs_map == 1)
181 cs_map = 0x5;
182 else if (cs_map == 2)
183 cs_map = 0xa;
184 else
185 cs_map = 0xF;
186 }
187
188 /* PI_41 PI_CS_MAP:RW:24:4 */
189 clrsetbits_le32(&denali_pi[41], 0xf << 24, cs_map << 24);
190 if (sdram_ch->cap_info.rank == 1 &&
191 sdram_params->base.dramtype == DDR3)
192 writel(0x2EC7FFFF, &denali_pi[34]);
193 }
194
static void phy_io_config(const struct chan_info *chan,
			  struct rk3399_sdram_params *sdram_params, u32 rd_vref,
			  u32 b_reg, u32 channel)
198 {
199 u32 *denali_phy;
200 u32 *denali_ctl;
201 u32 vref_mode_dq = 0, vref_value_dq = 0;
202 u32 vref_mode_ac = 0, vref_value_ac = 0;
203 u32 mode_sel = 0;
204 u32 boostp, boostn;
205 u32 slewp, slewn;
206 u32 speed;
207 u32 rx_cm_input;
208 u32 reg_value;
209 u32 ds_value, odt_value;
210
211 if (b_reg) {
212 denali_phy = chan->publ->denali_phy;
213 denali_ctl = chan->pctl->denali_ctl;
214 } else {
215 denali_phy = sdram_params->phy_regs.denali_phy;
216 denali_ctl = sdram_params->pctl_regs.denali_ctl;
217 }
218 rd_vref *= 1000;
219 /* vref setting & mode setting */
220 if (sdram_params->base.dramtype == LPDDR4) {
221 if (rd_vref < 36700) {
222 /* MODE_LV[2:0] = LPDDR4 (Range 2)*/
223 vref_mode_dq = 0x7;
224 /* MODE[2:0]= LPDDR4 Range 2(0.4*VDDQ) */
225 mode_sel = 0x5;
226 vref_value_dq = (rd_vref - 3300) / 521;
227 } else {
228 /* MODE_LV[2:0] = LPDDR4 (Range 1)*/
229 vref_mode_dq = 0x6;
230 /* MODE[2:0]= LPDDR4 Range 1(0.33*VDDQ) */
231 mode_sel = 0x4;
232 vref_value_dq = (rd_vref - 15300) / 521;
233 }
234 vref_mode_ac = 0x6;
235 /* VDDQ/3/2=16.8% */
236 vref_value_ac = 0x3;
237 } else if (sdram_params->base.dramtype == LPDDR3) {
238 /* LPDDR3 */
239 if (sdram_params->base.odt == 1) {
240 vref_mode_dq = 0x5; /*LPDDR3 ODT*/
241 ds_value = readl(&denali_ctl[138]) & 0xf;
242 odt_value = (readl(&denali_phy[6]) >> 4) & 0xf;
243 if (ds_value == 0x3) { /* 48ohm */
244 switch (odt_value) {
245 case PHY_DRV_ODT_240:
246 vref_value_dq = 0x1B;
247 break;
248 case PHY_DRV_ODT_120:
249 vref_value_dq = 0x26;
250 break;
251 case PHY_DRV_ODT_60:
252 vref_value_dq = 0x36;
253 break;
254 }
255 } else if (ds_value == 0x2) { /* 40ohm */
256 switch (odt_value) {
257 case PHY_DRV_ODT_240:
258 vref_value_dq = 0x19;
259 break;
260 case PHY_DRV_ODT_120:
261 vref_value_dq = 0x23;
262 break;
263 case PHY_DRV_ODT_60:
264 vref_value_dq = 0x31;
265 break;
266 }
267 } else if (ds_value == 0x1) { /*34.3ohm*/
268 switch (odt_value) {
269 case PHY_DRV_ODT_240:
270 vref_value_dq = 0x17;
271 break;
272 case PHY_DRV_ODT_120:
273 vref_value_dq = 0x20;
274 break;
275 case PHY_DRV_ODT_60:
276 vref_value_dq = 0x2e;
277 break;
278 }
279 }
280 } else {
281 vref_mode_dq = 0x2;
282 vref_value_dq = 0x1f;
283 }
284 vref_mode_ac = 0x2;
285 vref_value_ac = 0x1f;
286 mode_sel = 0x0;
287 } else if (sdram_params->base.dramtype == DDR3) {
288 vref_mode_dq = 0x1;
289 vref_value_dq = 0x1f;
290 vref_mode_ac = 0x1;
291 vref_value_ac = 0x1f;
292 mode_sel = 0x1;
293 }
294
295 reg_value = (vref_mode_dq << 9) | (0x1 << 8) | vref_value_dq;
296 /* PHY_913 PHY_PAD_VREF_CTRL_DQ_0 12bits offset_8 */
297 clrsetbits_le32(&denali_phy[913], 0xfff << 8, reg_value << 8);
298 /* PHY_914 PHY_PAD_VREF_CTRL_DQ_1 12bits offset_0 */
299 clrsetbits_le32(&denali_phy[914], 0xfff, reg_value);
300 /* PHY_914 PHY_PAD_VREF_CTRL_DQ_2 12bits offset_16 */
301 clrsetbits_le32(&denali_phy[914], 0xfff << 16, reg_value << 16);
302 /* PHY_915 PHY_PAD_VREF_CTRL_DQ_3 12bits offset_0 */
303 clrsetbits_le32(&denali_phy[915], 0xfff, reg_value);
304
305 reg_value = (vref_mode_ac << 9) | (0x1 << 8) | vref_value_ac;
306 /* PHY_915 PHY_PAD_VREF_CTRL_AC 12bits offset_16 */
307 clrsetbits_le32(&denali_phy[915], 0xfff << 16, reg_value << 16);
308
309 /* PHY_924 PHY_PAD_FDBK_DRIVE */
310 clrsetbits_le32(&denali_phy[924], 0x7 << 15, mode_sel << 15);
311 /* PHY_926 PHY_PAD_DATA_DRIVE */
312 clrsetbits_le32(&denali_phy[926], 0x7 << 6, mode_sel << 6);
313 /* PHY_927 PHY_PAD_DQS_DRIVE */
314 clrsetbits_le32(&denali_phy[927], 0x7 << 6, mode_sel << 6);
315 /* PHY_928 PHY_PAD_ADDR_DRIVE */
316 clrsetbits_le32(&denali_phy[928], 0x7 << 14, mode_sel << 14);
317 /* PHY_929 PHY_PAD_CLK_DRIVE */
318 clrsetbits_le32(&denali_phy[929], 0x7 << 14, mode_sel << 14);
319 /* PHY_935 PHY_PAD_CKE_DRIVE */
320 clrsetbits_le32(&denali_phy[935], 0x7 << 14, mode_sel << 14);
321 /* PHY_937 PHY_PAD_RST_DRIVE */
322 clrsetbits_le32(&denali_phy[937], 0x7 << 14, mode_sel << 14);
323 /* PHY_939 PHY_PAD_CS_DRIVE */
324 clrsetbits_le32(&denali_phy[939], 0x7 << 14, mode_sel << 14);
325
326 /* BOOSTP_EN & BOOSTn_EN */
327 if (sdram_params->base.dramtype == LPDDR4) {
328 boostp = 0x1;
329 boostn = 0x1;
330 reg_value = ((boostp << 4) | boostn);
331 /* PHY_925 PHY_PAD_FDBK_DRIVE2 */
332 clrsetbits_le32(&denali_phy[925], 0xff << 8, reg_value << 8);
333 /* PHY_926 PHY_PAD_DATA_DRIVE */
334 clrsetbits_le32(&denali_phy[926], 0xff << 12, reg_value << 12);
335 /* PHY_927 PHY_PAD_DQS_DRIVE */
336 clrsetbits_le32(&denali_phy[927], 0xff << 14, reg_value << 14);
337 /* PHY_928 PHY_PAD_ADDR_DRIVE */
338 clrsetbits_le32(&denali_phy[928], 0xff << 20, reg_value << 20);
339 /* PHY_929 PHY_PAD_CLK_DRIVE */
340 clrsetbits_le32(&denali_phy[929], 0xff << 22, reg_value << 22);
341 /* PHY_935 PHY_PAD_CKE_DRIVE */
342 clrsetbits_le32(&denali_phy[935], 0xff << 20, reg_value << 20);
343 /* PHY_937 PHY_PAD_RST_DRIVE */
344 clrsetbits_le32(&denali_phy[937], 0xff << 20, reg_value << 20);
345 /* PHY_939 PHY_PAD_CS_DRIVE */
346 clrsetbits_le32(&denali_phy[939], 0xff << 20, reg_value << 20);
347 }
348
349 /* SLEWP & SLEWN */
350 if (sdram_params->base.dramtype == LPDDR4) {
351 slewp = 0x1;
352 slewn = 0x1;
353 reg_value = ((slewp << 3) | slewn);
354 /* PHY_924 PHY_PAD_FDBK_DRIVE */
355 clrsetbits_le32(&denali_phy[924], 0x3f << 8, reg_value << 8);
356 /* PHY_926 PHY_PAD_DATA_DRIVE */
357 clrsetbits_le32(&denali_phy[926], 0x3f, reg_value);
358 /* PHY_927 PHY_PAD_DQS_DRIVE */
359 clrsetbits_le32(&denali_phy[927], 0x3f, reg_value);
360 /* PHY_928 PHY_PAD_ADDR_DRIVE */
361 clrsetbits_le32(&denali_phy[928], 0x3f << 8, reg_value << 8);
362 /* PHY_929 PHY_PAD_CLK_DRIVE */
363 clrsetbits_le32(&denali_phy[929], 0x3f << 8, reg_value << 8);
364 /* PHY_935 PHY_PAD_CKE_DRIVE */
365 clrsetbits_le32(&denali_phy[935], 0x3f << 8, reg_value << 8);
366 /* PHY_937 PHY_PAD_RST_DRIVE */
367 clrsetbits_le32(&denali_phy[937], 0x3f << 8, reg_value << 8);
368 /* PHY_939 PHY_PAD_CS_DRIVE */
369 clrsetbits_le32(&denali_phy[939], 0x3f << 8, reg_value << 8);
370 }
371
372 /* speed setting */
373 if (sdram_params->base.ddr_freq < 400 * MHz)
374 speed = 0x0;
375 else if (sdram_params->base.ddr_freq < 800 * MHz)
376 speed = 0x1;
377 else if (sdram_params->base.ddr_freq < 1200 * MHz)
378 speed = 0x2;
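	/* NOTE: the frequency-based value above is discarded; speed is forced to 0x2 */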
379 speed = 0x2;
380
381 /* PHY_924 PHY_PAD_FDBK_DRIVE */
382 clrsetbits_le32(&denali_phy[924], 0x3 << 21, speed << 21);
383 /* PHY_926 PHY_PAD_DATA_DRIVE */
384 clrsetbits_le32(&denali_phy[926], 0x3 << 9, speed << 9);
385 /* PHY_927 PHY_PAD_DQS_DRIVE */
386 clrsetbits_le32(&denali_phy[927], 0x3 << 9, speed << 9);
387 /* PHY_928 PHY_PAD_ADDR_DRIVE */
388 clrsetbits_le32(&denali_phy[928], 0x3 << 17, speed << 17);
389 /* PHY_929 PHY_PAD_CLK_DRIVE */
390 clrsetbits_le32(&denali_phy[929], 0x3 << 17, speed << 17);
391 /* PHY_935 PHY_PAD_CKE_DRIVE */
392 clrsetbits_le32(&denali_phy[935], 0x3 << 17, speed << 17);
393 /* PHY_937 PHY_PAD_RST_DRIVE */
394 clrsetbits_le32(&denali_phy[937], 0x3 << 17, speed << 17);
395 /* PHY_939 PHY_PAD_CS_DRIVE */
396 clrsetbits_le32(&denali_phy[939], 0x3 << 17, speed << 17);
397
398 /* RX_CM_INPUT */
399 if (sdram_params->base.dramtype == LPDDR4) {
400 rx_cm_input = 0x1;
401 /* PHY_924 PHY_PAD_FDBK_DRIVE */
402 clrsetbits_le32(&denali_phy[924], 0x1 << 14, rx_cm_input << 14);
403 /* PHY_926 PHY_PAD_DATA_DRIVE */
404 clrsetbits_le32(&denali_phy[926], 0x1 << 11, rx_cm_input << 11);
405 /* PHY_927 PHY_PAD_DQS_DRIVE */
406 clrsetbits_le32(&denali_phy[927], 0x1 << 13, rx_cm_input << 13);
407 /* PHY_928 PHY_PAD_ADDR_DRIVE */
408 clrsetbits_le32(&denali_phy[928], 0x1 << 19, rx_cm_input << 19);
409 /* PHY_929 PHY_PAD_CLK_DRIVE */
410 clrsetbits_le32(&denali_phy[929], 0x1 << 21, rx_cm_input << 21);
411 /* PHY_935 PHY_PAD_CKE_DRIVE */
412 clrsetbits_le32(&denali_phy[935], 0x1 << 19, rx_cm_input << 19);
413 /* PHY_937 PHY_PAD_RST_DRIVE */
414 clrsetbits_le32(&denali_phy[937], 0x1 << 19, rx_cm_input << 19);
415 /* PHY_939 PHY_PAD_CS_DRIVE */
416 clrsetbits_le32(&denali_phy[939], 0x1 << 19, rx_cm_input << 19);
417 }
418 }
419
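/* per-frequency DRAM-side and PHY-side drive/ODT/Vref settings for LPDDR4 */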
420 struct io_setting {
421 u32 mhz;
422 u32 mr5;
423 /* dram side */
424 u32 dq_odt;
425 u32 ca_odt;
426 u32 pdds;
427 u32 dq_vref;
428 u32 ca_vref;
429 /* phy side */
430 u32 rd_odt;
431 u32 wr_dq_drv;
432 u32 wr_ca_drv;
433 u32 wr_ckcs_drv;
434 u32 rd_odt_en;
435 u32 rd_vref;
436 };
437
438 struct io_setting lp4_io_set[] = {
439 {
440 50 * MHz,
441 0,
442 /* dram side */
443 0, /* dq_odt; */
444 0, /* ca_odt; */
445 6, /* pdds; */
446 0x72, /* dq_vref; */
447 0x72, /* ca_vref; */
448 /* phy side */
449 PHY_DRV_ODT_HI_Z, /* rd_odt; */
450 PHY_DRV_ODT_40, /* wr_dq_drv; */
451 PHY_DRV_ODT_40, /* wr_ca_drv; */
452 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
453 0, /* rd_odt_en;*/
454 41, /* rd_vref; (unit %, range 3.3% - 48.7%) */
455 },
456 {
457 400 * MHz,
458 0,
459 /* dram side */
460 0, /* dq_odt; */
461 0, /* ca_odt; */
462 6, /* pdds; */
463 0x72, /* dq_vref; */
464 0x72, /* ca_vref; */
465 /* phy side */
466 PHY_DRV_ODT_HI_Z, /* rd_odt; */
467 PHY_DRV_ODT_40, /* wr_dq_drv; */
468 PHY_DRV_ODT_40, /* wr_ca_drv; */
469 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
470 0, /* rd_odt_en; */
471 /* shmoo result, read signal 41% is the best */
472 41, /* rd_vref; (unit %, range 3.3% - 48.7%) */
473 },
474 {
475 800 * MHz,
476 0,
477 /* dram side */
478 0, /* dq_odt; */
479 0, /* ca_odt; */
480 1, /* pdds; */
481 0x72, /* dq_vref; */
482 0x72, /* ca_vref; */
483 /* phy side */
484 PHY_DRV_ODT_40, /* rd_odt; */
485 PHY_DRV_ODT_40, /* wr_dq_drv; */
486 PHY_DRV_ODT_40, /* wr_ca_drv; */
487 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
488 1, /* rd_odt_en; */
489 17, /* rd_vref; (unit %, range 3.3% - 48.7%) */
490 },
491 {
492 1066 * MHz,
493 0,
494 /* dram side */
495 6, /* dq_odt; */
496 0, /* ca_odt; */
497 1, /* pdds; */
498 0x10, /* dq_vref; */
499 0x72, /* ca_vref; */
500 /* phy side */
501 PHY_DRV_ODT_40, /* rd_odt; */
502 PHY_DRV_ODT_60, /* wr_dq_drv; */
503 PHY_DRV_ODT_40, /* wr_ca_drv; */
504 PHY_DRV_ODT_40, /* wr_ckcs_drv; */
505 1, /* rd_odt_en; */
506 17, /* rd_vref; (unit %, range 3.3% - 48.7%) */
507 },
508 };
509
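/*
 * Return the first lp4_io_set entry whose frequency covers ddr_freq; entries
 * with a non-zero mr5 must additionally match the given mr5 value.
 */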
static struct io_setting *get_io_set(struct rk3399_sdram_params *sdram_params,
				     u32 mr5)
512 {
513 struct io_setting *io = NULL;
514 u32 n;
515
516 for (n = 0; n < ARRAY_SIZE(lp4_io_set); n++) {
517 io = &lp4_io_set[n];
518 if (io->mr5 != 0) {
519 if (io->mhz >= sdram_params->base.ddr_freq &&
520 io->mr5 == mr5)
521 break;
522 } else {
523 if (io->mhz >= sdram_params->base.ddr_freq)
524 break;
525 }
526 }
527 return io;
528 }
529
/*
 * b_reg: indicates whether to program the PHY registers or just to
 * update sdram_params.
 * If b_reg = 0, the channel and mr5 arguments are ignored.
 */
static void set_ds_odt(const struct chan_info *chan,
		       struct rk3399_sdram_params *sdram_params, u32 b_reg,
		       u32 channel, u32 mr5)
538 {
539 u32 *denali_phy;
540 u32 *denali_ctl;
541 u32 tsel_idle_en, tsel_wr_en, tsel_rd_en;
542 u32 tsel_idle_select_p, tsel_rd_select_p;
543 u32 tsel_idle_select_n, tsel_rd_select_n;
544 u32 tsel_wr_select_dq_p, tsel_wr_select_ca_p;
545 u32 tsel_wr_select_dq_n, tsel_wr_select_ca_n;
546 u32 soc_odt = 0;
547 u32 tsel_ckcs_select_p, tsel_ckcs_select_n;
548 u32 reg_value;
549 struct io_setting *io = NULL;
550
551 if (b_reg) {
552 denali_phy = chan->publ->denali_phy;
553 denali_ctl = chan->pctl->denali_ctl;
554 } else {
555 denali_phy = sdram_params->phy_regs.denali_phy;
556 denali_ctl = sdram_params->pctl_regs.denali_ctl;
557 }
558
559 if (sdram_params->base.dramtype == LPDDR4) {
560 io = get_io_set(sdram_params, mr5);
561
562 tsel_rd_select_p = PHY_DRV_ODT_HI_Z;
563 tsel_rd_select_n = io->rd_odt;
564
565 tsel_idle_select_p = PHY_DRV_ODT_HI_Z;
566 tsel_idle_select_n = PHY_DRV_ODT_HI_Z;
567
568 tsel_wr_select_dq_p = io->wr_dq_drv;
569 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
570
571 tsel_wr_select_ca_p = io->wr_ca_drv;
572 tsel_wr_select_ca_n = PHY_DRV_ODT_34_3;
573
574 tsel_ckcs_select_p = io->wr_ckcs_drv;
575 tsel_ckcs_select_n = PHY_DRV_ODT_34_3;
576
577 switch (tsel_rd_select_n) {
578 case PHY_DRV_ODT_240:
579 soc_odt = 1;
580 break;
581 case PHY_DRV_ODT_120:
582 soc_odt = 2;
583 break;
584 case PHY_DRV_ODT_80:
585 soc_odt = 3;
586 break;
587 case PHY_DRV_ODT_60:
588 soc_odt = 4;
589 break;
590 case PHY_DRV_ODT_48:
591 soc_odt = 5;
592 break;
593 case PHY_DRV_ODT_40:
594 soc_odt = 6;
595 break;
596 case PHY_DRV_ODT_34_3:
597 soc_odt = 6;
			printf("LPDDR4 MR22 SoC ODT not supported\n");
599 break;
600 case PHY_DRV_ODT_HI_Z:
601 default:
602 soc_odt = 0;
603 break;
604 }
605 } else if (sdram_params->base.dramtype == LPDDR3) {
606 tsel_rd_select_p = PHY_DRV_ODT_240;
607 tsel_rd_select_n = PHY_DRV_ODT_HI_Z;
608
609 tsel_idle_select_p = PHY_DRV_ODT_240;
610 tsel_idle_select_n = PHY_DRV_ODT_HI_Z;
611
612 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
613 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
614
615 tsel_wr_select_ca_p = PHY_DRV_ODT_34_3;
616 tsel_wr_select_ca_n = PHY_DRV_ODT_34_3;
617
618 tsel_ckcs_select_p = PHY_DRV_ODT_34_3;
619 tsel_ckcs_select_n = PHY_DRV_ODT_34_3;
620 } else {
621 tsel_rd_select_p = PHY_DRV_ODT_240;
622 tsel_rd_select_n = PHY_DRV_ODT_240;
623
624 tsel_idle_select_p = PHY_DRV_ODT_240;
625 tsel_idle_select_n = PHY_DRV_ODT_240;
626
627 tsel_wr_select_dq_p = PHY_DRV_ODT_34_3;
628 tsel_wr_select_dq_n = PHY_DRV_ODT_34_3;
629
630 tsel_wr_select_ca_p = PHY_DRV_ODT_34_3;
631 tsel_wr_select_ca_n = PHY_DRV_ODT_34_3;
632
633 tsel_ckcs_select_p = PHY_DRV_ODT_34_3;
634 tsel_ckcs_select_n = PHY_DRV_ODT_34_3;
635 }
636
637 if (sdram_params->base.dramtype == LPDDR4) {
638 if (sdram_params->base.odt == 1) {
639 tsel_rd_en = io->rd_odt_en;
640 tsel_idle_en = 0;
641 } else {
642 tsel_rd_en = 0;
643 tsel_idle_en = 0;
644 }
645 } else {
646 tsel_rd_en = sdram_params->base.odt;
647 tsel_idle_en = 0;
648 }
649 tsel_wr_en = 0;
650
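/* per-rank LPDDR4 MR22 values, OR'd into the SoC ODT field programmed below */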
651 #define CS0_MR22_VAL (0)
652 #define CS1_MR22_VAL (3)
653 /* F0_0 */
654 clrsetbits_le32(&denali_ctl[145], 0xFF << 16,
655 (soc_odt | (CS0_MR22_VAL << 3)) << 16);
656 /* F2_0, F1_0 */
657 clrsetbits_le32(&denali_ctl[146], 0xFF00FF,
658 ((soc_odt | (CS0_MR22_VAL << 3)) << 16) |
659 (soc_odt | (CS0_MR22_VAL << 3)));
660 /* F0_1 */
661 clrsetbits_le32(&denali_ctl[159], 0xFF << 16,
662 (soc_odt | (CS1_MR22_VAL << 3)) << 16);
663 /* F2_1, F1_1 */
664 clrsetbits_le32(&denali_ctl[160], 0xFF00FF,
665 ((soc_odt | (CS1_MR22_VAL << 3)) << 16) |
666 (soc_odt | (CS1_MR22_VAL << 3)));
667
668 /*
669 * phy_dq_tsel_select_X 24bits DENALI_PHY_6/134/262/390 offset_0
670 * sets termination values for read/idle cycles and drive strength
671 * for write cycles for DQ/DM
672 */
673 reg_value = tsel_rd_select_n | (tsel_rd_select_p << 0x4) |
674 (tsel_wr_select_dq_n << 8) | (tsel_wr_select_dq_p << 12) |
675 (tsel_idle_select_n << 16) | (tsel_idle_select_p << 20);
676 clrsetbits_le32(&denali_phy[6], 0xffffff, reg_value);
677 clrsetbits_le32(&denali_phy[134], 0xffffff, reg_value);
678 clrsetbits_le32(&denali_phy[262], 0xffffff, reg_value);
679 clrsetbits_le32(&denali_phy[390], 0xffffff, reg_value);
680
681 /*
682 * phy_dqs_tsel_select_X 24bits DENALI_PHY_7/135/263/391 offset_0
683 * sets termination values for read/idle cycles and drive strength
684 * for write cycles for DQS
685 */
686 clrsetbits_le32(&denali_phy[7], 0xffffff, reg_value);
687 clrsetbits_le32(&denali_phy[135], 0xffffff, reg_value);
688 clrsetbits_le32(&denali_phy[263], 0xffffff, reg_value);
689 clrsetbits_le32(&denali_phy[391], 0xffffff, reg_value);
690
691 /* phy_adr_tsel_select_ 8bits DENALI_PHY_544/672/800 offset_0 */
692 reg_value = tsel_wr_select_ca_n | (tsel_wr_select_ca_p << 0x4);
693 if (sdram_params->base.dramtype == LPDDR4) {
		/*
		 * On LPDDR4 these registers always read back as 0, so
		 * clrsetbits_le32() cannot be used; write the whole value.
		 */
697 writel((0x300 << 8) | reg_value, &denali_phy[544]);
698 writel((0x300 << 8) | reg_value, &denali_phy[672]);
699 writel((0x300 << 8) | reg_value, &denali_phy[800]);
700 } else {
701 clrsetbits_le32(&denali_phy[544], 0xff, reg_value);
702 clrsetbits_le32(&denali_phy[672], 0xff, reg_value);
703 clrsetbits_le32(&denali_phy[800], 0xff, reg_value);
704 }
705
706 /* phy_pad_addr_drive 8bits DENALI_PHY_928 offset_0 */
707 clrsetbits_le32(&denali_phy[928], 0xff, reg_value);
708
709 /* phy_pad_rst_drive 8bits DENALI_PHY_937 offset_0 */
710 if (!b_reg)
711 clrsetbits_le32(&denali_phy[937], 0xff, reg_value);
712
713 /* phy_pad_cke_drive 8bits DENALI_PHY_935 offset_0 */
714 clrsetbits_le32(&denali_phy[935], 0xff, reg_value);
715
716 /* phy_pad_cs_drive 8bits DENALI_PHY_939 offset_0 */
717 clrsetbits_le32(&denali_phy[939], 0xff,
718 tsel_ckcs_select_n | (tsel_ckcs_select_p << 0x4));
719
720 /* phy_pad_clk_drive 8bits DENALI_PHY_929 offset_0 */
721 clrsetbits_le32(&denali_phy[929], 0xff,
722 tsel_ckcs_select_n | (tsel_ckcs_select_p << 0x4));
723
724 /* phy_pad_fdbk_drive 23bit DENALI_PHY_924/925 */
725 clrsetbits_le32(&denali_phy[924], 0xff,
726 tsel_wr_select_ca_n | (tsel_wr_select_ca_p << 4));
727 clrsetbits_le32(&denali_phy[925], 0xff,
728 tsel_wr_select_dq_n | (tsel_wr_select_dq_p << 4));
729
730 /* phy_dq_tsel_enable_X 3bits DENALI_PHY_5/133/261/389 offset_16 */
731 reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
732 << 16;
733 clrsetbits_le32(&denali_phy[5], 0x7 << 16, reg_value);
734 clrsetbits_le32(&denali_phy[133], 0x7 << 16, reg_value);
735 clrsetbits_le32(&denali_phy[261], 0x7 << 16, reg_value);
736 clrsetbits_le32(&denali_phy[389], 0x7 << 16, reg_value);
737
738 /* phy_dqs_tsel_enable_X 3bits DENALI_PHY_6/134/262/390 offset_24 */
739 reg_value = (tsel_rd_en | (tsel_wr_en << 1) | (tsel_idle_en << 2))
740 << 24;
741 clrsetbits_le32(&denali_phy[6], 0x7 << 24, reg_value);
742 clrsetbits_le32(&denali_phy[134], 0x7 << 24, reg_value);
743 clrsetbits_le32(&denali_phy[262], 0x7 << 24, reg_value);
744 clrsetbits_le32(&denali_phy[390], 0x7 << 24, reg_value);
745
746 /* phy_adr_tsel_enable_ 1bit DENALI_PHY_518/646/774 offset_8 */
747 reg_value = tsel_wr_en << 8;
748 clrsetbits_le32(&denali_phy[518], 0x1 << 8, reg_value);
749 clrsetbits_le32(&denali_phy[646], 0x1 << 8, reg_value);
750 clrsetbits_le32(&denali_phy[774], 0x1 << 8, reg_value);
751
752 /* phy_pad_addr_term tsel 1bit DENALI_PHY_933 offset_17 */
753 reg_value = tsel_wr_en << 17;
754 clrsetbits_le32(&denali_phy[933], 0x1 << 17, reg_value);
755 /*
756 * pad_rst/cke/cs/clk_term tsel 1bits
757 * DENALI_PHY_938/936/940/934 offset_17
758 */
759 clrsetbits_le32(&denali_phy[938], 0x1 << 17, reg_value);
760 clrsetbits_le32(&denali_phy[936], 0x1 << 17, reg_value);
761 clrsetbits_le32(&denali_phy[940], 0x1 << 17, reg_value);
762 clrsetbits_le32(&denali_phy[934], 0x1 << 17, reg_value);
763
764 /* phy_pad_fdbk_term 1bit DENALI_PHY_930 offset_17 */
765 clrsetbits_le32(&denali_phy[930], 0x1 << 17, reg_value);
766
767 if (sdram_params->base.dramtype == LPDDR4)
768 phy_io_config(chan, sdram_params, io->rd_vref, b_reg, channel);
769 else
770 phy_io_config(chan, sdram_params, 0, b_reg, channel);
771 }
772
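/*
 * Finish start-up for each channel in channel_mask: release the PHY DLL
 * reset, wait for controller init to complete, reprogram the per-data-slice
 * PHY registers and restore the saved PWRUP_SREFRESH_EXIT setting.
 */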
static void pctl_start(struct dram_info *dram,
		       struct rk3399_sdram_params *sdram_params,
		       u32 channel_mask)
776 {
777 const struct chan_info *chan_0 = &dram->chan[0];
778 const struct chan_info *chan_1 = &dram->chan[1];
779
780 u32 *denali_ctl_0 = chan_0->pctl->denali_ctl;
781 u32 *denali_phy_0 = chan_0->publ->denali_phy;
782 u32 *denali_ctl_1 = chan_1->pctl->denali_ctl;
783 u32 *denali_phy_1 = chan_1->publ->denali_phy;
784 u32 count, byte, tmp;
785
786 /* PHY_DLL_RST_EN */
787 if (channel_mask & 1) {
788 writel(0x01000000, &dram->grf->ddrc0_con0);
789 clrsetbits_le32(&denali_phy_0[957], 0x3 << 24, 0x2 << 24);
790 }
791
792 if (channel_mask & 1) {
793 count = 0;
794 while (!(readl(&denali_ctl_0[203]) & (1 << 3))) {
795 if (count > 1000) {
796 printf("channel 0 init err!\n");
797 while (1)
798 ;
799 }
800 udelay(1);
801 count++;
802 }
803
804 writel(0x01000100, &dram->grf->ddrc0_con0);
805 for (byte = 0; byte < 4; byte++) {
806 tmp = 0x820;
807 writel((tmp << 16) | tmp,
808 &denali_phy_0[53 + (128 * byte)]);
809 writel((tmp << 16) | tmp,
810 &denali_phy_0[54 + (128 * byte)]);
811 writel((tmp << 16) | tmp,
812 &denali_phy_0[55 + (128 * byte)]);
813 writel((tmp << 16) | tmp,
814 &denali_phy_0[56 + (128 * byte)]);
815 writel((tmp << 16) | tmp,
816 &denali_phy_0[57 + (128 * byte)]);
817 clrsetbits_le32(&denali_phy_0[58 + (128 * byte)],
818 0xffff, tmp);
819 }
820 clrsetbits_le32(&denali_ctl_0[68], PWRUP_SREFRESH_EXIT,
821 g_pwrup_srefresh_exit[0]);
822 }
823
824 if (channel_mask & 2) {
825 writel(0x01000000, &dram->grf->ddrc1_con0);
826 clrsetbits_le32(&denali_phy_1[957], 0x3 << 24, 0x2 << 24);
827 }
828 if (channel_mask & 2) {
829 count = 0;
830 while (!(readl(&denali_ctl_1[203]) & (1 << 3))) {
831 if (count > 1000) {
832 printf("channel 1 init err!\n");
833 while (1)
834 ;
835 }
836 udelay(1);
837 count++;
838 }
839
840 writel(0x01000100, &dram->grf->ddrc1_con0);
841 for (byte = 0; byte < 4; byte++) {
842 tmp = 0x820;
843 writel((tmp << 16) | tmp,
844 &denali_phy_1[53 + (128 * byte)]);
845 writel((tmp << 16) | tmp,
846 &denali_phy_1[54 + (128 * byte)]);
847 writel((tmp << 16) | tmp,
848 &denali_phy_1[55 + (128 * byte)]);
849 writel((tmp << 16) | tmp,
850 &denali_phy_1[56 + (128 * byte)]);
851 writel((tmp << 16) | tmp,
852 &denali_phy_1[57 + (128 * byte)]);
853 clrsetbits_le32(&denali_phy_1[58 + (128 * byte)],
854 0xffff, tmp);
855 }
856
857 clrsetbits_le32(&denali_ctl_1[68], PWRUP_SREFRESH_EXIT,
858 g_pwrup_srefresh_exit[1]);
859
		/*
		 * Restore the channel 1 RESET pad drive to its original
		 * setting; the 240 ohm value is too weak to pass the ESD test.
		 */
864 if (sdram_params->base.dramtype == LPDDR4)
865 clrsetbits_le32(&denali_phy_1[937], 0xff,
866 sdram_params->phy_regs.denali_phy[937] &
867 0xFF);
868 }
869 }
870
/*
 * phy_fn = 0: PHY boot frequency
 * phy_fn = 1: PHY index 0
 * phy_fn = 2: PHY index 1
 */
static struct rk3399_sdram_params *
get_phy_index_params(u32 phy_fn, struct rk3399_sdram_params *sdram_params)
878 {
879 if (phy_fn == 0)
880 return sdram_params;
881 else if (phy_fn == 1)
882 return &dfs_configs[1];
883 else if (phy_fn == 2)
884 return &dfs_configs[0];
885 else
886 return NULL;
887 }
888
/*
 * b_reg: indicates whether to program the PHY registers or just to
 * update sdram_params.
 * If b_reg = 0, the channel and mr5 arguments are ignored.
 */
static void set_lp4_dq_odt(const struct chan_info *chan,
			   struct rk3399_sdram_params *sdram_params, u32 ctl_fn,
			   u32 en, u32 b_reg, u32 channel, u32 mr5)
897 {
898 u32 *denali_ctl;
899 u32 *denali_pi;
900 u32 reg_value;
901 struct io_setting *io;
902
903 if (b_reg) {
904 denali_pi = chan->pi->denali_pi;
905 denali_ctl = chan->pctl->denali_ctl;
906 } else {
907 denali_pi = sdram_params->pi_regs.denali_pi;
908 denali_ctl = sdram_params->pctl_regs.denali_ctl;
909 }
910 io = get_io_set(sdram_params, mr5);
911 if (en)
912 reg_value = io->dq_odt;
913 else
914 reg_value = 0;
915
916 switch (ctl_fn) {
917 case 0:
918 clrsetbits_le32(&denali_ctl[139], 0x7 << 24, reg_value << 24);
919 clrsetbits_le32(&denali_ctl[153], 0x7 << 24, reg_value << 24);
920
921 clrsetbits_le32(&denali_pi[132], 0x7 << 0, (reg_value << 0));
922 clrsetbits_le32(&denali_pi[139], 0x7 << 16, (reg_value << 16));
923 clrsetbits_le32(&denali_pi[147], 0x7 << 0, (reg_value << 0));
924 clrsetbits_le32(&denali_pi[154], 0x7 << 16, (reg_value << 16));
925 break;
926 case 1:
927 clrsetbits_le32(&denali_ctl[140], 0x7 << 0, reg_value << 0);
928 clrsetbits_le32(&denali_ctl[154], 0x7 << 0, reg_value << 0);
929
930 clrsetbits_le32(&denali_pi[129], 0x7 << 16, (reg_value << 16));
931 clrsetbits_le32(&denali_pi[137], 0x7 << 0, (reg_value << 0));
932 clrsetbits_le32(&denali_pi[144], 0x7 << 16, (reg_value << 16));
933 clrsetbits_le32(&denali_pi[152], 0x7 << 0, (reg_value << 0));
934 break;
935 case 2:
936 default:
937 clrsetbits_le32(&denali_ctl[140], 0x7 << 8, (reg_value << 8));
938 clrsetbits_le32(&denali_ctl[154], 0x7 << 8, (reg_value << 8));
939
940 clrsetbits_le32(&denali_pi[127], 0x7 << 0, (reg_value << 0));
941 clrsetbits_le32(&denali_pi[134], 0x7 << 16, (reg_value << 16));
942 clrsetbits_le32(&denali_pi[142], 0x7 << 0, (reg_value << 0));
943 clrsetbits_le32(&denali_pi[149], 0x7 << 16, (reg_value << 16));
944 break;
945 }
946 }
947
/*
 * b_reg: indicates whether to program the PHY registers or just to
 * update sdram_params.
 * If b_reg = 0, the channel and mr5 arguments are ignored.
 */
static void set_lp4_ca_odt(const struct chan_info *chan,
			   struct rk3399_sdram_params *sdram_params, u32 ctl_fn,
			   u32 en, u32 b_reg, u32 channel, u32 mr5)
956 {
957 u32 *denali_ctl;
958 u32 *denali_pi;
959 u32 reg_value;
960 struct io_setting *io;
961
962 if (b_reg) {
963 denali_pi = chan->pi->denali_pi;
964 denali_ctl = chan->pctl->denali_ctl;
965 } else {
966 denali_pi = sdram_params->pi_regs.denali_pi;
967 denali_ctl = sdram_params->pctl_regs.denali_ctl;
968 }
969 io = get_io_set(sdram_params, mr5);
970 if (en)
971 reg_value = io->ca_odt;
972 else
973 reg_value = 0;
974
975 switch (ctl_fn) {
976 case 0:
977 clrsetbits_le32(&denali_ctl[139], 0x7 << 28, reg_value << 28);
978 clrsetbits_le32(&denali_ctl[153], 0x7 << 28, reg_value << 28);
979
980 clrsetbits_le32(&denali_pi[132], 0x7 << 4, reg_value << 4);
981 clrsetbits_le32(&denali_pi[139], 0x7 << 20, reg_value << 20);
982 clrsetbits_le32(&denali_pi[147], 0x7 << 4, reg_value << 4);
983 clrsetbits_le32(&denali_pi[154], 0x7 << 20, reg_value << 20);
984 break;
985 case 1:
986 clrsetbits_le32(&denali_ctl[140], 0x7 << 4, reg_value << 4);
987 clrsetbits_le32(&denali_ctl[154], 0x7 << 4, reg_value << 4);
988
989 clrsetbits_le32(&denali_pi[129], 0x7 << 20, reg_value << 20);
990 clrsetbits_le32(&denali_pi[137], 0x7 << 4, reg_value << 4);
991 clrsetbits_le32(&denali_pi[144], 0x7 << 20, reg_value << 20);
992 clrsetbits_le32(&denali_pi[152], 0x7 << 4, reg_value << 4);
993 break;
994 case 2:
995 default:
996 clrsetbits_le32(&denali_ctl[140], 0x7 << 12, (reg_value << 12));
997 clrsetbits_le32(&denali_ctl[154], 0x7 << 12, (reg_value << 12));
998
999 clrsetbits_le32(&denali_pi[127], 0x7 << 4, reg_value << 4);
1000 clrsetbits_le32(&denali_pi[134], 0x7 << 20, reg_value << 20);
1001 clrsetbits_le32(&denali_pi[142], 0x7 << 4, reg_value << 4);
1002 clrsetbits_le32(&denali_pi[149], 0x7 << 20, reg_value << 20);
1003 break;
1004 }
1005 }
1006
/*
 * b_reg: indicates whether to program the PHY registers or just to
 * update sdram_params.
 * If b_reg = 0, the channel and mr5 arguments are ignored.
 */
static void set_lp4_MR3(const struct chan_info *chan,
			struct rk3399_sdram_params *sdram_params, u32 ctl_fn,
			u32 b_reg, u32 channel, u32 mr5)
1015 {
1016 u32 *denali_ctl;
1017 u32 *denali_pi;
1018 u32 reg_value;
1019 struct io_setting *io;
1020
1021 if (b_reg) {
1022 denali_pi = chan->pi->denali_pi;
1023 denali_ctl = chan->pctl->denali_ctl;
1024 } else {
1025 denali_pi = sdram_params->pi_regs.denali_pi;
1026 denali_ctl = sdram_params->pctl_regs.denali_ctl;
1027 }
1028 io = get_io_set(sdram_params, mr5);
1029
1030 reg_value = ((io->pdds << 3) | 1);
1031 switch (ctl_fn) {
1032 case 0:
1033 clrsetbits_le32(&denali_ctl[138], 0xFFFF, reg_value);
1034 clrsetbits_le32(&denali_ctl[152], 0xFFFF, reg_value);
1035
1036 clrsetbits_le32(&denali_pi[131], 0xFFFF << 16, reg_value << 16);
1037 clrsetbits_le32(&denali_pi[139], 0xFFFF, reg_value);
1038 clrsetbits_le32(&denali_pi[146], 0xFFFF << 16, reg_value << 16);
1039 clrsetbits_le32(&denali_pi[154], 0xFFFF, reg_value);
1040 break;
1041 case 1:
1042 clrsetbits_le32(&denali_ctl[138], 0xFFFF << 16,
1043 reg_value << 16);
1044 clrsetbits_le32(&denali_ctl[152], 0xFFFF << 16,
1045 reg_value << 16);
1046
1047 clrsetbits_le32(&denali_pi[129], 0xFFFF, reg_value);
1048 clrsetbits_le32(&denali_pi[136], 0xFFFF << 16, reg_value << 16);
1049 clrsetbits_le32(&denali_pi[144], 0xFFFF, reg_value);
1050 clrsetbits_le32(&denali_pi[151], 0xFFFF << 16, reg_value << 16);
1051 break;
1052 case 2:
1053 default:
1054 clrsetbits_le32(&denali_ctl[139], 0xFFFF, reg_value);
1055 clrsetbits_le32(&denali_ctl[153], 0xFFFF, reg_value);
1056
1057 clrsetbits_le32(&denali_pi[126], 0xFFFF << 16, reg_value << 16);
1058 clrsetbits_le32(&denali_pi[134], 0xFFFF, reg_value);
1059 clrsetbits_le32(&denali_pi[141], 0xFFFF << 16, reg_value << 16);
1060 clrsetbits_le32(&denali_pi[149], 0xFFFF, reg_value);
1061 break;
1062 }
1063 }
1064
/*
 * b_reg: indicates whether to program the PHY registers or just to
 * update sdram_params.
 * If b_reg = 0, the channel and mr5 arguments are ignored.
 */
static void set_lp4_MR12(const struct chan_info *chan,
			 struct rk3399_sdram_params *sdram_params, u32 ctl_fn,
			 u32 b_reg, u32 channel, u32 mr5)
1073 {
1074 u32 *denali_ctl;
1075 u32 *denali_pi;
1076 u32 reg_value;
1077 struct io_setting *io;
1078
1079 if (b_reg) {
1080 denali_pi = chan->pi->denali_pi;
1081 denali_ctl = chan->pctl->denali_ctl;
1082 } else {
1083 denali_pi = sdram_params->pi_regs.denali_pi;
1084 denali_ctl = sdram_params->pctl_regs.denali_ctl;
1085 }
1086 io = get_io_set(sdram_params, mr5);
1087
1088 reg_value = io->ca_vref;
1089 switch (ctl_fn) {
1090 case 0:
1091 clrsetbits_le32(&denali_ctl[140], 0xFFFF << 16,
1092 reg_value << 16);
1093 clrsetbits_le32(&denali_ctl[154], 0xFFFF << 16,
1094 reg_value << 16);
1095
1096 clrsetbits_le32(&denali_pi[132], 0xFF << 8, reg_value << 8);
1097 clrsetbits_le32(&denali_pi[139], 0xFF << 24, reg_value << 24);
1098 clrsetbits_le32(&denali_pi[147], 0xFF << 8, reg_value << 8);
1099 clrsetbits_le32(&denali_pi[154], 0xFF << 24, reg_value << 24);
1100 break;
1101 case 1:
1102 clrsetbits_le32(&denali_ctl[141], 0xFFFF, reg_value);
1103 clrsetbits_le32(&denali_ctl[155], 0xFFFF, reg_value);
1104
1105 clrsetbits_le32(&denali_pi[129], 0xFF << 24, reg_value << 24);
1106 clrsetbits_le32(&denali_pi[137], 0xFF << 8, reg_value << 8);
1107 clrsetbits_le32(&denali_pi[144], 0xFF << 24, reg_value << 24);
1108 clrsetbits_le32(&denali_pi[152], 0xFF << 8, reg_value << 8);
1109 break;
1110 case 2:
1111 default:
1112 clrsetbits_le32(&denali_ctl[141], 0xFFFF << 16,
1113 reg_value << 16);
1114 clrsetbits_le32(&denali_ctl[155], 0xFFFF << 16,
1115 reg_value << 16);
1116
1117 clrsetbits_le32(&denali_pi[127], 0xFF << 8, reg_value << 8);
1118 clrsetbits_le32(&denali_pi[134], 0xFF << 24, reg_value << 24);
1119 clrsetbits_le32(&denali_pi[142], 0xFF << 8, reg_value << 8);
1120 clrsetbits_le32(&denali_pi[149], 0xFF << 24, reg_value << 24);
1121 break;
1122 }
1123 }
1124
/*
 * b_reg: indicates whether to program the PHY registers or just to
 * update sdram_params.
 * If b_reg = 0, the channel and mr5 arguments are ignored.
 */
static void set_lp4_MR14(const struct chan_info *chan,
			 struct rk3399_sdram_params *sdram_params, u32 ctl_fn,
			 u32 b_reg, u32 channel, u32 mr5)
1133 {
1134 u32 *denali_ctl;
1135 u32 *denali_pi;
1136 u32 reg_value;
1137 struct io_setting *io;
1138
1139 if (b_reg) {
1140 denali_pi = chan->pi->denali_pi;
1141 denali_ctl = chan->pctl->denali_ctl;
1142 } else {
1143 denali_pi = sdram_params->pi_regs.denali_pi;
1144 denali_ctl = sdram_params->pctl_regs.denali_ctl;
1145 }
1146 io = get_io_set(sdram_params, mr5);
1147
1148 reg_value = io->dq_vref;
1149 switch (ctl_fn) {
1150 case 0:
1151 clrsetbits_le32(&denali_ctl[142], 0xFFFF << 16,
1152 reg_value << 16);
1153 clrsetbits_le32(&denali_ctl[156], 0xFFFF << 16,
1154 reg_value << 16);
1155
1156 clrsetbits_le32(&denali_pi[132], 0xFF << 16, reg_value << 16);
1157 clrsetbits_le32(&denali_pi[140], 0xFF << 0, reg_value << 0);
1158 clrsetbits_le32(&denali_pi[147], 0xFF << 16, reg_value << 16);
1159 clrsetbits_le32(&denali_pi[155], 0xFF << 0, reg_value << 0);
1160 break;
1161 case 1:
1162 clrsetbits_le32(&denali_ctl[143], 0xFFFF, reg_value);
1163 clrsetbits_le32(&denali_ctl[157], 0xFFFF, reg_value);
1164
1165 clrsetbits_le32(&denali_pi[130], 0xFF << 0, reg_value << 0);
1166 clrsetbits_le32(&denali_pi[137], 0xFF << 16, reg_value << 16);
1167 clrsetbits_le32(&denali_pi[145], 0xFF << 0, reg_value << 0);
1168 clrsetbits_le32(&denali_pi[152], 0xFF << 16, reg_value << 16);
1169 break;
1170 case 2:
1171 default:
1172 clrsetbits_le32(&denali_ctl[143], 0xFFFF << 16,
1173 reg_value << 16);
1174 clrsetbits_le32(&denali_ctl[157], 0xFFFF << 16,
1175 reg_value << 16);
1176
1177 clrsetbits_le32(&denali_pi[127], 0xFF << 16, reg_value << 16);
1178 clrsetbits_le32(&denali_pi[135], 0xFF << 0, reg_value << 0);
1179 clrsetbits_le32(&denali_pi[142], 0xFF << 16, reg_value << 16);
1180 clrsetbits_le32(&denali_pi[150], 0xFF << 0, reg_value << 0);
1181 break;
1182 }
1183 }
1184
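/*
 * Patch the register images held in sdram_params (ODT/Vref/MR values,
 * preamble and power-reduction settings) before pctl_cfg() copies them
 * into the hardware.
 */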
static void modify_param(const struct chan_info *chan,
			 struct rk3399_sdram_params *sdram_params)
1187 {
1188 struct rk3399_sdram_params *params;
1189 u32 *denali_ctl_params;
1190 u32 *denali_pi_params;
1191 u32 *denali_phy_params;
1192
1193 denali_ctl_params = sdram_params->pctl_regs.denali_ctl;
1194 denali_pi_params = sdram_params->pi_regs.denali_pi;
1195 denali_phy_params = sdram_params->phy_regs.denali_phy;
1196
1197 if (sdram_params->base.dramtype == LPDDR4) {
1198 set_lp4_dq_odt(chan, sdram_params, 2, 1, 0, 0, 0);
1199 set_lp4_ca_odt(chan, sdram_params, 2, 1, 0, 0, 0);
1200 set_lp4_MR3(chan, sdram_params, 2, 0, 0, 0);
1201 set_lp4_MR12(chan, sdram_params, 2, 0, 0, 0);
1202 set_lp4_MR14(chan, sdram_params, 2, 0, 0, 0);
1203 params = get_phy_index_params(0, sdram_params);
1204 set_ds_odt(chan, params, 0, 0, 0);
1205 /* read two cycle preamble */
1206 clrsetbits_le32(&denali_ctl_params[200], 0x3 << 24, 0x3 << 24);
1207 clrsetbits_le32(&denali_phy_params[7], 0x3 << 24, 0x3 << 24);
1208 clrsetbits_le32(&denali_phy_params[135], 0x3 << 24, 0x3 << 24);
1209 clrsetbits_le32(&denali_phy_params[263], 0x3 << 24, 0x3 << 24);
1210 clrsetbits_le32(&denali_phy_params[391], 0x3 << 24, 0x3 << 24);
1211
1212 /* boot frequency two cycle preamble */
1213 clrsetbits_le32(&denali_phy_params[2], 0x3 << 16, 0x3 << 16);
1214 clrsetbits_le32(&denali_phy_params[130], 0x3 << 16, 0x3 << 16);
1215 clrsetbits_le32(&denali_phy_params[258], 0x3 << 16, 0x3 << 16);
1216 clrsetbits_le32(&denali_phy_params[386], 0x3 << 16, 0x3 << 16);
1217
1218 clrsetbits_le32(&denali_pi_params[45], 0x3 << 8, 0x3 << 8);
1219 clrsetbits_le32(&denali_pi_params[58], 0x1, 0x1);
1220
1221 /*
1222 * bypass mode need PHY_SLICE_PWR_RDC_DISABLE_x = 1,
1223 * boot frequency mode use bypass mode
1224 */
1225 setbits_le32(&denali_phy_params[10], 1 << 16);
1226 setbits_le32(&denali_phy_params[138], 1 << 16);
1227 setbits_le32(&denali_phy_params[266], 1 << 16);
1228 setbits_le32(&denali_phy_params[394], 1 << 16);
1229 } else {
1230 /* modify PHY F0/F1/F2 params */
1231 params = get_phy_index_params(0, sdram_params);
1232 set_ds_odt(chan, params, 0, 0, 0);
1233 }
1234
1235 clrsetbits_le32(&denali_pi_params[45], 0x1 << 24, 0x1 << 24);
1236 clrsetbits_le32(&denali_pi_params[61], 0x1 << 24, 0x1 << 24);
1237 clrsetbits_le32(&denali_pi_params[76], 0x1 << 24, 0x1 << 24);
1238 clrsetbits_le32(&denali_pi_params[77], 0x1, 0x1);
1239 }
1240
static int pctl_cfg(const struct chan_info *chan, u32 channel,
		    struct rk3399_sdram_params *sdram_params)
1243 {
1244 u32 *denali_ctl = chan->pctl->denali_ctl;
1245 u32 *denali_pi = chan->pi->denali_pi;
1246 u32 *denali_phy = chan->publ->denali_phy;
1247 const u32 *params_ctl = sdram_params->pctl_regs.denali_ctl;
1248 const u32 *params_phy = sdram_params->phy_regs.denali_phy;
1249 u32 tmp, tmp1, tmp2;
1250 struct rk3399_sdram_params *params;
1251 u32 byte;
1252
1253 modify_param(chan, sdram_params);
1254 /*
1255 * work around controller bug:
1256 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
1257 */
1258 sdram_copy_to_reg(&denali_ctl[1], ¶ms_ctl[1],
1259 sizeof(struct rk3399_ddr_pctl_regs) - 4);
1260 writel(params_ctl[0], &denali_ctl[0]);
1261
	/*
	 * If both channels are initialized at the same time they start
	 * ZQ calibration together and end up using the same RZQ. To avoid
	 * this, increase tINIT3 for channel 1 so the two channels do not
	 * start ZQ calibration at the same time.
	 */
1268 if (sdram_params->base.dramtype == LPDDR4 && channel == 1) {
1269 tmp = ((1000000 * (sdram_params->base.ddr_freq / MHz) + 999) /
1270 1000);
1271 tmp1 = readl(&denali_ctl[14]);
1272 writel(tmp + tmp1, &denali_ctl[14]);
1273 }
1274
1275 sdram_copy_to_reg(denali_pi, &sdram_params->pi_regs.denali_pi[0],
1276 sizeof(struct rk3399_ddr_pi_regs));
	/* the rank count needs to be set before init */
1278 set_memory_map(chan, channel, sdram_params);
1279
1280 writel(sdram_params->phy_regs.denali_phy[910], &denali_phy[910]);
1281 writel(sdram_params->phy_regs.denali_phy[911], &denali_phy[911]);
1282 writel(sdram_params->phy_regs.denali_phy[912], &denali_phy[912]);
1283
1284 if (sdram_params->base.dramtype == LPDDR4) {
1285 writel(sdram_params->phy_regs.denali_phy[898],
1286 &denali_phy[898]);
1287 writel(sdram_params->phy_regs.denali_phy[919],
1288 &denali_phy[919]);
1289 }
1290
1291 g_pwrup_srefresh_exit[channel] = readl(&denali_ctl[68]) &
1292 PWRUP_SREFRESH_EXIT;
1293 clrbits_le32(&denali_ctl[68], PWRUP_SREFRESH_EXIT);
1294
1295 /* PHY_DLL_RST_EN */
1296 clrsetbits_le32(&denali_phy[957], 0x3 << 24, 1 << 24);
1297
1298 setbits_le32(&denali_pi[0], START);
1299 setbits_le32(&denali_ctl[0], START);
1300
	/*
	 * LPDDR4 uses PLL bypass mode for init, so there is no need to
	 * wait for the PLL to lock.
	 */
1304 if (sdram_params->base.dramtype != LPDDR4) {
1305 /* wait lock */
1306 while (1) {
1307 tmp = readl(&denali_phy[920]);
1308 tmp1 = readl(&denali_phy[921]);
1309 tmp2 = readl(&denali_phy[922]);
1310 if ((((tmp >> 16) & 0x1) == 0x1) &&
1311 (((tmp1 >> 16) & 0x1) == 0x1) &&
1312 (((tmp1 >> 0) & 0x1) == 0x1) &&
1313 (((tmp2 >> 0) & 0x1) == 0x1))
1314 break;
1315 }
1316 }
1317
1318 sdram_copy_to_reg(&denali_phy[896], ¶ms_phy[896], (958 - 895) * 4);
1319 sdram_copy_to_reg(&denali_phy[0], ¶ms_phy[0], (90 - 0 + 1) * 4);
1320 sdram_copy_to_reg(&denali_phy[128],
1321 ¶ms_phy[128], (218 - 128 + 1) * 4);
1322 sdram_copy_to_reg(&denali_phy[256],
1323 ¶ms_phy[256], (346 - 256 + 1) * 4);
1324 sdram_copy_to_reg(&denali_phy[384],
1325 ¶ms_phy[384], (474 - 384 + 1) * 4);
1326 sdram_copy_to_reg(&denali_phy[512],
1327 ¶ms_phy[512], (549 - 512 + 1) * 4);
1328 sdram_copy_to_reg(&denali_phy[640],
1329 ¶ms_phy[640], (677 - 640 + 1) * 4);
1330 sdram_copy_to_reg(&denali_phy[768],
1331 ¶ms_phy[768], (805 - 768 + 1) * 4);
1332
1333 if (sdram_params->base.dramtype == LPDDR4)
1334 params = get_phy_index_params(1, sdram_params);
1335 else
1336 params = get_phy_index_params(0, sdram_params);
1337
1338 clrsetbits_le32(¶ms->phy_regs.denali_phy[896], 0x3 << 8,
1339 0 << 8);
1340 writel(params->phy_regs.denali_phy[896], &denali_phy[896]);
1341
1342 writel(sdram_params->phy_regs.denali_phy[83] + (0x10 << 16),
1343 &denali_phy[83]);
1344 writel(sdram_params->phy_regs.denali_phy[84] + (0x10 << 8),
1345 &denali_phy[84]);
1346 writel(sdram_params->phy_regs.denali_phy[211] + (0x10 << 16),
1347 &denali_phy[211]);
1348 writel(sdram_params->phy_regs.denali_phy[212] + (0x10 << 8),
1349 &denali_phy[212]);
1350 writel(sdram_params->phy_regs.denali_phy[339] + (0x10 << 16),
1351 &denali_phy[339]);
1352 writel(sdram_params->phy_regs.denali_phy[340] + (0x10 << 8),
1353 &denali_phy[340]);
1354 writel(sdram_params->phy_regs.denali_phy[467] + (0x10 << 16),
1355 &denali_phy[467]);
1356 writel(sdram_params->phy_regs.denali_phy[468] + (0x10 << 8),
1357 &denali_phy[468]);
1358
1359 if (sdram_params->base.dramtype == LPDDR4) {
1360 /*
1361 * to improve write dqs and dq phase from 1.5ns to 3.5ns
1362 * at 50MHz.
1363 */
1364 for (byte = 0; byte < 4; byte++) {
1365 tmp = 0x680;
1366 clrsetbits_le32(&denali_phy[1 + (128 * byte)],
1367 0xfff << 8, tmp << 8);
1368 }
		/*
		 * Work around the 366-ball package, where both channels'
		 * RESET pins are tied to a single RESET signal on the die.
		 */
1373 if (channel == 1)
1374 clrsetbits_le32(&denali_phy[937], 0xff,
1375 PHY_DRV_ODT_240 |
1376 (PHY_DRV_ODT_240 << 0x4));
1377 }
1378
1379 return 0;
1380 }
1381
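/*
 * Select the rank that per-CS training applies to, when per-CS training
 * is enabled.
 */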
static void select_per_cs_training_index(const struct chan_info *chan,
					 u32 rank)
1384 {
1385 u32 *denali_phy = chan->publ->denali_phy;
1386
1387 /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
1388 if ((readl(&denali_phy[84]) >> 16) & 1) {
1389 /*
1390 * PHY_8/136/264/392
1391 * phy_per_cs_training_index_X 1bit offset_24
1392 */
1393 clrsetbits_le32(&denali_phy[8], 0x1 << 24, rank << 24);
1394 clrsetbits_le32(&denali_phy[136], 0x1 << 24, rank << 24);
1395 clrsetbits_le32(&denali_phy[264], 0x1 << 24, rank << 24);
1396 clrsetbits_le32(&denali_phy[392], 0x1 << 24, rank << 24);
1397 }
1398 }
1399
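/*
 * Broadcast a fixed write-leveling delay (0x200) to all data slices and
 * request a controller update so the override takes effect.
 */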
static void override_write_leveling_value(const struct chan_info *chan)
1401 {
1402 u32 *denali_ctl = chan->pctl->denali_ctl;
1403 u32 *denali_phy = chan->publ->denali_phy;
1404 u32 byte;
1405
1406 /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
1407 setbits_le32(&denali_phy[896], 1);
1408
1409 /*
1410 * PHY_8/136/264/392
1411 * phy_per_cs_training_multicast_en_X 1bit offset_16
1412 */
1413 clrsetbits_le32(&denali_phy[8], 0x1 << 16, 1 << 16);
1414 clrsetbits_le32(&denali_phy[136], 0x1 << 16, 1 << 16);
1415 clrsetbits_le32(&denali_phy[264], 0x1 << 16, 1 << 16);
1416 clrsetbits_le32(&denali_phy[392], 0x1 << 16, 1 << 16);
1417
1418 for (byte = 0; byte < 4; byte++)
1419 clrsetbits_le32(&denali_phy[63 + (128 * byte)], 0xffff << 16,
1420 0x200 << 16);
1421
1422 /* PHY_896 PHY_FREQ_SEL_MULTICAST_EN 1bit offset_0 */
1423 clrbits_le32(&denali_phy[896], 1);
1424
1425 /* CTL_200 ctrlupd_req 1bit offset_8 */
1426 clrsetbits_le32(&denali_ctl[200], 0x1 << 8, 0x1 << 8);
1427 }
1428
static int data_training_ca(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
1431 {
1432 u32 *denali_pi = chan->pi->denali_pi;
1433 u32 *denali_phy = chan->publ->denali_phy;
1434 u32 i, tmp;
1435 u32 obs_0, obs_1, obs_2, obs_err = 0;
1436 u32 rank = sdram_params->ch[channel].cap_info.rank;
1437 u32 rank_mask;
1438
1439 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1440 writel(0x00003f7c, (&denali_pi[175]));
1441
1442 if (sdram_params->base.dramtype == LPDDR4)
1443 rank_mask = (rank == 1) ? 0x5 : 0xf;
1444 else
1445 rank_mask = (rank == 1) ? 0x1 : 0x3;
1446
1447 for (i = 0; i < 4; i++) {
1448 if (!(rank_mask & (1 << i)))
1449 continue;
1450 select_per_cs_training_index(chan, i);
1451 /* PI_100 PI_CALVL_EN:RW:8:2 */
1452 clrsetbits_le32(&denali_pi[100], 0x3 << 8, 0x2 << 8);
1453 /* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
1454 clrsetbits_le32(&denali_pi[92],
1455 (0x1 << 16) | (0x3 << 24),
1456 (0x1 << 16) | (i << 24));
1457
1458 /* Waiting for training complete */
1459 while (1) {
1460 /* PI_174 PI_INT_STATUS:RD:8:18 */
1461 tmp = readl(&denali_pi[174]) >> 8;
1462 /*
1463 * check status obs
1464 * PHY_532/660/789 phy_adr_calvl_obs1_:0:32
1465 */
1466 obs_0 = readl(&denali_phy[532]);
1467 obs_1 = readl(&denali_phy[660]);
1468 obs_2 = readl(&denali_phy[788]);
1469 if (((obs_0 >> 30) & 0x3) ||
1470 ((obs_1 >> 30) & 0x3) ||
1471 ((obs_2 >> 30) & 0x3))
1472 obs_err = 1;
1473 if ((((tmp >> 11) & 0x1) == 0x1) &&
1474 (((tmp >> 13) & 0x1) == 0x1) &&
1475 (((tmp >> 5) & 0x1) == 0x0) &&
1476 obs_err == 0)
1477 break;
1478 else if ((((tmp >> 5) & 0x1) == 0x1) ||
1479 (obs_err == 1))
1480 return -EIO;
1481 }
1482 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1483 writel(0x00003f7c, (&denali_pi[175]));
1484 }
1485 clrbits_le32(&denali_pi[100], 0x3 << 8);
1486
1487 return 0;
1488 }
1489
static int data_training_wl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
1492 {
1493 u32 *denali_pi = chan->pi->denali_pi;
1494 u32 *denali_phy = chan->publ->denali_phy;
1495 u32 i, tmp;
1496 u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
1497 u32 rank = sdram_params->ch[channel].cap_info.rank;
1498
1499 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1500 writel(0x00003f7c, (&denali_pi[175]));
1501
1502 for (i = 0; i < rank; i++) {
1503 select_per_cs_training_index(chan, i);
1504 /* PI_60 PI_WRLVL_EN:RW:8:2 */
1505 clrsetbits_le32(&denali_pi[60], 0x3 << 8, 0x2 << 8);
1506 /* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
1507 clrsetbits_le32(&denali_pi[59],
1508 (0x1 << 8) | (0x3 << 16),
1509 (0x1 << 8) | (i << 16));
1510
1511 /* Waiting for training complete */
1512 while (1) {
1513 /* PI_174 PI_INT_STATUS:RD:8:18 */
1514 tmp = readl(&denali_pi[174]) >> 8;
1515
			/*
			 * Check the status observation registers; on error
			 * the leveling may never complete.
			 * PHY_40/168/296/424 phy_wrlvl_status_obs_X:0:13
			 */
1521 obs_0 = readl(&denali_phy[40]);
1522 obs_1 = readl(&denali_phy[168]);
1523 obs_2 = readl(&denali_phy[296]);
1524 obs_3 = readl(&denali_phy[424]);
1525 if (((obs_0 >> 12) & 0x1) ||
1526 ((obs_1 >> 12) & 0x1) ||
1527 ((obs_2 >> 12) & 0x1) ||
1528 ((obs_3 >> 12) & 0x1))
1529 obs_err = 1;
1530 if ((((tmp >> 10) & 0x1) == 0x1) &&
1531 (((tmp >> 13) & 0x1) == 0x1) &&
1532 (((tmp >> 4) & 0x1) == 0x0) &&
1533 obs_err == 0)
1534 break;
1535 else if ((((tmp >> 4) & 0x1) == 0x1) ||
1536 (obs_err == 1))
1537 return -EIO;
1538 }
1539 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1540 writel(0x00003f7c, (&denali_pi[175]));
1541 }
1542
1543 override_write_leveling_value(chan);
1544 clrbits_le32(&denali_pi[60], 0x3 << 8);
1545
1546 return 0;
1547 }
1548
static int data_training_rg(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
1551 {
1552 u32 *denali_pi = chan->pi->denali_pi;
1553 u32 *denali_phy = chan->publ->denali_phy;
1554 u32 i, tmp;
1555 u32 obs_0, obs_1, obs_2, obs_3, obs_err = 0;
1556 u32 rank = sdram_params->ch[channel].cap_info.rank;
1557
1558 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1559 writel(0x00003f7c, (&denali_pi[175]));
1560
1561 for (i = 0; i < rank; i++) {
1562 select_per_cs_training_index(chan, i);
1563 /* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
1564 clrsetbits_le32(&denali_pi[80], 0x3 << 24, 0x2 << 24);
1565 /*
1566 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
1567 * PI_RDLVL_CS:RW:24:2
1568 */
1569 clrsetbits_le32(&denali_pi[74],
1570 (0x1 << 16) | (0x3 << 24),
1571 (0x1 << 16) | (i << 24));
1572
1573 /* Waiting for training complete */
1574 while (1) {
1575 /* PI_174 PI_INT_STATUS:RD:8:18 */
1576 tmp = readl(&denali_pi[174]) >> 8;
1577
1578 /*
1579 * check status obs
1580 * PHY_43/171/299/427
1581 * PHY_GTLVL_STATUS_OBS_x:16:8
1582 */
1583 obs_0 = readl(&denali_phy[43]);
1584 obs_1 = readl(&denali_phy[171]);
1585 obs_2 = readl(&denali_phy[299]);
1586 obs_3 = readl(&denali_phy[427]);
1587 if (((obs_0 >> (16 + 6)) & 0x3) ||
1588 ((obs_1 >> (16 + 6)) & 0x3) ||
1589 ((obs_2 >> (16 + 6)) & 0x3) ||
1590 ((obs_3 >> (16 + 6)) & 0x3))
1591 obs_err = 1;
1592 if ((((tmp >> 9) & 0x1) == 0x1) &&
1593 (((tmp >> 13) & 0x1) == 0x1) &&
1594 (((tmp >> 3) & 0x1) == 0x0) &&
1595 obs_err == 0)
1596 break;
1597 else if ((((tmp >> 3) & 0x1) == 0x1) ||
1598 (obs_err == 1))
1599 return -EIO;
1600 }
1601 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1602 writel(0x00003f7c, (&denali_pi[175]));
1603 }
1604 clrbits_le32(&denali_pi[80], 0x3 << 24);
1605
1606 return 0;
1607 }
1608
static int data_training_rl(const struct chan_info *chan, u32 channel,
			    const struct rk3399_sdram_params *sdram_params)
1611 {
1612 u32 *denali_pi = chan->pi->denali_pi;
1613 u32 i, tmp;
1614 u32 rank = sdram_params->ch[channel].cap_info.rank;
1615
1616 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1617 writel(0x00003f7c, (&denali_pi[175]));
1618
1619 for (i = 0; i < rank; i++) {
1620 select_per_cs_training_index(chan, i);
1621 /* PI_80 PI_RDLVL_EN:RW:16:2 */
1622 clrsetbits_le32(&denali_pi[80], 0x3 << 16, 0x2 << 16);
1623 /* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
1624 clrsetbits_le32(&denali_pi[74],
1625 (0x1 << 8) | (0x3 << 24),
1626 (0x1 << 8) | (i << 24));
1627
1628 /* Waiting for training complete */
1629 while (1) {
1630 /* PI_174 PI_INT_STATUS:RD:8:18 */
1631 tmp = readl(&denali_pi[174]) >> 8;
1632
1633 /*
1634 * make sure status obs not report error bit
1635 * PHY_46/174/302/430
1636 * phy_rdlvl_status_obs_X:16:8
1637 */
1638 if ((((tmp >> 8) & 0x1) == 0x1) &&
1639 (((tmp >> 13) & 0x1) == 0x1) &&
1640 (((tmp >> 2) & 0x1) == 0x0))
1641 break;
1642 else if (((tmp >> 2) & 0x1) == 0x1)
1643 return -EIO;
1644 }
1645 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1646 writel(0x00003f7c, (&denali_pi[175]));
1647 }
1648 clrbits_le32(&denali_pi[80], 0x3 << 16);
1649
1650 return 0;
1651 }
1652
static int data_training_wdql(const struct chan_info *chan, u32 channel,
			      const struct rk3399_sdram_params *sdram_params)
1655 {
1656 u32 *denali_pi = chan->pi->denali_pi;
1657 u32 i, tmp;
1658 u32 rank = sdram_params->ch[channel].cap_info.rank;
1659 u32 rank_mask;
1660
1661 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1662 writel(0x00003f7c, (&denali_pi[175]));
1663
1664 if (sdram_params->base.dramtype == LPDDR4)
1665 rank_mask = (rank == 1) ? 0x5 : 0xf;
1666 else
1667 rank_mask = (rank == 1) ? 0x1 : 0x3;
1668
1669 for (i = 0; i < 4; i++) {
1670 if (!(rank_mask & (1 << i)))
1671 continue;
1672
1673 select_per_cs_training_index(chan, i);
1674 /*
1675 * disable PI_WDQLVL_VREF_EN before wdq leveling?
1676 * PI_117 PI_WDQLVL_VREF_EN:RW:8:1
1677 */
1678 clrbits_le32(&denali_pi[117], 0x1 << 8);
1679 /* PI_124 PI_WDQLVL_EN:RW:16:2 */
1680 clrsetbits_le32(&denali_pi[124], 0x3 << 16, 0x2 << 16);
1681 /* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
1682 clrsetbits_le32(&denali_pi[121],
1683 (0x1 << 8) | (0x3 << 16),
1684 (0x1 << 8) | (i << 16));
1685
1686 /* Waiting for training complete */
1687 while (1) {
1688 /* PI_174 PI_INT_STATUS:RD:8:18 */
1689 tmp = readl(&denali_pi[174]) >> 8;
1690 if ((((tmp >> 12) & 0x1) == 0x1) &&
1691 (((tmp >> 13) & 0x1) == 0x1) &&
1692 (((tmp >> 6) & 0x1) == 0x0))
1693 break;
1694 else if (((tmp >> 6) & 0x1) == 0x1)
1695 return -EIO;
1696 }
1697 /* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
1698 writel(0x00003f7c, (&denali_pi[175]));
1699 }
1700 clrbits_le32(&denali_pi[124], 0x3 << 16);
1701
1702 return 0;
1703 }
1704
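/*
 * Run the requested trainings on one channel. PI_FULL_TRAINING expands to
 * whatever the DRAM type supports: LPDDR4 gets write leveling, read gate
 * training, read leveling and write DQ leveling; LPDDR3 gets CA training,
 * write leveling and read gate training; DDR3 gets write leveling, read
 * gate training and read leveling.
 */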
1705 static int data_training(const struct chan_info *chan, u32 channel,
1706 const struct rk3399_sdram_params *sdram_params,
1707 u32 training_flag)
1708 {
1709 u32 *denali_phy = chan->publ->denali_phy;
1710 int ret = 0;
1711
1712 /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
1713 setbits_le32(&denali_phy[927], (1 << 22));
1714
1715 if (training_flag == PI_FULL_TRAINING) {
1716 if (sdram_params->base.dramtype == LPDDR4) {
1717 training_flag = PI_WRITE_LEVELING |
1718 PI_READ_GATE_TRAINING |
1719 PI_READ_LEVELING | PI_WDQ_LEVELING;
1720 } else if (sdram_params->base.dramtype == LPDDR3) {
1721 training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
1722 PI_READ_GATE_TRAINING;
1723 } else if (sdram_params->base.dramtype == DDR3) {
1724 training_flag = PI_WRITE_LEVELING |
1725 PI_READ_GATE_TRAINING |
1726 PI_READ_LEVELING;
1727 }
1728 }
1729
1730 /* ca training(LPDDR4,LPDDR3 support) */
1731 if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
1732 ret = data_training_ca(chan, channel, sdram_params);
1733 if (ret != 0)
1734 goto out;
1735 }
1736
1737 /* write leveling(LPDDR4,LPDDR3,DDR3 support) */
1738 if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
1739 ret = data_training_wl(chan, channel, sdram_params);
1740 if (ret != 0)
1741 goto out;
1742 }
1743
1744 /* read gate training(LPDDR4,LPDDR3,DDR3 support) */
1745 if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
1746 ret = data_training_rg(chan, channel, sdram_params);
1747 if (ret != 0)
1748 goto out;
1749 }
1750
1751 /* read leveling(LPDDR4,LPDDR3,DDR3 support) */
1752 if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
1753 ret = data_training_rl(chan, channel, sdram_params);
1754 if (ret != 0)
1755 goto out;
1756 }
1757
1758 /* wdq leveling(LPDDR4 support) */
1759 if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
1760 ret = data_training_wdql(chan, channel, sdram_params);
1761 if (ret != 0)
1762 goto out;
1763 }
1764
1765 /* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
1766 clrbits_le32(&denali_phy[927], (1 << 22));
1767
1768 out:
1769 return ret;
1770 }
1771
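/*
 * Program the memory scheduler's ddrconf/ddrsize registers for one channel.
 * The capacity maths below is in MB: for example cs0_row = 15, col = 10,
 * bk = 3 and bw = 2 (32-bit) give cs0_cap = 1 << (15 + 10 + 3 + 2 - 20) =
 * 1024 MB, which is then written to ddrsize in 32 MB units.
 */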
1772 static void set_ddrconfig(const struct chan_info *chan,
1773 const struct rk3399_sdram_params *sdram_params,
1774 unsigned char channel, u32 ddrconfig)
1775 {
1776 /* only need to set ddrconfig */
1777 struct msch_regs *ddr_msch_regs = chan->msch;
1778 unsigned int cs0_cap = 0;
1779 unsigned int cs1_cap = 0;
1780
1781 cs0_cap = (1 << (sdram_params->ch[channel].cap_info.cs0_row
1782 + sdram_params->ch[channel].cap_info.col
1783 + sdram_params->ch[channel].cap_info.bk
1784 + sdram_params->ch[channel].cap_info.bw - 20));
1785 if (sdram_params->ch[channel].cap_info.rank > 1)
1786 cs1_cap = cs0_cap >> (sdram_params->ch[channel].cap_info.cs0_row
1787 - sdram_params->ch[channel].cap_info.cs1_row);
1788 if (sdram_params->ch[channel].cap_info.row_3_4) {
1789 cs0_cap = cs0_cap * 3 / 4;
1790 cs1_cap = cs1_cap * 3 / 4;
1791 }
1792
1793 writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
1794 writel(((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8),
1795 &ddr_msch_regs->ddrsize);
1796 }
1797
1798 static void sdram_msch_config(struct msch_regs *msch,
1799 struct sdram_msch_timings *noc_timings)
1800 {
1801 writel(noc_timings->ddrtiminga0.d32,
1802 &msch->ddrtiminga0.d32);
1803 writel(noc_timings->ddrtimingb0.d32,
1804 &msch->ddrtimingb0.d32);
1805 writel(noc_timings->ddrtimingc0.d32,
1806 &msch->ddrtimingc0.d32);
1807 writel(noc_timings->devtodev0.d32,
1808 &msch->devtodev0.d32);
1809 writel(noc_timings->ddrmode.d32,
1810 &msch->ddrmode.d32);
1811 }
1812
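/*
 * Apply the per-channel NOC timings, then record the detected geometry in
 * pmugrf os_reg2/os_reg3 and the address stride in pmusgrf soc_con4 so that
 * later boot stages can recover the memory layout (see rockchip_sdram_size()
 * in the non-TPL probe path below).
 */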
1813 static void dram_all_config(struct dram_info *dram,
1814 struct rk3399_sdram_params *sdram_params)
1815 {
1816 u32 sys_reg2 = 0;
1817 u32 sys_reg3 = 0;
1818 unsigned int channel, idx;
1819
1820 for (channel = 0, idx = 0;
1821 (idx < sdram_params->base.num_channels) && (channel < 2);
1822 channel++) {
1823 struct msch_regs *ddr_msch_regs;
1824 struct sdram_msch_timings *noc_timing;
1825
1826 if (sdram_params->ch[channel].cap_info.col == 0)
1827 continue;
1828 idx++;
1829 sdram_org_config(&sdram_params->ch[channel].cap_info,
1830 &sdram_params->base, &sys_reg2,
1831 &sys_reg3, channel);
1832 ddr_msch_regs = dram->chan[channel].msch;
1833 noc_timing = &sdram_params->ch[channel].noc_timings;
1834 sdram_msch_config(ddr_msch_regs, noc_timing);
1835
1836 /* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
1837 if (sdram_params->ch[channel].cap_info.rank == 1)
1838 setbits_le32(&dram->chan[channel].pctl->denali_ctl[276],
1839 1 << 17);
1840 }
1841
1842 writel(sys_reg2, &dram->pmugrf->os_reg2);
1843 writel(sys_reg3, &dram->pmugrf->os_reg3);
1844 rk_clrsetreg(&dram->pmusgrf->soc_con4, 0x1f << 10,
1845 sdram_params->base.stride << 10);
1846
1847 /* reboot hold register set */
1848 writel(PRESET_SGRF_HOLD(0) | PRESET_GPIO0_HOLD(1) |
1849 PRESET_GPIO1_HOLD(1),
1850 &dram->pmucru->pmucru_rstnhold_con[1]);
1851 clrsetbits_le32(&dram->cru->glb_rst_con, 0x3, 0x3);
1852 }
1853
1854 static int switch_to_phy_index1(struct dram_info *dram,
1855 const struct rk3399_sdram_params *sdram_params)
1856 {
1857 u32 channel;
1858 u32 *denali_phy;
1859 u32 ch_count = sdram_params->base.num_channels;
1860 int ret;
1861 int i = 0;
1862
1863 writel(RK_CLRSETBITS(0x03 << 4 | 1 << 2 | 1,
1864 1 << 4 | 1 << 2 | 1),
1865 &dram->cic->cic_ctrl0);
1866 while (!(readl(&dram->cic->cic_status0) & (1 << 2))) {
1867 mdelay(10);
1868 i++;
1869 if (i > 10) {
1870 debug("index1 frequency change overtime\n");
1871 return -ETIME;
1872 }
1873 }
1874
1875 i = 0;
1876 writel(RK_CLRSETBITS(1 << 1, 1 << 1), &dram->cic->cic_ctrl0);
1877 while (!(readl(&dram->cic->cic_status0) & (1 << 0))) {
1878 mdelay(10);
1879 		if (i++ > 10) {
1880 debug("index1 frequency done overtime\n");
1881 return -ETIME;
1882 }
1883 }
1884
1885 for (channel = 0; channel < ch_count; channel++) {
1886 denali_phy = dram->chan[channel].publ->denali_phy;
1887 clrsetbits_le32(&denali_phy[896], (0x3 << 8) | 1, 1 << 8);
1888 ret = data_training(&dram->chan[channel], channel,
1889 sdram_params, PI_FULL_TRAINING);
1890 if (ret) {
1891 debug("index1 training failed\n");
1892 return ret;
1893 }
1894 }
1895
1896 return 0;
1897 }
1898
1899 u16 ddr_cfg_2_rbc[] = {
1900 /*
1901 * [6] highest bit col
1902 * [5:3] max row(14+n)
1903 * [2] insertion row
1904 	 * [1:0] col(9+n), data bus 32bit
1905 *
1906 * highbitcol, max_row, insertion_row, col
1907 */
1908 ((0 << 6) | (2 << 3) | (0 << 2) | 0), /* 0 */
1909 ((0 << 6) | (2 << 3) | (0 << 2) | 1), /* 1 */
1910 ((0 << 6) | (1 << 3) | (0 << 2) | 2), /* 2 */
1911 ((0 << 6) | (0 << 3) | (0 << 2) | 3), /* 3 */
1912 ((0 << 6) | (2 << 3) | (1 << 2) | 1), /* 4 */
1913 ((0 << 6) | (1 << 3) | (1 << 2) | 2), /* 5 */
1914 ((1 << 6) | (0 << 3) | (0 << 2) | 2), /* 6 */
1915 ((1 << 6) | (1 << 3) | (0 << 2) | 2), /* 7 */
1916 };
1917
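/*
 * Pick a ddrconfig index from ddr_cfg_2_rbc[] matching the detected
 * geometry; only the first four entries are searched here. For example,
 * bw = 2 and col = 10 normalize to col = 1, and cs0_row = 15 fits entry 1
 * (max row 14 + 2 = 16), so index 1 is returned.
 */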
1918 static u32 calculate_ddrconfig(struct rk3399_sdram_params *sdram_params,
1919 u32 channel)
1920 {
1921 unsigned int i;
1922 unsigned int cs0_row = sdram_params->ch[channel].cap_info.cs0_row;
1923 unsigned int col = sdram_params->ch[channel].cap_info.col;
1924 unsigned int bw = sdram_params->ch[channel].cap_info.bw;
1925
1926 col -= (bw == 2) ? 0 : 1;
1927 col -= 9;
1928
1929 for (i = 0; i < 4; i++) {
1930 if ((col == (ddr_cfg_2_rbc[i] & 0x3)) &&
1931 (cs0_row <= (((ddr_cfg_2_rbc[i] >> 3) & 0x7) + 14)))
1932 break;
1933 }
1934
1935 if (i >= 4)
1936 i = -1;
1937
1938 return i;
1939 }
1940
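/*
 * Derive the pmusgrf stride setting from the per-channel capacities. With
 * gstride_type left at STRIDE_256B as set below and two equal 1 GB channels
 * (cap = 2048 MB), the switch selects stride 0x9, reported as "256B stride".
 */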
1941 static unsigned char calculate_stride(struct rk3399_sdram_params *sdram_params)
1942 {
1943 unsigned int gstride_type;
1944 unsigned int channel;
1945 unsigned int chinfo = 0;
1946 unsigned int cap = 0;
1947 unsigned int stride = -1;
1948 unsigned int ch_cap[2] = {0, 0};
1949
1950 gstride_type = STRIDE_256B;
1951
1952 for (channel = 0; channel < 2; channel++) {
1953 unsigned int cs0_cap = 0;
1954 unsigned int cs1_cap = 0;
1955 struct sdram_cap_info *cap_info =
1956 &sdram_params->ch[channel].cap_info;
1957
1958 if (cap_info->col == 0)
1959 continue;
1960
1961 cs0_cap = (1 << (cap_info->cs0_row + cap_info->col +
1962 cap_info->bk + cap_info->bw - 20));
1963 if (cap_info->rank > 1)
1964 cs1_cap = cs0_cap >> (cap_info->cs0_row
1965 - cap_info->cs1_row);
1966 if (cap_info->row_3_4) {
1967 cs0_cap = cs0_cap * 3 / 4;
1968 cs1_cap = cs1_cap * 3 / 4;
1969 }
1970 ch_cap[channel] = cs0_cap + cs1_cap;
1971 chinfo |= 1 << channel;
1972 }
1973
1974 cap = ch_cap[0] + ch_cap[1];
1975 if (sdram_params->base.num_channels == 1) {
1976 if (chinfo & 1) /* channel a only */
1977 stride = 0x17;
1978 else /* channel b only */
1979 stride = 0x18;
1980 } else {/* 2 channel */
1981 if (ch_cap[0] == ch_cap[1]) {
1982 /* interleaved */
1983 if (gstride_type == PART_STRIDE) {
1984 /*
1985 			 * first 64MB not interleaved, rest 256B interleaved.
1986 			 * If 768MB+768MB: useful space is 0-1280MB and
1987 			 * 1536MB-1792MB.
1988 			 * If 1.5GB+1.5GB (continuous): useful space is
1989 			 * 0-2560MB and 3072MB-3584MB.
1990 */
1991 stride = 0x1F;
1992 } else {
1993 switch (cap) {
1994 /* 512MB */
1995 case 512:
1996 stride = 0;
1997 break;
1998 /* 1GB unstride or 256B stride*/
1999 case 1024:
2000 stride = (gstride_type == UN_STRIDE) ?
2001 0x1 : 0x5;
2002 break;
2003 /*
2004 * 768MB + 768MB same as total 2GB memory
2005 * useful space: 0-768MB 1GB-1792MB
2006 */
2007 case 1536:
2008 /* 2GB unstride or 256B or 512B stride */
2009 case 2048:
2010 stride = (gstride_type == UN_STRIDE) ?
2011 0x2 :
2012 ((gstride_type == STRIDE_512B) ?
2013 0xA : 0x9);
2014 break;
2015 /* 1536MB + 1536MB */
2016 case 3072:
2017 stride = (gstride_type == UN_STRIDE) ?
2018 0x3 :
2019 ((gstride_type == STRIDE_512B) ?
2020 0x12 : 0x11);
2021 break;
2022 /* 4GB unstride or 128B,256B,512B,4KB stride */
2023 case 4096:
2024 stride = (gstride_type == UN_STRIDE) ?
2025 0x3 : (0xC + gstride_type);
2026 break;
2027 }
2028 }
2029 }
2030 if (ch_cap[0] == 2048 && ch_cap[1] == 1024) {
2031 /* 2GB + 1GB */
2032 stride = (gstride_type == UN_STRIDE) ? 0x3 : 0x19;
2033 }
2034 /*
2035 		 * Remaining cases: the two channel capacities are not equal,
2036 		 * or the capacity is not a power of 2.
2037 */
2038 if (stride == (-1)) {
2039 switch ((ch_cap[0] > ch_cap[1]) ?
2040 ch_cap[0] : ch_cap[1]) {
2041 case 256: /* 256MB + 128MB */
2042 stride = 0;
2043 break;
2044 case 512: /* 512MB + 256MB */
2045 stride = 1;
2046 break;
2047 case 1024:/* 1GB + 128MB/256MB/384MB/512MB/768MB */
2048 stride = 2;
2049 break;
2050 case 2048: /* 2GB + 128MB/256MB/384MB/512MB/768MB/1GB */
2051 stride = 3;
2052 break;
2053 default:
2054 break;
2055 }
2056 }
2057 if (stride == (-1))
2058 goto error;
2059 }
2060 switch (stride) {
2061 case 0xc:
2062 printf("128B stride\n");
2063 break;
2064 case 5:
2065 case 9:
2066 case 0xd:
2067 case 0x11:
2068 case 0x19:
2069 printf("256B stride\n");
2070 break;
2071 case 0xa:
2072 case 0xe:
2073 case 0x12:
2074 printf("512B stride\n");
2075 break;
2076 case 0xf:
2077 printf("4K stride\n");
2078 break;
2079 case 0x1f:
2080 printf("32MB + 256B stride\n");
2081 break;
2082 default:
2083 printf("no stride\n");
2084 }
2085
2086 return stride;
2087 error:
2088 	printf("Capacity not supported!\n");
2089 return (-1);
2090 }
2091
2092 static u32 get_ddr_stride(struct rk3399_pmusgrf_regs *pmusgrf)
2093 {
2094 u32 val;
2095
2096 val = (readl(&pmusgrf->soc_con4) >> 10) & 0x1F;
2097
2098 return val;
2099 }
2100
2101 static void set_ddr_stride(struct rk3399_pmusgrf_regs *pmusgrf, u32 stride)
2102 {
2103 rk_clrsetreg(&pmusgrf->soc_con4, 0x1f << 10,
2104 stride << 10);
2105 }
2106
2107 static void set_cap_relate_config(const struct chan_info *chan,
2108 struct rk3399_sdram_params *sdram_params,
2109 unsigned int channel)
2110 {
2111 u32 *denali_ctl = chan->pctl->denali_ctl;
2112 u32 tmp;
2113 struct sdram_msch_timings *noc_timing;
2114
2115 if (sdram_params->base.dramtype == LPDDR3) {
2116 tmp = (8 << sdram_params->ch[channel].cap_info.bw) /
2117 (8 << sdram_params->ch[channel].cap_info.dbw);
2118 /* memdata_ratio
2119 * 1 -> 0, 2 -> 1, 4 -> 2
2120 */
2121 clrsetbits_le32(&denali_ctl[197], 0x7,
2122 (tmp >> 1));
2123 clrsetbits_le32(&denali_ctl[198], 0x7 << 8,
2124 (tmp >> 1) << 8);
2125 }
2126 noc_timing = &sdram_params->ch[channel].noc_timings;
2127 /*
2128 	 * noc bandwidth-related timings assume a 32-bit bus, but the real bus
2129 	 * width is 16 bits; the noc registers are actually written in dram_all_config()
2130 */
2131 if (sdram_params->ch[channel].cap_info.bw == 16 &&
2132 noc_timing->ddrmode.b.mwrsize == 2) {
2133 if (noc_timing->ddrmode.b.burstsize)
2134 noc_timing->ddrmode.b.burstsize -= 1;
2135 noc_timing->ddrmode.b.mwrsize -= 1;
2136 noc_timing->ddrtimingc0.b.burstpenalty *= 2;
2137 noc_timing->ddrtimingc0.b.wrtomwr *= 2;
2138 }
2139 }
2140
2141 static void clear_channel_params(struct rk3399_sdram_params *sdram_params,
2142 unsigned int channel)
2143 {
2144 sdram_params->ch[channel].cap_info.rank = 0;
2145 sdram_params->ch[channel].cap_info.col = 0;
2146 sdram_params->ch[channel].cap_info.bk = 0;
2147 sdram_params->ch[channel].cap_info.bw = 32;
2148 sdram_params->ch[channel].cap_info.dbw = 32;
2149 sdram_params->ch[channel].cap_info.row_3_4 = 0;
2150 sdram_params->ch[channel].cap_info.cs0_row = 0;
2151 sdram_params->ch[channel].cap_info.cs1_row = 0;
2152 sdram_params->ch[channel].cap_info.ddrconfig = 0;
2153 }
2154
2155 /* CS0,n=1
2156 * CS1,n=2
2157 * CS0 & CS1, n=3
2158 * cs0_cap: MB unit
2159 */
2160 static void dram_set_cs(const struct chan_info *chan, u32 cs_map, u32 cs0_cap,
2161 unsigned char dramtype)
2162 {
2163 u32 *denali_ctl = chan->pctl->denali_ctl;
2164 u32 *denali_pi = chan->pi->denali_pi;
2165 struct msch_regs *ddr_msch_regs = chan->msch;
2166
2167 clrsetbits_le32(&denali_ctl[196], 0x3, cs_map);
2168 writel((cs0_cap / 32) | (((4096 - cs0_cap) / 32) << 8),
2169 &ddr_msch_regs->ddrsize);
2170 if (dramtype == LPDDR4) {
2171 if (cs_map == 1)
2172 cs_map = 0x5;
2173 else if (cs_map == 2)
2174 cs_map = 0xa;
2175 else
2176 cs_map = 0xF;
2177 }
2178 /*PI_41 PI_CS_MAP:RW:24:4*/
2179 clrsetbits_le32(&denali_pi[41],
2180 0xf << 24, cs_map << 24);
2181 if (cs_map == 1 && dramtype == DDR3)
2182 writel(0x2EC7FFFF, &denali_pi[34]);
2183 }
2184
2185 static void dram_set_bw(const struct chan_info *chan, u32 bw)
2186 {
2187 u32 *denali_ctl = chan->pctl->denali_ctl;
2188
2189 if (bw == 2)
2190 clrbits_le32(&denali_ctl[196], 1 << 16);
2191 else
2192 setbits_le32(&denali_ctl[196], 1 << 16);
2193 }
2194
2195 static void dram_set_max_col(const struct chan_info *chan, u32 bw, u32 *pcol)
2196 {
2197 u32 *denali_ctl = chan->pctl->denali_ctl;
2198 struct msch_regs *ddr_msch_regs = chan->msch;
2199 u32 *denali_pi = chan->pi->denali_pi;
2200 u32 ddrconfig;
2201
2202 clrbits_le32(&denali_ctl[191], 0xf);
2203 clrsetbits_le32(&denali_ctl[190],
2204 (7 << 24),
2205 ((16 - ((bw == 2) ? 14 : 15)) << 24));
2206 /*PI_199 PI_COL_DIFF:RW:0:4*/
2207 clrbits_le32(&denali_pi[199], 0xf);
2208 /*PI_155 PI_ROW_DIFF:RW:24:3*/
2209 clrsetbits_le32(&denali_pi[155],
2210 (7 << 24),
2211 ((16 - 12) << 24));
2212 ddrconfig = (bw == 2) ? 3 : 2;
2213 writel(ddrconfig | (ddrconfig << 8), &ddr_msch_regs->ddrconf);
2214 /* set max cs0 size */
2215 writel((4096 / 32) | ((0 / 32) << 8),
2216 &ddr_msch_regs->ddrsize);
2217
2218 *pcol = 12;
2219 }
2220
2221 static void dram_set_max_bank(const struct chan_info *chan, u32 bw, u32 *pbank,
2222 u32 *pcol)
2223 {
2224 u32 *denali_ctl = chan->pctl->denali_ctl;
2225 u32 *denali_pi = chan->pi->denali_pi;
2226
2227 clrbits_le32(&denali_ctl[191], 0xf);
2228 clrbits_le32(&denali_ctl[190], (3 << 16));
2229 /*PI_199 PI_COL_DIFF:RW:0:4*/
2230 clrbits_le32(&denali_pi[199], 0xf);
2231 /*PI_155 PI_BANK_DIFF:RW:16:2*/
2232 clrbits_le32(&denali_pi[155], (3 << 16));
2233
2234 *pbank = 3;
2235 *pcol = 12;
2236 }
2237
2238 static void dram_set_max_row(const struct chan_info *chan, u32 bw, u32 *prow,
2239 u32 *pbank, u32 *pcol)
2240 {
2241 u32 *denali_ctl = chan->pctl->denali_ctl;
2242 u32 *denali_pi = chan->pi->denali_pi;
2243 struct msch_regs *ddr_msch_regs = chan->msch;
2244
2245 clrsetbits_le32(&denali_ctl[191], 0xf, 12 - 10);
2246 clrbits_le32(&denali_ctl[190],
2247 (0x3 << 16) | (0x7 << 24));
2248 /*PI_199 PI_COL_DIFF:RW:0:4*/
2249 clrsetbits_le32(&denali_pi[199], 0xf, 12 - 10);
2250 /*PI_155 PI_ROW_DIFF:RW:24:3 PI_BANK_DIFF:RW:16:2*/
2251 clrbits_le32(&denali_pi[155],
2252 (0x3 << 16) | (0x7 << 24));
2253 writel(1 | (1 << 8), &ddr_msch_regs->ddrconf);
2254 /* set max cs0 size */
2255 writel((4096 / 32) | ((0 / 32) << 8),
2256 &ddr_msch_regs->ddrsize);
2257
2258 *prow = 16;
2259 *pbank = 3;
2260 *pcol = (bw == 2) ? 10 : 11;
2261 }
2262
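/*
 * Detect the geometry of one channel. Bus width is probed first (32-bit,
 * falling back to 16-bit) using read gate training as the pass/fail check;
 * the controller is then temporarily configured for maximum column, bank and
 * row counts so the sdram_detect_*() helpers can shrink each parameter to
 * what the devices actually decode.
 */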
2263 static u64 dram_detect_cap(struct dram_info *dram,
2264 struct rk3399_sdram_params *sdram_params,
2265 unsigned char channel)
2266 {
2267 const struct chan_info *chan = &dram->chan[channel];
2268 struct sdram_cap_info *cap_info = &sdram_params->ch[channel].cap_info;
2269 u32 bw;
2270 u32 col_tmp;
2271 u32 bk_tmp;
2272 u32 row_tmp;
2273 u32 cs0_cap;
2274 u32 training_flag;
2275 u32 ddrconfig;
2276
2277 /* detect bw */
2278 bw = 2;
2279 if (sdram_params->base.dramtype != LPDDR4) {
2280 dram_set_bw(chan, bw);
2281 cap_info->bw = bw;
2282 if (data_training(chan, channel, sdram_params,
2283 PI_READ_GATE_TRAINING)) {
2284 bw = 1;
2285 dram_set_bw(chan, 1);
2286 cap_info->bw = bw;
2287 if (data_training(chan, channel, sdram_params,
2288 PI_READ_GATE_TRAINING)) {
2289 printf("16bit error!!!\n");
2290 goto error;
2291 }
2292 }
2293 }
2294 /*
2295 	 * LPDDR3 CA training must be triggered before other training.
2296 	 * DDR3 does not have CA training.
2297 */
2298 if (sdram_params->base.dramtype == LPDDR3)
2299 training_flag = PI_WRITE_LEVELING;
2300 else
2301 training_flag = PI_FULL_TRAINING;
2302
2303 if (sdram_params->base.dramtype != LPDDR4) {
2304 if (data_training(chan, channel, sdram_params, training_flag)) {
2305 printf("full training error!!!\n");
2306 goto error;
2307 }
2308 }
2309
2310 /* detect col */
2311 dram_set_max_col(chan, bw, &col_tmp);
2312 if (sdram_detect_col(cap_info, col_tmp) != 0)
2313 goto error;
2314
2315 /* detect bank */
2316 dram_set_max_bank(chan, bw, &bk_tmp, &col_tmp);
2317 sdram_detect_bank(cap_info, col_tmp, bk_tmp);
2318
2319 /* detect row */
2320 dram_set_max_row(chan, bw, &row_tmp, &bk_tmp, &col_tmp);
2321 if (sdram_detect_row(cap_info, col_tmp, bk_tmp, row_tmp) != 0)
2322 goto error;
2323
2324 /* detect row_3_4 */
2325 sdram_detect_row_3_4(cap_info, col_tmp, bk_tmp);
2326
2327 /* set ddrconfig */
2328 cs0_cap = (1 << (cap_info->cs0_row + cap_info->col + cap_info->bk +
2329 cap_info->bw - 20));
2330 if (cap_info->row_3_4)
2331 cs0_cap = cs0_cap * 3 / 4;
2332
2333 cap_info->cs1_row = cap_info->cs0_row;
2334 set_memory_map(chan, channel, sdram_params);
2335 ddrconfig = calculate_ddrconfig(sdram_params, channel);
2336 if (-1 == ddrconfig)
2337 goto error;
2338 set_ddrconfig(chan, sdram_params, channel,
2339 cap_info->ddrconfig);
2340
2341 /* detect cs1 row */
2342 sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
2343
2344 /* detect die bw */
2345 sdram_detect_dbw(cap_info, sdram_params->base.dramtype);
2346
2347 return 0;
2348 error:
2349 return (-1);
2350 }
2351
2352 /* read mr_num mode register
2353 * input: rank = 1: cs0, rank = 2: cs1
2354 * mr_num: mode register number
2355 * output: buf
2356 */
2357 static int read_mr(struct rk3399_ddr_pctl_regs *ddr_pctl_regs, u32 rank,
2358 u32 mr_num, u32 *buf)
2359 {
2360 s32 timeout = 100;
2361
2362 writel(((1 << 16) |
2363 (((rank == 2) ? 1 : 0) << 8) |
2364 mr_num) << 8,
2365 &ddr_pctl_regs->denali_ctl[118]);
2366 while (0 == (readl(&ddr_pctl_regs->denali_ctl[203]) &
2367 ((1 << 21) | (1 << 12)))) {
2368 udelay(1);
2369 if (timeout <= 0)
2370 goto error;
2371 timeout--;
2372 }
2373 if (!(readl(&ddr_pctl_regs->denali_ctl[203]) & (1 << 12))) {
2374 *buf = readl(&ddr_pctl_regs->denali_ctl[119]) & 0xFF;
2375 } else {
2376 printf("read mr error\n");
2377 printf("MRR_ERROR_STATUS = 0x%x\n",
2378 readl(&ddr_pctl_regs->denali_ctl[17]) & 0x3);
2379 *buf = 0;
2380 }
2381 setbits_le32(&ddr_pctl_regs->denali_ctl[205], (1 << 21) | (1 << 12));
2382 return 0;
2383 error:
2384 return (-1);
2385 }
2386
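/*
 * Probe whether a rank is present on an LPDDR4 channel via its mode
 * registers: program a temporary 2-rank/2GB address map, touch one word that
 * belongs to the rank under test, then read MR5/MR12/MR14. A present rank
 * returns a non-zero MR5 and the expected 0x4d in MR12 and MR14.
 */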
2387 static int read_mr_for_detect(struct dram_info *dram, u32 channel, u32 rank,
2388 struct rk3399_sdram_params *sdram_params)
2389 {
2390 u64 cs0_cap;
2391 u32 stride;
2392 u32 cs = 0, col = 0, bk = 0, bw = 0, row_3_4 = 0;
2393 u32 cs0_row = 0, cs1_row = 0, ddrconfig = 0;
2394 u32 mr5, mr12, mr14;
2395 struct chan_info *chan =
2396 &dram->chan[channel];
2397 struct rk3399_ddr_pctl_regs *ddr_pctl_regs = chan->pctl;
2398 int ret = 0;
2399 u32 val;
2400 void __iomem *addr = NULL;
2401
2402 stride = get_ddr_stride(dram->pmusgrf);
2403
2404 if (sdram_params->ch[channel].cap_info.col == 0) {
2405 ret = -1;
2406 goto end;
2407 }
2408
2409 cs = sdram_params->ch[channel].cap_info.rank;
2410 col = sdram_params->ch[channel].cap_info.col;
2411 bk = sdram_params->ch[channel].cap_info.bk;
2412 bw = sdram_params->ch[channel].cap_info.bw;
2413 row_3_4 = sdram_params->ch[channel].cap_info.row_3_4;
2414 cs0_row = sdram_params->ch[channel].cap_info.cs0_row;
2415 cs1_row = sdram_params->ch[channel].cap_info.cs1_row;
2416 ddrconfig = sdram_params->ch[channel].cap_info.ddrconfig;
2417
2418 /* 2GB */
2419 sdram_params->ch[channel].cap_info.rank = 2;
2420 sdram_params->ch[channel].cap_info.col = 10;
2421 sdram_params->ch[channel].cap_info.bk = 3;
2422 sdram_params->ch[channel].cap_info.bw = 2;
2423 sdram_params->ch[channel].cap_info.row_3_4 = 0;
2424 sdram_params->ch[channel].cap_info.cs0_row = 15;
2425 sdram_params->ch[channel].cap_info.cs1_row = 15;
2426 sdram_params->ch[channel].cap_info.ddrconfig = 1;
2427
2428 set_memory_map(chan, channel, sdram_params);
2429 sdram_params->ch[channel].cap_info.ddrconfig =
2430 calculate_ddrconfig(sdram_params, channel);
2431 set_ddrconfig(chan, sdram_params, channel,
2432 sdram_params->ch[channel].cap_info.ddrconfig);
2433 set_cap_relate_config(chan, sdram_params, channel);
2434
2435 cs0_cap = (1 << (sdram_params->ch[channel].cap_info.bw
2436 + sdram_params->ch[channel].cap_info.col
2437 + sdram_params->ch[channel].cap_info.bk
2438 + sdram_params->ch[channel].cap_info.cs0_row));
2439
2440 if (sdram_params->ch[channel].cap_info.row_3_4)
2441 cs0_cap = cs0_cap * 3 / 4;
2442
2443 if (channel == 0)
2444 set_ddr_stride(dram->pmusgrf, 0x17);
2445 else
2446 set_ddr_stride(dram->pmusgrf, 0x18);
2447
2448 	/* Note: this clobbers DRAM contents here!
2449 	 * Read and write data to DRAM so the access is not optimized away by the compiler.
2450 */
2451 if (rank == 1)
2452 addr = (void __iomem *)0x100;
2453 else if (rank == 2)
2454 addr = (void __iomem *)(cs0_cap + 0x100);
2455
2456 val = readl(addr);
2457 writel(val + 1, addr);
2458
2459 read_mr(ddr_pctl_regs, rank, 5, &mr5);
2460 read_mr(ddr_pctl_regs, rank, 12, &mr12);
2461 read_mr(ddr_pctl_regs, rank, 14, &mr14);
2462
2463 if (mr5 == 0 || mr12 != 0x4d || mr14 != 0x4d) {
2464 ret = -1;
2465 goto end;
2466 }
2467 end:
2468 sdram_params->ch[channel].cap_info.rank = cs;
2469 sdram_params->ch[channel].cap_info.col = col;
2470 sdram_params->ch[channel].cap_info.bk = bk;
2471 sdram_params->ch[channel].cap_info.bw = bw;
2472 sdram_params->ch[channel].cap_info.row_3_4 = row_3_4;
2473 sdram_params->ch[channel].cap_info.cs0_row = cs0_row;
2474 sdram_params->ch[channel].cap_info.cs1_row = cs1_row;
2475 sdram_params->ch[channel].cap_info.ddrconfig = ddrconfig;
2476
2477 set_ddr_stride(dram->pmusgrf, stride);
2478 return ret;
2479 }
2480
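/*
 * Map a controller frequency-set index (ctl_fn) to the PHY copy index used
 * for that set: for LPDDR4, controller sets 0, 1 and 2 use PHY indices 1, 0
 * and 0xb respectively; other DRAM types use the same index on both sides.
 */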
2481 static u32 get_phy_fn(struct rk3399_sdram_params *sdram_params, u32 ctl_fn)
2482 {
2483 u32 lp4_phy_fn[] = {1, 0, 0xb};
2484
2485 if (sdram_params->base.dramtype == LPDDR4)
2486 return lp4_phy_fn[ctl_fn];
2487 else
2488 return ctl_fn;
2489 }
2490
2491 static u32 get_ctl_fn(struct rk3399_sdram_params *sdram_params, u32 phy_fn)
2492 {
2493 u32 lp4_ctl_fn[] = {1, 0, 2};
2494
2495 if (sdram_params->base.dramtype == LPDDR4)
2496 return lp4_ctl_fn[phy_fn];
2497 else
2498 return phy_fn;
2499 }
2500
2501 static void dram_copy_phy_fn(struct dram_info *dram,
2502 struct rk3399_sdram_params *sdram_params, u32 fn,
2503 struct rk3399_sdram_params *f1_sdram_params,
2504 u32 channel)
2505 {
2506 u32 *denali_ctl;
2507 u32 *denali_phy;
2508 u32 *denali_phy_params;
2509 u32 speed = 0;
2510 u32 mr5;
2511 u32 ctl_fn;
2512
2513 denali_ctl = dram->chan[channel].pctl->denali_ctl;
2514 denali_phy = dram->chan[channel].publ->denali_phy;
2515 denali_phy_params = f1_sdram_params->phy_regs.denali_phy;
2516
2517 /* switch index */
2518 clrsetbits_le32(&denali_phy_params[896], 0x3 << 8,
2519 fn << 8);
2520 writel(denali_phy_params[896], &denali_phy[896]);
2521
2522 /* phy_pll_ctrl_ca, phy_pll_ctrl */
2523 writel(denali_phy_params[911], &denali_phy[911]);
2524 /* phy_low_freq_sel */
2525 clrsetbits_le32(&denali_phy[913], 0x1,
2526 denali_phy_params[913] & 0x1);
2527 /* PHY_GRP_SLAVE_DELAY_X, phy_cslvl_dly_step */
2528 writel(denali_phy_params[916], &denali_phy[916]);
2529 writel(denali_phy_params[917], &denali_phy[917]);
2530 writel(denali_phy_params[918], &denali_phy[918]);
2531 /* phy_adrZ_sw_wraddr_shift_X */
2532 writel(denali_phy_params[512], &denali_phy[512]);
2533 clrsetbits_le32(&denali_phy[513], 0xFFFF,
2534 denali_phy_params[513] & 0xFFFF);
2535 writel(denali_phy_params[640], &denali_phy[640]);
2536 clrsetbits_le32(&denali_phy[641], 0xFFFF,
2537 denali_phy_params[641] & 0xFFFF);
2538 writel(denali_phy_params[768], &denali_phy[768]);
2539 clrsetbits_le32(&denali_phy[769], 0xFFFF,
2540 denali_phy_params[769] & 0xFFFF);
2541
2542 writel(denali_phy_params[544], &denali_phy[544]);
2543 writel(denali_phy_params[545], &denali_phy[545]);
2544 writel(denali_phy_params[546], &denali_phy[546]);
2545 writel(denali_phy_params[547], &denali_phy[547]);
2546
2547 writel(denali_phy_params[672], &denali_phy[672]);
2548 writel(denali_phy_params[673], &denali_phy[673]);
2549 writel(denali_phy_params[674], &denali_phy[674]);
2550 writel(denali_phy_params[675], &denali_phy[675]);
2551
2552 writel(denali_phy_params[800], &denali_phy[800]);
2553 writel(denali_phy_params[801], &denali_phy[801]);
2554 writel(denali_phy_params[802], &denali_phy[802]);
2555 writel(denali_phy_params[803], &denali_phy[803]);
2556
2557 /*
2558 * phy_adr_master_delay_start_X
2559 * phy_adr_master_delay_step_X
2560 * phy_adr_master_delay_wait_X
2561 */
2562 writel(denali_phy_params[548], &denali_phy[548]);
2563 writel(denali_phy_params[676], &denali_phy[676]);
2564 writel(denali_phy_params[804], &denali_phy[804]);
2565
2566 /* phy_adr_calvl_dly_step_X */
2567 writel(denali_phy_params[549], &denali_phy[549]);
2568 writel(denali_phy_params[677], &denali_phy[677]);
2569 writel(denali_phy_params[805], &denali_phy[805]);
2570
2571 /*
2572 * phy_clk_wrdm_slave_delay_X
2573 * phy_clk_wrdqZ_slave_delay_X
2574 * phy_clk_wrdqs_slave_delay_X
2575 */
2576 sdram_copy_to_reg((u32 *)&denali_phy[59],
2577 (u32 *)&denali_phy_params[59],
2578 (63 - 58) * 4);
2579 sdram_copy_to_reg((u32 *)&denali_phy[187],
2580 (u32 *)&denali_phy_params[187],
2581 (191 - 186) * 4);
2582 sdram_copy_to_reg((u32 *)&denali_phy[315],
2583 (u32 *)&denali_phy_params[315],
2584 (319 - 314) * 4);
2585 sdram_copy_to_reg((u32 *)&denali_phy[443],
2586 (u32 *)&denali_phy_params[443],
2587 (447 - 442) * 4);
2588
2589 /*
2590 * phy_dqs_tsel_wr_timing_X 8bits DENALI_PHY_84/212/340/468 offset_8
2591 * dqs_tsel_wr_end[7:4] add Half cycle
2592 * phy_dq_tsel_wr_timing_X 8bits DENALI_PHY_83/211/339/467 offset_8
2593 * dq_tsel_wr_end[7:4] add Half cycle
2594 */
2595 writel(denali_phy_params[83] + (0x10 << 16), &denali_phy[83]);
2596 writel(denali_phy_params[84] + (0x10 << 8), &denali_phy[84]);
2597 writel(denali_phy_params[85], &denali_phy[85]);
2598
2599 writel(denali_phy_params[211] + (0x10 << 16), &denali_phy[211]);
2600 writel(denali_phy_params[212] + (0x10 << 8), &denali_phy[212]);
2601 writel(denali_phy_params[213], &denali_phy[213]);
2602
2603 writel(denali_phy_params[339] + (0x10 << 16), &denali_phy[339]);
2604 writel(denali_phy_params[340] + (0x10 << 8), &denali_phy[340]);
2605 writel(denali_phy_params[341], &denali_phy[341]);
2606
2607 writel(denali_phy_params[467] + (0x10 << 16), &denali_phy[467]);
2608 writel(denali_phy_params[468] + (0x10 << 8), &denali_phy[468]);
2609 writel(denali_phy_params[469], &denali_phy[469]);
2610
2611 /*
2612 * phy_gtlvl_resp_wait_cnt_X
2613 * phy_gtlvl_dly_step_X
2614 * phy_wrlvl_resp_wait_cnt_X
2615 * phy_gtlvl_final_step_X
2616 * phy_gtlvl_back_step_X
2617 * phy_rdlvl_dly_step_X
2618 *
2619 * phy_master_delay_step_X
2620 * phy_master_delay_wait_X
2621 * phy_wrlvl_dly_step_X
2622 * phy_rptr_update_X
2623 * phy_wdqlvl_dly_step_X
2624 */
2625 writel(denali_phy_params[87], &denali_phy[87]);
2626 writel(denali_phy_params[88], &denali_phy[88]);
2627 writel(denali_phy_params[89], &denali_phy[89]);
2628 writel(denali_phy_params[90], &denali_phy[90]);
2629
2630 writel(denali_phy_params[215], &denali_phy[215]);
2631 writel(denali_phy_params[216], &denali_phy[216]);
2632 writel(denali_phy_params[217], &denali_phy[217]);
2633 writel(denali_phy_params[218], &denali_phy[218]);
2634
2635 writel(denali_phy_params[343], &denali_phy[343]);
2636 writel(denali_phy_params[344], &denali_phy[344]);
2637 writel(denali_phy_params[345], &denali_phy[345]);
2638 writel(denali_phy_params[346], &denali_phy[346]);
2639
2640 writel(denali_phy_params[471], &denali_phy[471]);
2641 writel(denali_phy_params[472], &denali_phy[472]);
2642 writel(denali_phy_params[473], &denali_phy[473]);
2643 writel(denali_phy_params[474], &denali_phy[474]);
2644
2645 /*
2646 * phy_gtlvl_lat_adj_start_X
2647 * phy_gtlvl_rddqs_slv_dly_start_X
2648 * phy_rdlvl_rddqs_dq_slv_dly_start_X
2649 * phy_wdqlvl_dqdm_slv_dly_start_X
2650 */
2651 writel(denali_phy_params[80], &denali_phy[80]);
2652 writel(denali_phy_params[81], &denali_phy[81]);
2653
2654 writel(denali_phy_params[208], &denali_phy[208]);
2655 writel(denali_phy_params[209], &denali_phy[209]);
2656
2657 writel(denali_phy_params[336], &denali_phy[336]);
2658 writel(denali_phy_params[337], &denali_phy[337]);
2659
2660 writel(denali_phy_params[464], &denali_phy[464]);
2661 writel(denali_phy_params[465], &denali_phy[465]);
2662
2663 /*
2664 * phy_master_delay_start_X
2665 * phy_sw_master_mode_X
2666 * phy_rddata_en_tsel_dly_X
2667 */
2668 writel(denali_phy_params[86], &denali_phy[86]);
2669 writel(denali_phy_params[214], &denali_phy[214]);
2670 writel(denali_phy_params[342], &denali_phy[342]);
2671 writel(denali_phy_params[470], &denali_phy[470]);
2672
2673 /*
2674 * phy_rddqZ_slave_delay_X
2675 * phy_rddqs_dqZ_fall_slave_delay_X
2676 * phy_rddqs_dqZ_rise_slave_delay_X
2677 * phy_rddqs_dm_fall_slave_delay_X
2678 * phy_rddqs_dm_rise_slave_delay_X
2679 * phy_rddqs_gate_slave_delay_X
2680 * phy_wrlvl_delay_early_threshold_X
2681 * phy_write_path_lat_add_X
2682 * phy_rddqs_latency_adjust_X
2683 * phy_wrlvl_delay_period_threshold_X
2684 * phy_wrlvl_early_force_zero_X
2685 */
2686 sdram_copy_to_reg((u32 *)&denali_phy[64],
2687 (u32 *)&denali_phy_params[64],
2688 (67 - 63) * 4);
2689 clrsetbits_le32(&denali_phy[68], 0xFFFFFC00,
2690 denali_phy_params[68] & 0xFFFFFC00);
2691 sdram_copy_to_reg((u32 *)&denali_phy[69],
2692 (u32 *)&denali_phy_params[69],
2693 (79 - 68) * 4);
2694
2695 sdram_copy_to_reg((u32 *)&denali_phy[192],
2696 (u32 *)&denali_phy_params[192],
2697 (195 - 191) * 4);
2698 clrsetbits_le32(&denali_phy[196], 0xFFFFFC00,
2699 denali_phy_params[196] & 0xFFFFFC00);
2700 sdram_copy_to_reg((u32 *)&denali_phy[197],
2701 (u32 *)&denali_phy_params[197],
2702 (207 - 196) * 4);
2703
2704 sdram_copy_to_reg((u32 *)&denali_phy[320],
2705 (u32 *)&denali_phy_params[320],
2706 (323 - 319) * 4);
2707 clrsetbits_le32(&denali_phy[324], 0xFFFFFC00,
2708 denali_phy_params[324] & 0xFFFFFC00);
2709 sdram_copy_to_reg((u32 *)&denali_phy[325],
2710 (u32 *)&denali_phy_params[325],
2711 (335 - 324) * 4);
2712
2713 sdram_copy_to_reg((u32 *)&denali_phy[448],
2714 (u32 *)&denali_phy_params[448],
2715 (451 - 447) * 4);
2716 clrsetbits_le32(&denali_phy[452], 0xFFFFFC00,
2717 denali_phy_params[452] & 0xFFFFFC00);
2718 sdram_copy_to_reg((u32 *)&denali_phy[453],
2719 (u32 *)&denali_phy_params[453],
2720 (463 - 452) * 4);
2721
2722 /* phy_two_cyc_preamble_X */
2723 clrsetbits_le32(&denali_phy[7], 0x3 << 24,
2724 denali_phy_params[7] & (0x3 << 24));
2725 clrsetbits_le32(&denali_phy[135], 0x3 << 24,
2726 denali_phy_params[135] & (0x3 << 24));
2727 clrsetbits_le32(&denali_phy[263], 0x3 << 24,
2728 denali_phy_params[263] & (0x3 << 24));
2729 clrsetbits_le32(&denali_phy[391], 0x3 << 24,
2730 denali_phy_params[391] & (0x3 << 24));
2731
2732 /* speed */
2733 if (f1_sdram_params->base.ddr_freq < 400 * MHz)
2734 speed = 0x0;
2735 else if (f1_sdram_params->base.ddr_freq < 800 * MHz)
2736 speed = 0x1;
2737 else if (f1_sdram_params->base.ddr_freq < 1200 * MHz)
2738 speed = 0x2;
2739
2740 /* PHY_924 PHY_PAD_FDBK_DRIVE */
2741 clrsetbits_le32(&denali_phy[924],
2742 0x3 << 21, speed << 21);
2743 /* PHY_926 PHY_PAD_DATA_DRIVE */
2744 clrsetbits_le32(&denali_phy[926],
2745 0x3 << 9, speed << 9);
2746 /* PHY_927 PHY_PAD_DQS_DRIVE */
2747 clrsetbits_le32(&denali_phy[927],
2748 0x3 << 9, speed << 9);
2749 /* PHY_928 PHY_PAD_ADDR_DRIVE */
2750 clrsetbits_le32(&denali_phy[928],
2751 0x3 << 17, speed << 17);
2752 /* PHY_929 PHY_PAD_CLK_DRIVE */
2753 clrsetbits_le32(&denali_phy[929],
2754 0x3 << 17, speed << 17);
2755 /* PHY_935 PHY_PAD_CKE_DRIVE */
2756 clrsetbits_le32(&denali_phy[935],
2757 0x3 << 17, speed << 17);
2758 /* PHY_937 PHY_PAD_RST_DRIVE */
2759 clrsetbits_le32(&denali_phy[937],
2760 0x3 << 17, speed << 17);
2761 /* PHY_939 PHY_PAD_CS_DRIVE */
2762 clrsetbits_le32(&denali_phy[939],
2763 0x3 << 17, speed << 17);
2764
2765 if (f1_sdram_params->base.dramtype == LPDDR4) {
2766 read_mr(dram->chan[channel].pctl, 1, 5, &mr5);
2767 set_ds_odt(&dram->chan[channel], f1_sdram_params, 1, 0, mr5);
2768 set_ds_odt(&dram->chan[channel], f1_sdram_params, 1, 1, mr5);
2769
2770 ctl_fn = get_ctl_fn(f1_sdram_params, fn);
2771 set_lp4_dq_odt(&dram->chan[channel], f1_sdram_params,
2772 ctl_fn, 1, 1, 0, mr5);
2773 set_lp4_ca_odt(&dram->chan[channel], f1_sdram_params,
2774 ctl_fn, 1, 1, 0, mr5);
2775 set_lp4_MR3(&dram->chan[channel], f1_sdram_params,
2776 ctl_fn, 1, 0, mr5);
2777 set_lp4_MR12(&dram->chan[channel], f1_sdram_params,
2778 ctl_fn, 1, 0, mr5);
2779 set_lp4_MR14(&dram->chan[channel], f1_sdram_params,
2780 ctl_fn, 1, 0, mr5);
2781
2782 set_lp4_dq_odt(&dram->chan[channel], f1_sdram_params,
2783 ctl_fn, 1, 1, 1, mr5);
2784 set_lp4_ca_odt(&dram->chan[channel], f1_sdram_params,
2785 ctl_fn, 1, 1, 1, mr5);
2786 set_lp4_MR3(&dram->chan[channel], f1_sdram_params,
2787 ctl_fn, 1, 1, mr5);
2788 set_lp4_MR12(&dram->chan[channel], f1_sdram_params,
2789 ctl_fn, 1, 1, mr5);
2790 set_lp4_MR14(&dram->chan[channel], f1_sdram_params,
2791 ctl_fn, 1, 1, mr5);
2792
2793 /*
2794 		 * if phy_sw_master_mode_X is not in bypass mode,
2795 		 * clear PHY_SLICE_PWR_RDC_DISABLE.
2796 		 * NOTE: must use f1_sdram_params, not ddr_publ_regs
2797 */
2798 if (!((denali_phy_params[86] >> 8)
2799 & (1 << 2))) {
2800 clrbits_le32(&denali_phy[10], 1 << 16);
2801 clrbits_le32(&denali_phy[138], 1 << 16);
2802 clrbits_le32(&denali_phy[266], 1 << 16);
2803 clrbits_le32(&denali_phy[394], 1 << 16);
2804 }
2805
2806 /*
2807 		 * when PHY_PER_CS_TRAINING_EN=1, W2W_DIFFCS_DLY_Fx can't be
2808 		 * smaller than 8
2809 		 * NOTE: must use f1_sdram_params, not ddr_publ_regs
2810 */
2811 if ((denali_phy_params[84] >> 16) & 1) {
2812 if (((readl(&denali_ctl[217 + ctl_fn]) >>
2813 16) & 0x1f) < 8)
2814 clrsetbits_le32(&denali_ctl[217 + ctl_fn],
2815 0x1f << 16,
2816 8 << 16);
2817 }
2818 }
2819 }
2820
2821 static void dram_set_phy_fn(struct dram_info *dram,
2822 struct rk3399_sdram_params *sdram_params, u32 fn,
2823 struct rk3399_sdram_params *f1_sdram_params)
2824 {
2825 u32 channel;
2826
2827 for (channel = 0; channel < 2; channel++)
2828 dram_copy_phy_fn(dram, sdram_params, fn, f1_sdram_params,
2829 channel);
2830 }
2831
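/*
 * Switch the DRAM to a new frequency set point: stall and idle the bus
 * masters through the PMU, start the CIC handshake, change the DDR PLL rate,
 * complete the handshake, release the idle requests and retrain both
 * channels (except for the LPDDR4 f2 set point, which cannot be trained).
 */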
2832 static int dram_set_rate(struct dram_info *dram,
2833 struct rk3399_sdram_params *sdram_params,
2834 u32 fn, u32 hz)
2835 {
2836 u32 channel;
2837 int ret_clk, ret[2];
2838
2839 /* cci idle req stall */
2840 writel(0x70007, &dram->grf->soc_con0);
2841 /* enable all clk */
2842 setbits_le32(&dram->pmu->pmu_noc_auto_ena, (0x3 << 7));
2843 /* idle */
2844 setbits_le32(&dram->pmu->pmu_bus_idle_req, (0x3 << 18));
2845 while ((readl(&dram->pmu->pmu_bus_idle_st) & (0x3 << 18))
2846 != (0x3 << 18))
2847 ;
2848
2849 /* change freq */
2850 writel((((0x3 << 4) | (1 << 2) | 1) << 16) |
2851 (fn << 4) | (1 << 2) | 1, &dram->cic->cic_ctrl0);
2852 while (!(readl(&dram->cic->cic_status0) & (1 << 2)))
2853 ;
2854
2855 ret_clk = clk_set_rate(&dram->ddr_clk, hz);
2856 if (ret_clk < 0) {
2857 printf("%s clk set failed %d\n", __func__, ret_clk);
2858 return ret_clk;
2859 }
2860
2861 writel(0x20002, &dram->cic->cic_ctrl0);
2862 while (!(readl(&dram->cic->cic_status0) & (1 << 0)))
2863 ;
2864
2865 /* deidle */
2866 clrbits_le32(&dram->pmu->pmu_bus_idle_req, (0x3 << 18));
2867 while (readl(&dram->pmu->pmu_bus_idle_st) & (0x3 << 18))
2868 ;
2869
2870 /* clear enable all clk */
2871 clrbits_le32(&dram->pmu->pmu_noc_auto_ena, (0x3 << 7));
2872
2873 	/* LPDDR4 f2 cannot do training, all training would fail */
2874 if (!(sdram_params->base.dramtype == LPDDR4 && fn == 2)) {
2875 for (channel = 0; channel < 2; channel++) {
2876 if (!(sdram_params->ch[channel].cap_info.col))
2877 continue;
2878 ret[channel] = data_training(&dram->chan[channel],
2879 channel, sdram_params,
2880 PI_FULL_TRAINING);
2881 }
2882 for (channel = 0; channel < 2; channel++) {
2883 if (!(sdram_params->ch[channel].cap_info.col))
2884 continue;
2885 if (ret[channel])
2886 printf("channel %d training failed!\n",
2887 channel);
2888 else
2889 printf("channel %d training pass\n", channel);
2890 }
2891 }
2892
2893 return 0;
2894 }
2895
2896 static struct rk3399_sdram_params *g_sdram_params;
2897 static void set_rate0(struct dram_info *dram)
2898 {
2899 u32 ctl_fn;
2900 u32 phy_fn;
2901
2902 ctl_fn = 0;
2903 phy_fn = get_phy_fn(g_sdram_params, ctl_fn);
2904 dram_set_phy_fn(dram, g_sdram_params, phy_fn, &dfs_configs[ctl_fn]);
2905 dram_set_rate(dram, g_sdram_params, ctl_fn,
2906 dfs_configs[ctl_fn].base.ddr_freq);
2907 printf("change freq to %d MHz %d, %d\n",
2908 dfs_configs[ctl_fn].base.ddr_freq / MHZ, ctl_fn, phy_fn);
2909 }
2910
2911 static void set_rate1(struct dram_info *dram)
2912 {
2913 u32 ctl_fn;
2914 u32 phy_fn;
2915
2916 ctl_fn = 1;
2917 phy_fn = get_phy_fn(g_sdram_params, ctl_fn);
2918 dram_set_phy_fn(dram, g_sdram_params, phy_fn, &dfs_configs[ctl_fn]);
2919 dram_set_rate(dram, g_sdram_params, ctl_fn,
2920 dfs_configs[ctl_fn].base.ddr_freq);
2921 printf("change freq to %d MHz %d, %d\n",
2922 dfs_configs[ctl_fn].base.ddr_freq / MHZ, ctl_fn, phy_fn);
2923 }
2924
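/*
 * Top-level initialization: bring up PHY and controller per channel, detect
 * the rank count (LPDDR4 via mode-register reads, other types via read gate
 * training), detect the geometry of each populated channel, program the
 * address mapping and stride, then switch to the target frequency (PHY
 * index 1 for DDR3/LPDDR3, or the LPDDR4 DFS set points via set_rate0/1()).
 */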
2925 static int sdram_init(struct dram_info *dram,
2926 struct rk3399_sdram_params *sdram_params)
2927 {
2928 unsigned char dramtype = sdram_params->base.dramtype;
2929 unsigned int ddr_freq = sdram_params->base.ddr_freq;
2930 int channel;
2931 u32 rank;
2932 int ch;
2933 u32 tmp;
2934 u32 training_flag;
2935
2936 debug("Starting SDRAM initialization...\n");
2937
2938 if ((dramtype == DDR3 && ddr_freq > 933) ||
2939 (dramtype == LPDDR3 && ddr_freq > 933) ||
2940 (dramtype == LPDDR4 && ddr_freq > 800)) {
2941 		debug("SDRAM frequency is too high!\n");
2942 return -E2BIG;
2943 }
2944
2945 /* detect rank */
2946 for (ch = 0; ch < 2; ch++) {
2947 sdram_params->ch[ch].cap_info.rank = 2;
2948 for (rank = 2; rank != 0; rank--) {
2949 for (channel = 0; channel < 2; channel++) {
2950 const struct chan_info *chan =
2951 &dram->chan[channel];
2952 struct rk3399_cru *cru = dram->cru;
2953 struct rk3399_ddr_publ_regs *publ = chan->publ;
2954
2955 phy_pctrl_reset(cru, channel);
2956 phy_dll_bypass_set(publ, ddr_freq);
2957 pctl_cfg(chan, channel, sdram_params);
2958 }
2959
2960 /* start to trigger initialization */
2961 pctl_start(dram, sdram_params, 3);
2962
2963 			/* LPDDR2/LPDDR3 needs to wait for DAI to complete, max 10us */
2964 if (dramtype == LPDDR3)
2965 udelay(10);
2966
2967 tmp = (rank == 2) ? 3 : 1;
2968 dram_set_cs(&dram->chan[ch], tmp, 2048,
2969 sdram_params->base.dramtype);
2970 sdram_params->ch[ch].cap_info.rank = rank;
2971 if (sdram_params->base.dramtype == LPDDR4) {
2972 				/* two ranks: read_mr() on cs1;
2973 				 * one rank: read_mr() on cs0
2974 */
2975 if (!read_mr_for_detect(dram, ch, rank,
2976 sdram_params))
2977 break;
2978 } else {
2979 /*
2980 				 * LPDDR3 CA training must be triggered
2981 				 * before other training.
2982 				 * DDR3 does not have CA training.
2983 				 * LPDDR4 still needs confirmation here!
2984 */
2985 if (sdram_params->base.dramtype == LPDDR3)
2986 training_flag = PI_CA_TRAINING |
2987 PI_READ_GATE_TRAINING;
2988 else
2989 training_flag = PI_READ_GATE_TRAINING;
2990 if (!(data_training(&dram->chan[ch], ch,
2991 sdram_params,
2992 training_flag)))
2993 break;
2994 }
2995 }
2996 sdram_params->ch[ch].cap_info.rank = rank;
2997 }
2998
2999 sdram_params->base.num_channels = 0;
3000 for (channel = 0; channel < 2; channel++) {
3001 const struct chan_info *chan = &dram->chan[channel];
3002 struct sdram_cap_info *cap_info =
3003 &sdram_params->ch[channel].cap_info;
3004
3005 if (cap_info->rank == 0) {
3006 			clear_channel_params(sdram_params, channel);
3007 continue;
3008 } else {
3009 sdram_params->base.num_channels++;
3010 }
3011
3012 printf("Channel ");
3013 printf(channel ? "1: " : "0: ");
3014
3015 if (channel == 0)
3016 set_ddr_stride(dram->pmusgrf, 0x17);
3017 else
3018 set_ddr_stride(dram->pmusgrf, 0x18);
3019
3020 if (dram_detect_cap(dram, sdram_params, channel)) {
3021 printf("Cap error!\n");
3022 continue;
3023 }
3024
3025 sdram_print_ddr_info(cap_info, &sdram_params->base, 0);
3026 set_memory_map(chan, channel, sdram_params);
3027 cap_info->ddrconfig =
3028 calculate_ddrconfig(sdram_params, channel);
3029 if (-1 == cap_info->ddrconfig) {
3030 			printf("no ddrconfig found, capacity not supported!\n");
3031 continue;
3032 }
3033 set_ddrconfig(chan, sdram_params, channel, cap_info->ddrconfig);
3034 set_cap_relate_config(chan, sdram_params, channel);
3035 }
3036
3037 if (sdram_params->base.num_channels == 0) {
3038 sdram_print_dram_type(sdram_params->base.dramtype);
3039 printf(" %dMHz\n", sdram_params->base.ddr_freq);
3040 return -1;
3041 }
3042
3043 sdram_params->base.stride = calculate_stride(sdram_params);
3044 dram_all_config(dram, sdram_params);
3045
3046 if (sdram_params->base.dramtype != LPDDR4)
3047 switch_to_phy_index1(dram, sdram_params);
3048
3049 if (sdram_params->base.dramtype == LPDDR4) {
3050 g_sdram_params = sdram_params;
3051 set_rate0(dram);
3052 set_rate1(dram);
3053 }
3054
3055 	debug("Finished SDRAM initialization\n");
3056 return 0;
3057 }
3058
3059 static int rk3399_dmc_ofdata_to_platdata(struct udevice *dev)
3060 {
3061 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
3062 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
3063 int ret;
3064
3065 ret = dev_read_u32_array(dev, "rockchip,sdram-params",
3066 (u32 *)&plat->sdram_params,
3067 sizeof(plat->sdram_params) / sizeof(u32));
3068 if (ret) {
3069 printf("%s: Cannot read rockchip,sdram-params %d\n",
3070 __func__, ret);
3071 return ret;
3072 }
3073 ret = regmap_init_mem(dev, &plat->map);
3074 if (ret)
3075 printf("%s: regmap failed %d\n", __func__, ret);
3076
3077 #endif
3078 return 0;
3079 }
3080
3081 #if CONFIG_IS_ENABLED(OF_PLATDATA)
3082 static int conv_of_platdata(struct udevice *dev)
3083 {
3084 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
3085 struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
3086 int ret;
3087
3088 ret = regmap_init_mem_platdata(dev, dtplat->reg,
3089 ARRAY_SIZE(dtplat->reg) / 2,
3090 &plat->map);
3091 if (ret)
3092 return ret;
3093
3094 return 0;
3095 }
3096 #endif
3097
3098 static int rk3399_dmc_init(struct udevice *dev)
3099 {
3100 struct dram_info *priv = dev_get_priv(dev);
3101 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
3102 int ret;
3103 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
3104 struct rk3399_sdram_params *params = &plat->sdram_params;
3105 #else
3106 struct dtd_rockchip_rk3399_dmc *dtplat = &plat->dtplat;
3107 struct rk3399_sdram_params *params =
3108 (void *)dtplat->rockchip_sdram_params;
3109
3110 ret = conv_of_platdata(dev);
3111 if (ret)
3112 return ret;
3113 #endif
3114
3115 priv->cic = syscon_get_first_range(ROCKCHIP_SYSCON_CIC);
3116 priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
3117 priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);
3118 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
3119 priv->pmusgrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUSGRF);
3120 priv->pmucru = rockchip_get_pmucru();
3121 priv->cru = rockchip_get_cru();
3122 priv->chan[0].pctl = regmap_get_range(plat->map, 0);
3123 priv->chan[0].pi = regmap_get_range(plat->map, 1);
3124 priv->chan[0].publ = regmap_get_range(plat->map, 2);
3125 priv->chan[0].msch = regmap_get_range(plat->map, 3);
3126 priv->chan[1].pctl = regmap_get_range(plat->map, 4);
3127 priv->chan[1].pi = regmap_get_range(plat->map, 5);
3128 priv->chan[1].publ = regmap_get_range(plat->map, 6);
3129 priv->chan[1].msch = regmap_get_range(plat->map, 7);
3130
3131 debug("con reg %p %p %p %p %p %p %p %p\n",
3132 priv->chan[0].pctl, priv->chan[0].pi,
3133 priv->chan[0].publ, priv->chan[0].msch,
3134 priv->chan[1].pctl, priv->chan[1].pi,
3135 priv->chan[1].publ, priv->chan[1].msch);
3136 debug("cru %p, cic %p, grf %p, sgrf %p, pmucru %p, pmu %p\n", priv->cru,
3137 priv->cic, priv->pmugrf, priv->pmusgrf, priv->pmucru, priv->pmu);
3138 #if CONFIG_IS_ENABLED(OF_PLATDATA)
3139 ret = clk_get_by_index_platdata(dev, 0, dtplat->clocks, &priv->ddr_clk);
3140 #else
3141 ret = clk_get_by_index(dev, 0, &priv->ddr_clk);
3142 #endif
3143 if (ret) {
3144 printf("%s clk get failed %d\n", __func__, ret);
3145 return ret;
3146 }
3147 ret = clk_set_rate(&priv->ddr_clk, params->base.ddr_freq * MHz);
3148 if (ret < 0) {
3149 printf("%s clk set failed %d\n", __func__, ret);
3150 return ret;
3151 }
3152 ret = sdram_init(priv, params);
3153 if (ret < 0) {
3154 printf("%s DRAM init failed %d\n", __func__, ret);
3155 return ret;
3156 }
3157
3158 return 0;
3159 }
3160 #endif
3161
3162 static int rk3399_dmc_probe(struct udevice *dev)
3163 {
3164 #ifdef CONFIG_TPL_BUILD
3165 if (rk3399_dmc_init(dev))
3166 return 0;
3167 #else
3168 struct dram_info *priv = dev_get_priv(dev);
3169
3170 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
3171 debug("%s: pmugrf=%p\n", __func__, priv->pmugrf);
3172 priv->info.base = CONFIG_SYS_SDRAM_BASE;
3173 priv->info.size =
3174 rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg2);
3175 #ifdef CONFIG_SPL_BUILD
3176 	struct ddr_param ddr_param;
3177 
3178 	ddr_param.count = 1;
3179 	ddr_param.para[0] = priv->info.base;
3180 	ddr_param.para[1] = priv->info.size;
3181 	rockchip_setup_ddr_param(&ddr_param);
3182 #endif
3183
3184 #endif
3185 return 0;
3186 }
3187
3188 static int rk3399_dmc_get_info(struct udevice *dev, struct ram_info *info)
3189 {
3190 struct dram_info *priv = dev_get_priv(dev);
3191
3192 *info = priv->info;
3193
3194 return 0;
3195 }
3196
3197 static struct ram_ops rk3399_dmc_ops = {
3198 .get_info = rk3399_dmc_get_info,
3199 };
3200
3201 static const struct udevice_id rk3399_dmc_ids[] = {
3202 { .compatible = "rockchip,rk3399-dmc" },
3203 { }
3204 };
3205
3206 U_BOOT_DRIVER(dmc_rk3399) = {
3207 .name = "rockchip_rk3399_dmc",
3208 .id = UCLASS_RAM,
3209 .of_match = rk3399_dmc_ids,
3210 .ops = &rk3399_dmc_ops,
3211 #ifdef CONFIG_TPL_BUILD
3212 .ofdata_to_platdata = rk3399_dmc_ofdata_to_platdata,
3213 #endif
3214 .probe = rk3399_dmc_probe,
3215 .priv_auto_alloc_size = sizeof(struct dram_info),
3216 #ifdef CONFIG_TPL_BUILD
3217 .platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
3218 #endif
3219 };
3220