1 /*
2 * (C) Copyright 2017 Rockchip Electronics Co., Ltd.
3 *
4 * SPDX-License-Identifier: GPL-2.0
5 */
6 #include <common.h>
7 #include <clk.h>
8 #include <debug_uart.h>
9 #include <dm.h>
10 #include <dt-structs.h>
11 #include <ram.h>
12 #include <regmap.h>
13 #include <syscon.h>
14 #include <asm/io.h>
15 #include <asm/arch/clock.h>
16 #include <asm/arch/cru_rk3328.h>
17 #include <asm/arch/grf_rk3328.h>
18 #include <asm/arch/rockchip_dmc.h>
19 #include <asm/arch/sdram.h>
20 #include <asm/arch/sdram_rk3328.h>
21 #include <asm/arch/uart.h>
22
DECLARE_GLOBAL_DATA_PTR;
/*
 * Private driver state.  The TPL build owns the full set of controller,
 * PHY, clock and scheduler register blocks; later boot stages only need
 * the GRF to decode the capacity the TPL left behind in os_reg[2]/[3].
 */
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct ddr_pctl_regs *pctl;		/* DDR controller (upctl2) registers */
	struct ddr_phy_regs *phy;		/* DDR PHY registers */
	struct clk ddr_clk;			/* DDR clock handle */
	struct rk3328_cru *cru;			/* clock & reset unit */
	struct msch_regs *msch;			/* memory service scheduler (NoC) */
	struct rk3328_ddr_grf_regs *ddr_grf;	/* DDR general register file */
#endif
	struct ram_info info;			/* base/size reported to the RAM uclass */
	struct rk3328_grf_regs *grf;		/* general register file */
};
36
37 #ifdef CONFIG_TPL_BUILD
38
/* Working copy of the channel geometry/timings, taken from platform data */
struct rk3328_sdram_channel sdram_ch;

struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3328_dmc dtplat;		/* of-platdata node data */
#else
	struct rk3328_sdram_params sdram_params;	/* parsed "rockchip,sdram-params" */
#endif
	struct regmap *map;	/* ranges: phy, pctl, grf, cru, msch, ddr_grf */
};
49
50 #if CONFIG_IS_ENABLED(OF_PLATDATA)
conv_of_platdata(struct udevice * dev)51 static int conv_of_platdata(struct udevice *dev)
52 {
53 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
54 struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
55 int ret;
56
57 ret = regmap_init_mem_platdata(dev, dtplat->reg,
58 ARRAY_SIZE(dtplat->reg) / 2,
59 &plat->map);
60 if (ret)
61 return ret;
62
63 return 0;
64 }
65 #endif
66
/*
 * Assert/de-assert the DDR controller and PHY soft resets.
 * For each argument, 1 asserts the corresponding reset and 0 releases it.
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	u32 con5;

	con5 = ddrctrl_srstn_req(ctl_srstn) | ddrctrl_psrstn_req(ctl_psrstn) |
	       ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn);
	writel(con5, &dram->cru->softrst_con[5]);
	/* the controller async reset follows the controller core reset */
	writel(ddrctrl_asrstn_req(ctl_srstn), &dram->cru->softrst_con[9]);
}
76
rkclk_set_dpll(struct dram_info * dram,unsigned int hz)77 static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
78 {
79 unsigned int refdiv, postdiv1, postdiv2, fbdiv;
80 int delay = 1000;
81 u32 mhz = hz / MHZ;
82
83 refdiv = 1;
84 if (mhz <= 300) {
85 postdiv1 = 4;
86 postdiv2 = 2;
87 } else if (mhz <= 400) {
88 postdiv1 = 6;
89 postdiv2 = 1;
90 } else if (mhz <= 600) {
91 postdiv1 = 4;
92 postdiv2 = 1;
93 } else if (mhz <= 800) {
94 postdiv1 = 3;
95 postdiv2 = 1;
96 } else if (mhz <= 1600) {
97 postdiv1 = 2;
98 postdiv2 = 1;
99 } else {
100 postdiv1 = 1;
101 postdiv2 = 1;
102 }
103 fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;
104
105 writel(((0x1 << 4) << 16) | (0 << 4), &dram->cru->mode_con);
106 writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->dpll_con[0]);
107 writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
108 &dram->cru->dpll_con[1]);
109
110 while (delay > 0) {
111 udelay(1);
112 if (LOCK(readl(&dram->cru->dpll_con[1])))
113 break;
114 delay--;
115 }
116
117 writel(((0x1 << 4) << 16) | (1 << 4), &dram->cru->mode_con);
118 }
119
/*
 * Select the DPLL as the DDR clock source and program it.
 * The Inno DDR PHY needs twice the DRAM frequency at its input
 * (per the comment inherited from the vendor code).
 */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;

	/* choose DPLL for ddr clk source */
	clrbits_le32(PHY_REG(phy_base, 0xef), 1 << 7);

	/* for inno ddr phy need 2*freq */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ * 2);
}
131
132 /* return ddrconfig value
133 * (-1), find ddrconfig fail
134 * other, the ddrconfig value
135 * only support cs0_row >= cs1_row
136 */
/* return ddrconfig value
 * (-1), find ddrconfig fail
 * other, the ddrconfig value
 * only support cs0_row >= cs1_row
 *
 * Encodes the detected geometry (rank, row, col, bank, bus width, die
 * width) into the bit layout used by the ddr4_cfg_2_rbc / ddr_cfg_2_rbc
 * tables and searches for a matching address-map index.  Note ddrconf
 * is unsigned, so the -1 "not found" value deliberately trips the
 * (ddrconf > 20) error check at the end.
 */
static unsigned int calculate_ddrconfig(
		struct rk3328_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;

	if (sdram_params->base.dramtype == DDR4) {
		/* when DDR_TEST, CS always at MSB position for easy test */
		if (cs == 2 && row == cs1_row) {
			/* include 2cs cap both 2^n or both (2^n - 2^(n-2)) */
			tmp = ((row - 13) << 3) | (1 << 2) | (bw & 0x2) |
			      die_bw;
			/* configs 17..20 are the CS-at-MSB DDR4 entries */
			for (i = 17; i < 21; i++) {
				if (((tmp & 0x7) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
				    ((tmp & 0x3c) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x3c))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 6) | ((row - 13) << 3) | (bw & 0x2) | die_bw;
		/* configs 10..16 are the regular DDR4 entries */
		for (i = 10; i < 17; i++) {
			if (((tmp & 0x7) == (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
			    ((tmp & 0x3c) <= (ddr4_cfg_2_rbc[i - 10] & 0x3c)) &&
			    ((tmp & 0x40) <= (ddr4_cfg_2_rbc[i - 10] & 0x40))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		/* 2-bank parts have a single dedicated config */
		if (bank == 2) {
			ddrconf = 8;
			goto out;
		}

		/* when DDR_TEST, CS always at MSB position for easy test */
		if (cs == 2 && row == cs1_row) {
			/* include 2cs cap both 2^n or both (2^n - 2^(n-2)) */
			for (i = 5; i < 8; i++) {
				if ((bw + col - 11) == (ddr_cfg_2_rbc[i] &
							0x3)) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((row - 13) << 4) | (1 << 2) | ((bw + col - 11) << 0);
		for (i = 0; i < 5; i++)
			if (((tmp & 0xf) == (ddr_cfg_2_rbc[i] & 0xf)) &&
			    ((tmp & 0x30) <= (ddr_cfg_2_rbc[i] & 0x30))) {
				ddrconf = i;
				goto out;
			}
	}

out:
	/* unsigned compare: catches both "not found" (-1) and bad indexes */
	if (ddrconf > 20)
		printf("calculate ddrconfig error\n");

	return ddrconf;
}
213
214 /*******
215 * calculate controller dram address map, and setting to register.
216 * argument sdram_ch.ddrconf must be right value before
217 * call this function.
218 *******/
set_ctl_address_map(struct dram_info * dram,struct rk3328_sdram_params * sdram_params)219 static void set_ctl_address_map(struct dram_info *dram,
220 struct rk3328_sdram_params *sdram_params)
221 {
222 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
223 void __iomem *pctl_base = dram->pctl;
224
225 sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
226 &addrmap[cap_info->ddrconfig][0], 9 * 4);
227 if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
228 setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
229 if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
230 setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
231
232 if (cap_info->rank == 1)
233 clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
234 }
235
/*
 * Run PHY data (gate) training for one chip select with the controller
 * quiesced: auto low-power and ZQCS/auto-refresh are disabled for the
 * duration of the training and restored afterwards.
 *
 * @cs: chip select to train
 * @dramtype: DRAM type (DDR3/DDR4/LPDDR3)
 * Return: result of phy_data_training() (0 on success)
 */
static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 dis_auto_zq = 0;
	u32 pwrctl;
	int ret;	/* was u32: mismatched the int return type */

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	ret = phy_data_training(dram->phy, cs, dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	return ret;
}
258
rx_deskew_switch_adjust(struct dram_info * dram)259 static void rx_deskew_switch_adjust(struct dram_info *dram)
260 {
261 u32 i, deskew_val;
262 u32 gate_val = 0;
263 void __iomem *phy_base = dram->phy;
264
265 for (i = 0; i < 4; i++)
266 gate_val = MAX(readl(PHY_REG(phy_base, 0xfb + i)), gate_val);
267
268 deskew_val = (gate_val >> 3) + 1;
269 deskew_val = (deskew_val > 0x1f) ? 0x1f : deskew_val;
270 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xc, (deskew_val & 0x3) << 2);
271 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x7 << 4,
272 (deskew_val & 0x1c) << 2);
273 }
274
/* Set the TX deskew switch field (PHY reg 0x6e bits [1:0]) to 1 */
static void tx_deskew_switch_adjust(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0x3, 1);
}
281
/* Tell the memory scheduler which address-mapping (ddrconf) is in use */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->ddrconf);
}
286
sdram_msch_config(struct msch_regs * msch,struct sdram_msch_timings * noc_timings)287 static void sdram_msch_config(struct msch_regs *msch,
288 struct sdram_msch_timings *noc_timings)
289 {
290 writel(noc_timings->ddrtiming.d32, &msch->ddrtiming);
291
292 writel(noc_timings->ddrmode.d32, &msch->ddrmode);
293 writel(noc_timings->readlatency, &msch->readlatency);
294
295 writel(noc_timings->activate.d32, &msch->activate);
296 writel(noc_timings->devtodev.d32, &msch->devtodev);
297 writel(noc_timings->ddr4timing.d32, &msch->ddr4_timing);
298 writel(noc_timings->agingx0, &msch->aging0);
299 writel(noc_timings->agingx0, &msch->aging1);
300 writel(noc_timings->agingx0, &msch->aging2);
301 writel(noc_timings->agingx0, &msch->aging3);
302 writel(noc_timings->agingx0, &msch->aging4);
303 writel(noc_timings->agingx0, &msch->aging5);
304 }
305
/*
 * Apply the final system-level DRAM configuration: select the address
 * map, encode the detected geometry into GRF os_reg[2]/os_reg[3]
 * (decoded later by rockchip_sdram_size()), and program the NoC
 * timings from the working channel copy.
 */
static void dram_all_config(struct dram_info *dram,
			    struct rk3328_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->grf->os_reg[2]);
	writel(sys_reg3, &dram->grf->os_reg[3]);

	sdram_msch_config(dram->msch, &sdram_ch.noc_timings);
}
321
/*
 * Enable controller clock auto-gating and, depending on the SR_IDLE /
 * PD_IDLE build-time settings, self-refresh and power-down entry.
 *
 * NOTE(review): the ddr_grf_con writes look like Rockchip write-mask
 * registers (upper 16 bits select which lower bits take effect), so the
 * two con[2] writes should touch disjoint bit groups — confirm against
 * the RK3328 TRM.
 */
static void enable_low_power(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;

	/* enable upctl2 axi clock auto gating */
	writel(0x00800000, &dram->ddr_grf->ddr_grf_con[0]);
	writel(0x20012001, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable upctl2 core clock auto gating */
	writel(0x001e001a, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable sr, pd: PWRCTL bit1 = power-down, bit0 = self-refresh */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	/* bit 3: presumably en_dfi_dram_clk_disable — confirm in upctl2 spec */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
343
/*
 * Bring up the DDR subsystem: reset sequencing, PLL configuration,
 * controller and PHY register programming, then gate training.
 * The reset/release ordering below is hardware-mandated — do not
 * reorder the rkclk_ddr_reset() steps.
 *
 * @pre_init: 1 for the first (capacity-detection) pass; the deskew
 *            adjustments are only applied on the final pass (== 0)
 * Return: 0 on success, -1 if gate training fails
 */
static int sdram_init(struct dram_info *dram,
		      struct rk3328_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, &sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* do ddr gate training */
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}
	if (data_training(dram, 1, sdram_params->base.dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}

	/* DDR4: program VrefDQ (range 0x3, value 5670) into mode registers */
	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	if (pre_init == 0) {
		rx_deskew_switch_adjust(dram);
		tx_deskew_switch_adjust(dram);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
403
/*
 * Probe the real DRAM geometry (col, bank, row, rank, bus width) via
 * the sdram_detect_* helpers plus a data-training pass on CS1.
 *
 * @channel: unused here (single-channel controller)
 * Return: 0 on success; (u64)-1 if col or row detection fails —
 *         callers should test for nonzero, not for a negative value
 */
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rk3328_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* detect bg for ddr4; col/bank are fixed by the standard */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, coltmp);
	}

	/* detect row */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect row_3_4 */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* bw and cs detect using data training: CS1 trains => 2 ranks */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	/* bus width is fixed at 32-bit (bw code 2) on this platform */
	bw = 2;
	cap_info->bw = bw;

	/* assume a symmetric second rank when present */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
476
sdram_init_detect(struct dram_info * dram,struct rk3328_sdram_params * sdram_params)477 static int sdram_init_detect(struct dram_info *dram,
478 struct rk3328_sdram_params *sdram_params)
479 {
480 u32 sys_reg = 0;
481 u32 sys_reg3 = 0;
482 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
483
484 debug("Starting SDRAM initialization...\n");
485
486 memcpy(&sdram_ch, &sdram_params->ch,
487 sizeof(struct rk3328_sdram_channel));
488
489 sdram_init(dram, sdram_params, 1);
490 dram_detect_cap(dram, sdram_params, 0);
491
492 /* modify bw, cs related timing */
493 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
494 sdram_params->base.dramtype);
495
496 if (cap_info->bw == 2)
497 sdram_ch.noc_timings.ddrtiming.b.bwratio = 0;
498 else
499 sdram_ch.noc_timings.ddrtiming.b.bwratio = 1;
500
501 /* reinit sdram by real dram cap */
502 sdram_init(dram, sdram_params, 0);
503
504 /* redetect cs1 row */
505 sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
506 if (cap_info->cs1_row) {
507 sys_reg = readl(&dram->grf->os_reg[2]);
508 sys_reg3 = readl(&dram->grf->os_reg[3]);
509 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
510 sys_reg, sys_reg3, 0);
511 writel(sys_reg, &dram->grf->os_reg[2]);
512 writel(sys_reg3, &dram->grf->os_reg[3]);
513 }
514
515 sdram_print_ddr_info(&sdram_params->ch.cap_info,
516 &sdram_params->base, 0);
517
518 return 0;
519 }
520
rk3328_dmc_init(struct udevice * dev)521 static int rk3328_dmc_init(struct udevice *dev)
522 {
523 struct dram_info *priv = dev_get_priv(dev);
524 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
525 int ret;
526
527 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
528 struct rk3328_sdram_params *params = &plat->sdram_params;
529 #else
530 struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
531 struct rk3328_sdram_params *params =
532 (void *)dtplat->rockchip_sdram_params;
533
534 ret = conv_of_platdata(dev);
535 if (ret)
536 return ret;
537 #endif
538 priv->phy = regmap_get_range(plat->map, 0);
539 priv->pctl = regmap_get_range(plat->map, 1);
540 priv->grf = regmap_get_range(plat->map, 2);
541 priv->cru = regmap_get_range(plat->map, 3);
542 priv->msch = regmap_get_range(plat->map, 4);
543 priv->ddr_grf = regmap_get_range(plat->map, 5);
544
545 debug("%s phy %p pctrl %p grf %p cru %p msch %p ddr_grf %p\n",
546 __func__, priv->phy, priv->pctl, priv->grf, priv->cru,
547 priv->msch, priv->ddr_grf);
548 ret = sdram_init_detect(priv, params);
549 if (ret < 0) {
550 printf("%s DRAM init failed%d\n", __func__, ret);
551 return ret;
552 }
553
554 return 0;
555 }
556
rk3328_dmc_ofdata_to_platdata(struct udevice * dev)557 static int rk3328_dmc_ofdata_to_platdata(struct udevice *dev)
558 {
559 #if !CONFIG_IS_ENABLED(OF_PLATDATA)
560 struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
561 int ret;
562
563 ret = dev_read_u32_array(dev, "rockchip,sdram-params",
564 (u32 *)&plat->sdram_params,
565 sizeof(plat->sdram_params) / sizeof(u32));
566 if (ret) {
567 printf("%s: Cannot read rockchip,sdram-params %d\n",
568 __func__, ret);
569 return ret;
570 }
571 ret = regmap_init_mem(dev, &plat->map);
572 if (ret)
573 printf("%s: regmap failed %d\n", __func__, ret);
574 #endif
575 return 0;
576 }
577
578 #endif
579
/*
 * Driver probe, behaviour varies by build stage:
 *  - TPL: run the full DRAM init.  NOTE(review): a failing init
 *    currently still returns 0, silently swallowing the error —
 *    confirm whether that is intentional.
 *  - SPL / U-Boot proper, pre-relocation: decode the size published in
 *    GRF os_reg[2] by the TPL stage; SPL additionally passes base/size
 *    to rockchip_setup_ddr_param() for later stages.
 *  - post-relocation with CONFIG_ROCKCHIP_DMC: hook up DMC frequency
 *    scaling.
 */
static int rk3328_dmc_probe(struct udevice *dev)
{
	int ret = 0;
#ifdef CONFIG_TPL_BUILD
	if (rk3328_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv;

	if (!(gd->flags & GD_FLG_RELOC)) {
		priv = dev_get_priv(dev);
		priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
		debug("%s: grf=%p\n", __func__, priv->grf);
		priv->info.base = CONFIG_SYS_SDRAM_BASE;
		priv->info.size =
			rockchip_sdram_size((phys_addr_t)&priv->grf->os_reg[2]);
#ifdef CONFIG_SPL_BUILD
		/* "parem" [sic] kept byte-identical; publish base/size */
		struct ddr_param ddr_parem;

		ddr_parem.count = 1;
		ddr_parem.para[0] = priv->info.base;
		ddr_parem.para[1] = priv->info.size;
		rockchip_setup_ddr_param(&ddr_parem);
#endif
	} else {
#if !defined(CONFIG_SPL_BUILD) && defined(CONFIG_ROCKCHIP_DMC)
		ret = rockchip_dmcfreq_probe(dev);
#endif
	}
#endif
	return ret;
}
612
rk3328_dmc_get_info(struct udevice * dev,struct ram_info * info)613 static int rk3328_dmc_get_info(struct udevice *dev, struct ram_info *info)
614 {
615 struct dram_info *priv = dev_get_priv(dev);
616
617 *info = priv->info;
618
619 return 0;
620 }
621
/* RAM uclass operations */
static struct ram_ops rk3328_dmc_ops = {
	.get_info = rk3328_dmc_get_info,
};

static const struct udevice_id rk3328_dmc_ids[] = {
	{ .compatible = "rockchip,rk3328-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3328) = {
	.name = "rockchip_rk3328_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3328_dmc_ids,
	.ops = &rk3328_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	/* platform data (DT parse + regmap) only exists in the TPL build */
	.ofdata_to_platdata = rk3328_dmc_ofdata_to_platdata,
#endif
	.probe = rk3328_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};
645