// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_px30.h>
#include <asm/arch/grf_px30.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sdram.h>
#include <asm/arch/sdram_px30.h>

/*
 * The px30 SRAM is small, so CONFIG_TPL_TINY_FRAMEWORK must be defined
 * to reduce the TPL size when building the TPL firmware.
 */
#ifdef CONFIG_TPL_BUILD
#ifndef CONFIG_TPL_TINY_FRAMEWORK
#error please defined CONFIG_TPL_TINY_FRAMEWORK for px30 !!!
#endif
#endif

#ifdef CONFIG_TPL_BUILD

DECLARE_GLOBAL_DATA_PTR;
/* Everything the DDR init code needs: register blocks plus detected info */
struct dram_info {
	struct ddr_pctl_regs *pctl;
	struct ddr_phy_regs *phy;
	struct px30_cru *cru;
	struct msch_regs *msch;
	struct px30_ddr_grf_regs *ddr_grf;
	struct px30_grf *grf;
	struct ram_info info;
	struct px30_pmugrf *pmugrf;
};

/*
 * Fixed peripheral base addresses; TPL runs before the driver model is
 * available, so the blocks are addressed directly (see sdram_init()).
 */
#define PMUGRF_BASE_ADDR		0xFF010000
#define CRU_BASE_ADDR			0xFF2B0000
#define GRF_BASE_ADDR			0xFF140000
#define DDRC_BASE_ADDR			0xFF600000
#define DDR_PHY_BASE_ADDR		0xFF2A0000
#define SERVER_MSCH0_BASE_ADDR		0xFF530000
#define DDR_GRF_BASE_ADDR		0xff630000

struct dram_info dram_info;

/* Built-in timing/config table; the variant is chosen at build time */
struct px30_sdram_params sdram_configs[] = {
#ifdef CONFIG_ROCKCHIP_RK3326
#include "sdram-px30-lpddr3-detect-333.inc"
#else
#include "sdram-px30-ddr3-detect-333.inc"
#endif
};

struct ddr_phy_skew skew = {
#include "sdram-px30-ddr_skew.inc"
};

/*
 * Assert (1) or release (0) the DDR soft resets via the CRU:
 * controller srstn/psrstn (asrstn follows ctl_srstn) in softrst_con[1],
 * PHY srstn/psrstn in softrst_con[2].
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}

/*
 * Re-program the DPLL (cru->pll[1]) to output @hz:
 * park the DPLL mux on the 24MHz crystal, write the dividers, poll the
 * lock bit for up to 1000us, then switch the mux back to the PLL output.
 * fbdiv is derived for the 24MHz reference (integer mode, DSMPD=1).
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;

	refdiv = 1;
	/* pick post-dividers so the VCO stays in range for the target rate */
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	/* for inno ddr phy need 2*freq */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHz * 2);
}

/* return ddrconfig value
 *       (-1), find ddrconfig fail
 *       other, the ddrconfig value
 * only support cs0_row >= cs1_row
 */
static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 bw, die_bw, col, bank;
	u32 i, tmp;
	u32 ddrconf = -1;

	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	bank = cap_info->bk;

	if (sdram_params->base.dramtype == DDR4) {
		/* DDR4 configs are derived, then mapped via d4_rbc_2_d3_rbc */
		if (die_bw == 0)
			ddrconf = 7 + bw;
		else
			ddrconf = 12 - bw;
		ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
	} else {
		/* match (bank, col+bw) signature against the rbc table */
		tmp = ((bank - 2) << 3) | (col + bw - 10);
		for (i = 0; i < 7; i++)
			if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
				ddrconf = i;
				break;
			}
		if (i > 6)
			printascii("calculate ddrconfig error\n");
	}

	return ddrconf;
}

/*
 * calculate controller dram address map, and setting to register.
 * argument sdram_params->ch.ddrconf must be right value before
 * call this function.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 cs_pst, bg, max_row, ddrconf;
	u32 i;

	if (sdram_params->base.dramtype == DDR4)
		/*
		 * DDR4 8bit dram BG = 2(4bank groups),
		 * 16bit dram BG = 1 (2 bank groups)
		 */
		bg = (cap_info->dbw == 0) ? 2 : 1;
	else
		bg = 0;

	/*
	 * cs_pst: total address width below the rank bit
	 * (bw + col + bank group + bank + cs0 rows).
	 * NOTE(review): ADDRMAP0 takes cs_pst - 8 per the uMCTL2 internal
	 * base of HIF bit 8 - confirm against the controller databook.
	 */
	cs_pst = cap_info->bw + cap_info->col +
		 bg + cap_info->bk + cap_info->cs0_row;
	if (cs_pst >= 32 || cap_info->rank == 1)
		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
	else
		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);

	/* for DDR4, translate the d3 config back to the addrmap[] index */
	ddrconf = cap_info->ddrconfig;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
				ddrconf = 7 + i;
				break;
			}
		}
	}

	/* program ADDRMAP1..ADDRMAP8 from the table in one burst */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
			  &addrmap[ddrconf][0], 8 * 4);
	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);

	if (max_row < 12)
		printascii("set addrmap fail\n");
	/* need to disable row ahead of rank by set to 0xf */
	for (i = 17; i > max_row; i--)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
				((i - 12) * 8 / 32) * 4,
				0xf << ((i - 12) * 8 % 32),
				0xf << ((i - 12) * 8 % 32));

	if ((sdram_params->base.dramtype == LPDDR3 ||
	     sdram_params->base.dramtype == LPDDR2) &&
	    cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw != 0x2)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
}

/*
 * Read mode register mr_num of the given rank via the controller;
 * the result is the low byte of DDR GRF status0.
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *ddr_grf_base = dram->ddr_grf;

	pctl_read_mr(dram->pctl, rank, mr_num);

	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}

/* NOTE: classic double-evaluation macros - only used with plain variables */
#define MIN(a, b)	(((a) > (b)) ? (b) : (a))
#define MAX(a, b)	(((a) > (b)) ? (a) : (b))
/*
 * Sanity-check the read-gate training results per byte lane
 * (PHY regs 0xfb..): returns nonzero ((u32)-1) if any lane value
 * falls outside [0x20, 0x80], 0 if the training looks valid.
 */
static u32 check_rd_gate(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	u32 max_val = 0;
	u32 min_val = 0xff;
	u32 gate[4];
	u32 i, bw;

	/* derive the number of byte lanes from the PHY bus-width field */
	bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
	switch (bw) {
	case 0x1:
		bw = 1;
		break;
	case 0x3:
		bw = 2;
		break;
	case 0xf:
	default:
		bw = 4;
		break;
	}

	for (i = 0; i < bw; i++) {
		gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
		max_val = MAX(max_val, gate[i]);
		min_val = MIN(min_val, gate[i]);
	}

	if (max_val > 0x80 || min_val < 0x20)
		return -1;
	else
		return 0;
}

/*
 * Run PHY data training for chip-select @cs with the controller's auto
 * low-power and ZQCS/auto-refresh temporarily disabled, then restore them.
 * Returns the phy_data_training() result (0 = success).
 */
static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 dis_auto_zq = 0;
	u32 pwrctl;
	u32 ret;

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	ret = phy_data_training(dram->phy, cs, dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	return ret;
}

static void dram_set_bw(struct dram_info *dram, u32 bw)
{
	phy_dram_set_bw(dram->phy, bw);
}

/*
 * Program ddrconfig into both byte fields of the MSCH deviceconf
 * register and clear GRF soc_noc_con[1] bits [15:14].
 */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
}

/*
 * Program the memory scheduler (NoC) registers: per-CS device size
 * (capacity in 64MiB units, one byte per rank) and the NoC timing set.
 * The agingx0 value is replicated into all aging registers.
 */
static void sdram_msch_config(struct msch_regs *msch,
			      struct sdram_msch_timings *noc_timings,
			      struct sdram_cap_info *cap_info,
			      struct sdram_base_params *base)
{
	u64 cs_cap[2];

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, base->dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, base->dramtype);
	/* (cap >> 20) / 64: convert bytes -> MiB -> 64MiB units */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
	       (((cs_cap[0] >> 20) / 64) & 0xff),
	       &msch->devicesize);

	writel(noc_timings->ddrtiminga0.d32,
	       &msch->ddrtiminga0);
	writel(noc_timings->ddrtimingb0.d32,
	       &msch->ddrtimingb0);
	writel(noc_timings->ddrtimingc0.d32,
	       &msch->ddrtimingc0);
	writel(noc_timings->devtodev0.d32,
	       &msch->devtodev0);
	writel(noc_timings->ddrmode.d32, &msch->ddrmode);
	writel(noc_timings->ddr4timing.d32,
	       &msch->ddr4timing);
	writel(noc_timings->agingx0, &msch->agingx0);
	writel(noc_timings->agingx0, &msch->aging0);
	writel(noc_timings->agingx0, &msch->aging1);
	writel(noc_timings->agingx0, &msch->aging2);
	writel(noc_timings->agingx0, &msch->aging3);
}

/*
 * Apply the final system-level configuration: ddrconfig, the encoded
 * DRAM geometry in pmugrf os_reg[2]/os_reg[3] (read back later by SPL/
 * U-Boot proper), and the MSCH/NoC timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	sdram_msch_config(dram->msch, &sdram_params->ch.noc_timings, cap_info,
			  &sdram_params->base);
}

/*
 * Enable the hardware low-power features: clock auto-gating in the DDR
 * GRF, the per-dramtype low-power config, PHY clock-off in power-down,
 * and the controller's self-refresh/power-down enables (per SR_IDLE /
 * PD_IDLE configured via pctl_cfg()).
 */
static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* off digit module clock when enter power down */
	setbits_le32(PHY_REG(phy_base, 7), 1 << 7);

	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}

/*
 * pre_init: 0: pre init for dram cap detect
 *           1: detect correct cap(except cs1 row)info, then reinit
 *           2: after reinit, we detect cs1_row, if cs1_row not equal
 *              to cs0_row and cs is in middle on ddrconf map, we need
 *              to reinit dram, then set the correct ddrconf.
 *
 * Full reset/clock/controller/PHY bring-up followed by data training.
 * Returns 0 on success, -1 on training or mode-register check failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3)
		pctl_write_mr(dram->pctl, 3, 11, 3, LPDDR3);

	/* do ddr gate training */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	/* retry until the gate values pass the sanity window */
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	/* verify I/O width via MR8 for LPDDR2/LPDDR3 */
	if (sdram_params->base.dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->base.dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: when 2cs, both 2 cs should be training */
	if (pre_init != 0 && cap_info->rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->base.dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}

/*
 * Probe the real DRAM geometry (col/bank/bg, row, row_3_4, rank, bus
 * width) by pattern tests and data training, filling cap_info.
 * Returns 0 on success, -1 if col or row detection fails.
 */
static int dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	void __iomem *pctl_base = dram->pctl;
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		if (dram_type == LPDDR2)
			rowtmp = 15;
		else
			rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, pctl_base, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		/* DDR4 col/bank are fixed, only the bank group is probed */
		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, pctl_base, coltmp);
	}

	/* detect row */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect row_3_4 */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* bw and cs detect using data training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	dram_set_bw(dram, 2);
	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;
	cap_info->bw = bw;

	/* assume cs1 mirrors cs0 for now; cs1_row is re-detected later */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}

/*
 * Fill @ddr_param with the usable DRAM region(s) derived from the
 * per-CS capacities (scaled by 3/4 when row_3_4 is set).
 * NOTE(review): the para[] layout is consumed by
 * rockchip_setup_ddr_param(); in the row_3_4 + 2-rank case two entries
 * are emitted with cs_cap[0] * 4 / 3 as the cs1 base - confirm the
 * (base, size) ordering against that consumer.
 */
void get_ddr_param(struct px30_sdram_params *sdram_params,
		   struct ddr_param *ddr_param)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 dram_type = sdram_params->base.dramtype;
	u64 cs_cap[2];

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);

	if (cap_info->row_3_4) {
		cs_cap[0] = cs_cap[0] * 3 / 4;
		cs_cap[1] = cs_cap[1] * 3 / 4;
	}

	if (cap_info->row_3_4 && cap_info->rank == 2) {
		ddr_param->count = 2;
		ddr_param->para[0] = 0;
		ddr_param->para[1] = cs_cap[0] * 4 / 3;
		ddr_param->para[2] = cs_cap[0];
		ddr_param->para[3] = cs_cap[1];
	} else {
		ddr_param->count = 1;
		ddr_param->para[0] = 0;
		ddr_param->para[1] = (u64)cs_cap[0] + (u64)cs_cap[1];
	}
}

/*
 * Two-pass init: bring the DRAM up with the default config, detect the
 * real capacity, patch the timing params, re-init with the detected
 * geometry, then re-detect cs1 row and update os_reg[2]/[3].
 * return: 0 = success, other = fail
 */
static int sdram_init_detect(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 ret;
	u32 sys_reg = 0;
	u32 sys_reg3 = 0;

	if (sdram_init_(dram, sdram_params, 0) != 0)
		return -1;

	if (dram_detect_cap(dram, sdram_params, 0) != 0)
		return -1;

	/* modify bw, cs related timing */
	pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
				   sdram_params->base.dramtype);
	/* reinit sdram by real dram cap */
	ret = sdram_init_(dram, sdram_params, 1);
	if (ret != 0)
		goto out;

	/* redetect cs1 row */
	sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
	if (cap_info->cs1_row) {
		/* re-encode the detected cs1 row into the pmugrf os_regs */
		sys_reg = readl(&dram->pmugrf->os_reg[2]);
		sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
		SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
				    sys_reg, sys_reg3, 0);
		writel(sys_reg, &dram->pmugrf->os_reg[2]);
		writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	}

	ret = sdram_detect_high_row(cap_info, sdram_params->base.dramtype);

out:
	return ret;
}

/* Attach the skew table and return the built-in parameter set */
struct px30_sdram_params
		*get_default_sdram_config(void)
{
	sdram_configs[0].skew = &skew;

	return &sdram_configs[0];
}

/*
 * TPL entry point for DDR bring-up.
 * return: 0 = success, other = fail
 */
int sdram_init(void)
{
	struct px30_sdram_params *sdram_params;
	int ret = 0;
	struct ddr_param ddr_param;

	/* fill in the fixed register bases (no driver model in TPL) */
	dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
	dram_info.pctl = (void *)DDRC_BASE_ADDR;
	dram_info.grf = (void *)GRF_BASE_ADDR;
	dram_info.cru = (void *)CRU_BASE_ADDR;
	dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
	dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
	dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;

	sdram_params = get_default_sdram_config();
	ret = sdram_init_detect(&dram_info, sdram_params);

	if (ret)
		goto error;

	/* publish the detected layout and print a summary */
	get_ddr_param(sdram_params, &ddr_param);
	rockchip_setup_ddr_param(&ddr_param);
	sdram_print_ddr_info(&sdram_params->ch.cap_info,
			     &sdram_params->base, 0);

	printascii("out\n");
	return ret;
error:
	return (-1);
}
#endif /* CONFIG_TPL_BUILD */