// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2020 Rockchip Electronics Co., Ltd
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <dm/root.h>
#include <dt-structs.h>
#include <ram.h>
#include <regmap.h>
#include <asm/io.h>
#include <asm/types.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sdram_rv1108_pctl_phy.h>
#include <asm/arch/timer.h>
#include <asm/arch/sdram.h>

#if defined(CONFIG_ROCKCHIP_RV1108)
#include <asm/arch/sdram_rv1108.h>
#elif defined(CONFIG_ROCKCHIP_RK3308)
#include <asm/arch/sdram_rk3308.h>
#endif

/*
 * we can not fit the code to access the device tree in SPL
 * (due to 6K SRAM size limits), so these are hard-coded
 */

/*
 * Copy a region of 32-bit values into memory-mapped registers.
 *
 * @dest: first destination register
 * @src:  first source word
 * @n:    size of the region in BYTES (n/4 words are written)
 *
 * Each word goes through writel() so the MMIO access ordering/barriers
 * are preserved; a plain memcpy would not be safe here.
 */
void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

/*
 * Pulse the DDR PHY analog and digital reset bits.
 *
 * Sequence: assert both resets via the CRU helper and phy_reg0, then
 * release analog first and digital last, with short settle delays in
 * between.  The udelay() values match the vendor init sequence — do not
 * reorder or shorten them.
 */
static void phy_pctrl_reset(struct dram_info *priv)
{
	phy_pctrl_reset_cru(priv);
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_DIGITAL_CORE_MASK | RESET_ANALOG_LOGIC_MASK,
			RESET_DIGITAL_CORE_ACT << RESET_DIGITAL_CORE_SHIFT |
			RESET_ANALOG_LOGIC_ACT << RESET_ANALOG_LOGIC_SHIFT);
	udelay(1);
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_ANALOG_LOGIC_MASK,
			RESET_ANALOG_LOGIC_DIS << RESET_ANALOG_LOGIC_SHIFT);
	udelay(5);
	clrsetbits_le32(&priv->phy->phy_reg0,
			RESET_DIGITAL_CORE_MASK,
			RESET_DIGITAL_CORE_DIS << RESET_DIGITAL_CORE_SHIFT);
	udelay(1);
}

/*
 * Configure the PHY DLL bypass mode for the command/clock and both DQ
 * byte-lane channels.  Below PHY_LOW_SPEED_MHZ the DLLs cannot lock, so
 * the TX DQ / CK DLLs are bypassed; above it they are enabled.  Finally
 * the frequency-dependent DQS RX DLL setting is applied by the SoC
 * helper ddr_phy_dqs_rx_dll_cfg().
 */
static void phy_dll_bypass_set(struct dram_info *priv, unsigned int freq)
{
	clrsetbits_le32(&priv->phy->phy_reg13, CMD_DLL_BYPASS_MASK,
			CMD_DLL_BYPASS << CMD_DLL_BYPASS_SHIFT);

	writel(CK_DLL_BYPASS_DISABLE << CK_DLL_BYPASS_SHIFT,
	       &priv->phy->phy_reg14);

	clrsetbits_le32(&priv->phy->phy_reg26, LEFT_CHN_A_DQ_DLL_BYPASS_MASK,
			LEFT_CHN_A_DQ_DLL_BYPASS << LEFT_CHN_A_DQ_DLL_SHIFT);
	writel(LEFT_CHN_A_DQS_DLL_BYPASS_DIS <<
	       LEFT_CHN_A_DQS_DLL_SHIFT, &priv->phy->phy_reg27);

	clrsetbits_le32(&priv->phy->phy_reg36, RIGHT_CHN_A_DQ_DLL_BYPASS_MASK,
			RIGHT_CHN_A_DQ_DLL_BYPASS <<
			RIGHT_CHN_A_DQ_DLL_SHIFT);
	writel(RIGHT_CHN_A_DQS_DLL_BYPASS_DIS <<
	       RIGHT_CHN_A_DQS_DLL_SHIFT, &priv->phy->phy_reg37);

	if (freq <= PHY_LOW_SPEED_MHZ) {
		/* low speed: bypass the TX DQ and CMD/CK DLLs */
		writel(RIGHT_CHN_A_TX_DQ_BYPASS_SET <<
		       RIGHT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       LEFT_CHN_A_TX_DQ_BYPASS_SET <<
		       LEFT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       CMD_CK_DLL_BYPASS_SET << CMD_CK_DLL_BYPASS_SHIFT,
		       &priv->phy->phy_regdll);
	} else {
		/* high speed: DLLs active */
		writel(RIGHT_CHN_A_TX_DQ_BYPASS_DIS <<
		       RIGHT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       LEFT_CHN_A_TX_DQ_BYPASS_DIS <<
		       LEFT_CHN_A_TX_DQ_BYPASS_SHIFT |
		       CMD_CK_DLL_BYPASS_DIS << CMD_CK_DLL_BYPASS_SHIFT,
		       &priv->phy->phy_regdll);
	}

	ddr_phy_dqs_rx_dll_cfg(priv, freq);
}

/*
 * Issue one SDRAM command through the PCTL MCMD register and busy-wait
 * until the controller clears START_CMD (command accepted).
 *
 * @rank: chip-select selector (RANK_SEL_CS0 / CS1 / CS0_CS1)
 * @cmd:  command encoding (MRS_CMD, PREA_CMD, REF_CMD, ...)
 * @arg:  pre-shifted bank/address payload OR'ed into MCMD
 *
 * NOTE(review): no timeout on the wait loop — a hung controller stalls
 * the SPL here.  Presumably acceptable this early in boot; confirm.
 */
static void send_command(struct dram_info *priv,
			 u32 rank, u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << RANK_SEL_SHIFT) | arg | cmd),
	       &priv->pctl->mcmd);
	while (readl(&priv->pctl->mcmd) & START_CMD)
		;
}

/*
 * Run the JEDEC mode-register initialization sequence for the detected
 * memory type.
 *
 * DDR2/DDR3: DESELECT/PREA then MRS of MR2, MR3, MR1, MR0 (MR0 with the
 * DLL-reset bit set first), followed by ZQCL (DDR3) or the
 * PREA/REF/REF/MRS tail (DDR2).  LPDDR2: MRW reset (MA=63), per-rank ZQ
 * init calibration (MA=10, OP=0xFF), then MR1/MR2/MR3 writes.
 *
 * The mr[] values come from the board timing tables in params_priv.
 * NOTE(review): the DDR2 branch also OR's DDR3_DLL_RESET into MR0 —
 * looks intentional (same bit position on DDR2), but confirm against
 * the header definition.
 */
static void memory_init(struct dram_info *priv,
			struct sdram_params *params_priv)
{
	u32 mr0;

	if (params_priv->ddr_config_t.ddr_type == DDR3 ||
	    params_priv->ddr_config_t.ddr_type == DDR2) {
		send_command(priv, RANK_SEL_CS0_CS1, DESELECT_CMD, 0);
		udelay(1);
		send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);
		send_command(priv, RANK_SEL_CS0_CS1, DESELECT_CMD, 0);
		udelay(1);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR2 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[2] &
			      CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR3 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[3] &
			      CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (MR1 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[1] &
			      CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		mr0 = params_priv->ddr_timing_t.phy_timing.mr[0];
		if (params_priv->ddr_config_t.ddr_type == DDR3) {
			/* MR0 with DLL reset, then long ZQ calibration */
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
				     (((mr0 | DDR3_DLL_RESET) &
				       CMD_ADDR_MASK) << CMD_ADDR_SHIFT));

			send_command(priv, RANK_SEL_CS0_CS1, ZQCL_CMD, 0);
		} else {
			/* DDR2 tail: DLL reset, precharge, 2x refresh, final MR0 */
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
				     (((mr0 | DDR3_DLL_RESET) &
				       CMD_ADDR_MASK) << CMD_ADDR_SHIFT));
			send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, REF_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, REF_CMD, 0);
			send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
				     (MR0 & BANK_ADDR_MASK) <<
				     BANK_ADDR_SHIFT |
				     ((mr0 & CMD_ADDR_MASK) <<
				      CMD_ADDR_SHIFT));
		}
	} else {
		/* reset */
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (63 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0 & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tINIT5 */
		udelay(10);
		/* ZQ calibration Init */
		send_command(priv, RANK_SEL_CS0, MRS_CMD,
			     (10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xFF & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tZQINIT */
		udelay(1);
		send_command(priv, RANK_SEL_CS1, MRS_CMD,
			     (10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xFF & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		/* tZQINIT */
		udelay(1);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (1 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[1] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (2 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[2] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(priv, RANK_SEL_CS0_CS1, MRS_CMD,
			     (3 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (params_priv->ddr_timing_t.phy_timing.mr[3] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
	}
}

/*
 * Drive the PCTL state machine to the CONFIG state, walking through
 * whatever state it is currently in (waking it up first if it is in
 * LOW_POWER).  Spins until the transition completes; returns only once
 * STAT reports CONFIG.
 */
void move_to_config_state(struct dram_info *priv)
{
	unsigned int state;

	while (1) {
		state = readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK)
				!= ACCESS)
				;
			/*
			 * If at low power state, need wakeup first, and then
			 * enter the config, so fallthrough
			 */
		case ACCESS:
		case INIT_MEM:
			writel(CFG_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK)
				!= CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

/*
 * Drive the PCTL state machine to the ACCESS state (normal operation),
 * the counterpart of move_to_config_state().  INIT_MEM must pass
 * through CONFIG first, hence the deliberate fallthrough.
 */
void move_to_access_state(struct dram_info *priv)
{
	unsigned int state;

	while (1) {
		state = readl(&priv->pctl->stat) & PCTL_CTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) &
				PCTL_CTL_STAT_MASK) != ACCESS)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) &
				PCTL_CTL_STAT_MASK) != CONFIG)
				;
			/* fallthrough */
		case CONFIG:
			writel(GO_STATE, &priv->pctl->sctl);
			while ((readl(&priv->pctl->stat) &
				PCTL_CTL_STAT_MASK) != ACCESS)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

/*
 * Program the protocol controller (PCTL): DFI interface config, the
 * timing-register block (copied wholesale from the board tables), the
 * memory-type dependent MCFG/ODT/low-power settings, and the DFI
 * read/write latencies derived from tCL/tCWL.
 *
 * The latency formulas differ per type: (t-1)/2-1 for DDR2/DDR3 versus
 * t/2-1 for LPDDR2 — this matches the PHY 1:2 clocking noted below.
 */
static void pctl_cfg(struct dram_info *priv,
		     struct sdram_params *params_priv)
{
	u32 reg;
	u32 burstlen;
	u32 bl_mddr_lpddr2;

	/* DFI config */
	writel(DFI_DATA_BYTE_DISABLE_EN << DFI_DATA_BYTE_DISABLE_EN_SHIFT |
	       DFI_INIT_START_EN << DFI_INIT_START_SHIFT,
	       &priv->pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_DISABLE_EN_DPD <<
	       DFI_DRAM_CLK_DISABLE_EN_DPD_SHIFT |
	       DFI_DRAM_CLK_DISABLE_EN << DFI_DRAM_CLK_DISABLE_EN_SHIFT,
	       &priv->pctl->dfistcfg1);
	writel(PARITY_EN << PARITY_EN_SHIFT |
	       PARITY_INTR_EN << PARITY_INTR_EN_SHIFT, &priv->pctl->dfistcfg2);

	writel(TPHYUPD_TYPE0, &priv->pctl->dfitphyupdtype0);
	writel(TPHY_RDLAT, &priv->pctl->dfitphyrdlat);
	writel(TPHY_WRDATA, &priv->pctl->dfitphywrdata);

	writel(DFI_PHYUPD_DISABLE | DFI_CTRLUPD_DISABLE,
	       &priv->pctl->dfiupdcfg);

	/* copy the whole pctl_timing table starting at togcnt1u */
	copy_to_reg(&priv->pctl->togcnt1u,
		    &params_priv->ddr_timing_t.pctl_timing.togcnt1u,
		    sizeof(struct pctl_timing));
	/*
	 * rv1108 phy is 1:2 mode, noc_timing.b.burstlen
	 * have divide by scheuler clock, so need to * 4
	 */
	burstlen = params_priv->ddr_timing_t.noc_timing.b.burstlen * 4;

	if (params_priv->ddr_config_t.ddr_type == DDR3 ||
	    params_priv->ddr_config_t.ddr_type == DDR2) {
		writel((RANK0_ODT_WRITE_SEL << RANK0_ODT_WRITE_SEL_SHIFT |
			RANK1_ODT_WRITE_SEL << RANK1_ODT_WRITE_SEL_SHIFT),
		       &priv->pctl->dfiodtcfg);

		writel(ODT_LEN_BL8_W << ODT_LEN_BL8_W_SHIFT,
		       &priv->pctl->dfiodtcfg1);

		writel(params_priv->ddr_timing_t.pctl_timing.trsth,
		       &priv->pctl->trsth);
		if (params_priv->ddr_config_t.ddr_type == DDR3)
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR3_EN |
			       MEM_BL_8 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		else if (burstlen == 8)
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR2_EN |
			       MEM_BL_8 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		else
			writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | DDR2_EN |
			       MEM_BL_4 | TFAW_CFG_5_TDDR |
			       PD_EXIT_SLOW_EXIT_MODE | PD_TYPE_ACT_PD |
			       PD_IDLE_DISABLE |
			       params_priv->ddr_2t_en << TWO_T_SHIFT,
			       &priv->pctl->mcfg);
		writel(DFI_LP_EN_SR << DFI_LP_EN_SR_SHIFT |
		       DFI_LP_WAKEUP_SR_32_CYCLES <<
		       DFI_LP_WAKEUP_SR_SHIFT |
		       DFI_TLP_RESP << DFI_TLP_RESP_SHIFT,
		       &priv->pctl->dfilpcfg0);

		/* DFI latencies from tCL/tCWL (1:2 PHY clocking) */
		reg = readl(&priv->pctl->tcl);
		writel((reg - 1) / 2 - 1, &priv->pctl->dfitrddataen);
		reg = readl(&priv->pctl->tcwl);
		writel((reg - 1) / 2 - 1, &priv->pctl->dfitphywrlat);
	} else {
		if (burstlen == 4)
			bl_mddr_lpddr2 = MDDR_LPDDR2_BL_4;
		else
			bl_mddr_lpddr2 = MDDR_LPDDR2_BL_8;
		writel((RANK0_ODT_WRITE_DIS << RANK0_ODT_WRITE_SEL_SHIFT |
			RANK1_ODT_WRITE_DIS << RANK1_ODT_WRITE_SEL_SHIFT),
		       &priv->pctl->dfiodtcfg);

		writel(ODT_LEN_BL8_W_0 << ODT_LEN_BL8_W_SHIFT,
		       &priv->pctl->dfiodtcfg1);

		/* LPDDR2 has no reset pin timing */
		writel(0, &priv->pctl->trsth);
		writel(MDDR_LPDDR23_CLOCK_STOP_IDLE_DIS | LPDDR2_EN |
		       LPDDR2_S4 | bl_mddr_lpddr2 |
		       TFAW_CFG_6_TDDR | PD_EXIT_FAST_EXIT_MODE |
		       PD_TYPE_ACT_PD | PD_IDLE_DISABLE, &priv->pctl->mcfg);
		writel(DFI_LP_EN_SR << DFI_LP_EN_SR_SHIFT |
		       DFI_LP_WAKEUP_SR_32_CYCLES << DFI_LP_WAKEUP_SR_SHIFT |
		       DFI_TLP_RESP << DFI_TLP_RESP_SHIFT |
		       DFI_LP_WAKEUP_PD_32_CYCLES << DFI_LP_WAKEUP_PD_SHIFT |
		       DFI_LP_EN_PD,
		       &priv->pctl->dfilpcfg0);

		reg = readl(&priv->pctl->tcl);
		writel(reg / 2 - 1, &priv->pctl->dfitrddataen);
		reg = readl(&priv->pctl->tcwl);
		writel(reg / 2 - 1, &priv->pctl->dfitphywrlat);
	}
	pctl_cfg_grf(priv, params_priv);
	setbits_le32(&priv->pctl->scfg, HW_LOW_POWER_EN);

	/* only support x16 memory */
	clrsetbits_le32(&priv->pctl->ppcfg, PPMEM_EN_MASK, PPMEM_EN);
}

/*
 * Program the DDR PHY: memory type + burst length select, CL/AL and
 * CWL latencies, drive strength / ODT, and the 16-bit DQ width.
 * Also kicks the memory-scheduler and skew configuration helpers.
 */
static void phy_cfg(struct dram_info *priv,
		    struct sdram_params *params_priv)
{
	u32 burstlen;

	/* see pctl_cfg(): noc burstlen is scaled by 4 for the 1:2 PHY */
	burstlen = params_priv->ddr_timing_t.noc_timing.b.burstlen * 4;
	burstlen = (burstlen == 4) ? PHY_BL_4 : PHY_BL_8;
	ddr_msch_cfg(priv, params_priv);
	ddr_phy_skew_cfg(priv);
	switch (params_priv->ddr_config_t.ddr_type) {
	case DDR2:
		writel(MEMORY_SELECT_DDR2 | PHY_BL_8, &priv->phy->phy_reg1);
		break;
	case DDR3:
		writel(MEMORY_SELECT_DDR3 | PHY_BL_8, &priv->phy->phy_reg1);
		break;
	case LPDDR2:
	default:
		/* only LPDDR2 honors the computed burst length */
		writel(MEMORY_SELECT_LPDDR2 | burstlen, &priv->phy->phy_reg1);
		break;
	}

	writel(params_priv->ddr_timing_t.phy_timing.cl_al,
	       &priv->phy->phy_regb);
	writel(params_priv->ddr_timing_t.pctl_timing.tcwl,
	       &priv->phy->phy_regc);

	set_ds_odt(priv, params_priv);

	/* only support x16 memory */
	clrsetbits_le32(&priv->phy->phy_reg0, DQ_16BIT_EN_MASK,
			DQ_16BIT_EN);
}

/*
 * Apply the final row/bank/column address-mapping to the memory
 * scheduler.  Must be done from the CONFIG state, hence the state
 * round-trip.
 */
static void dram_cfg_rbc(struct dram_info *priv,
			 struct sdram_params *params_priv)
{
	move_to_config_state(priv);
	ddr_msch_cfg_rbc(params_priv, priv);
	move_to_access_state(priv);
}

/*
 * Run DQS gate training on CS0 and wait (up to ~1000us) for both byte
 * lanes of channel A to report done.  Auto-refresh is suppressed for
 * the duration (trefi = UPD_REF only) and restored afterwards; a
 * precharge-all is issued before re-enabling refresh.
 *
 * NOTE(review): a training timeout is not treated as an error here —
 * the caller re-checks via check_rd_gate().
 */
static void data_training(struct dram_info *priv)
{
	u32 value;
	u32 tmp = 0;
	u32 tmp1 = 0;
	u32 timeout = 1000;

	/* disable auto refresh */
	value = readl(&priv->pctl->trefi);
	writel(UPD_REF, &priv->pctl->trefi);

	tmp1 = readl(&priv->phy->phy_reg2);

	/* pulse training enable: disable, then activate */
	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_DIS | tmp1,
	       &priv->phy->phy_reg2);
	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_ACT | tmp1,
	       &priv->phy->phy_reg2);

	/* delay until data training done */
	while (tmp != (CHN_A_HIGH_8BIT_TRAINING_DONE |
		       CHN_A_LOW_8BIT_TRAINING_DONE)) {
		udelay(1);
		tmp = (readl(&priv->phy->phy_regff) & CHN_A_TRAINING_DONE_MASK);
		timeout--;
		if (!timeout)
			break;
	}

	writel(DQS_GATE_TRAINING_SEL_CS0 | DQS_GATE_TRAINING_DIS | tmp1,
	       &priv->phy->phy_reg2);

	send_command(priv, RANK_SEL_CS0_CS1, PREA_CMD, 0);

	/* restore refresh interval */
	writel(value | UPD_REF, &priv->pctl->trefi);
}

/*
 * Probe the DRAM geometry (column bits, bank bits, row bits) by
 * writing a PATTERN at candidate address-wrap boundaries and checking
 * whether it aliases back onto address 0.  bw=1 because only x16 bus
 * width is supported.
 *
 * Return: 0 on success, -EAGAIN if no plausible geometry was found
 * (col < 10 or row < 12).
 */
static int sdram_detect(struct dram_info *priv,
			struct sdram_params *params_priv)
{
	u32 row, col, row_max, col_max, bank_max;
	u32 bw = 1;
	phys_addr_t test_addr;
	struct ddr_schedule ddr_sch;

	/* if col detect wrong,row needs initial */
	row = 0;

	/* detect col */
	move_to_config_state(priv);
	ddr_msch_get_max_col(priv, &ddr_sch);
	col_max = ddr_sch.col;
	bank_max = ddr_sch.bank;
	move_to_access_state(priv);

	for (col = col_max; col >= 10; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
			    (1ul << (col + bw - 1ul)));
		writel(PATTERN, test_addr);
		/* no aliasing back to base => this column width exists */
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (col <= 9)
		goto cap_err;
	params_priv->ddr_config_t.col = col;

	if (params_priv->ddr_config_t.ddr_type == DDR3) {
		/* DDR3 always has 8 banks */
		params_priv->ddr_config_t.bank = 3;
	} else {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
			    (1ul << (bank_max + col_max +
			     bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			params_priv->ddr_config_t.bank = 3;
		else
			params_priv->ddr_config_t.bank = 2;
	}

	/* detect row */
	move_to_config_state(priv);
	ddr_msch_get_max_row(priv, &ddr_sch);
	move_to_access_state(priv);
	col_max = ddr_sch.col;
	row_max = ddr_sch.row;

	for (row = row_max; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		test_addr = (phys_addr_t)(CONFIG_SYS_SDRAM_BASE +
			    (1ul << (row + bank_max +
			     col_max + bw - 1ul)));

		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row <= 11)
		goto cap_err;
	params_priv->ddr_config_t.cs0_row = row;
	return 0;
cap_err:
	return -EAGAIN;
}

#define DDR_VERSION 0x2

/*
 * Publish the detected SDRAM configuration: encode geometry into the
 * GRF os_reg2/os_reg3 scratch registers (consumed by later boot
 * stages), print a summary over the debug UART, and hand the base/size
 * to rockchip_setup_ddr_param() for U-Boot proper.
 */
static void sdram_all_config(struct dram_info *priv,
			     struct sdram_params *params_priv)
{
	u32 version = DDR_VERSION;
	u32 os_reg = 0;
	u32 row_12 = 0;
	u32 ddr_info = 0;
	/* rk3308,rv1108 only support 1 channel, x16 ddr bus, x16 memory */
	u32 chn_cnt = 0;
	u32 rank = 1;
	u32 bw = 1;
	u32 dbw = 1;
	size_t size = 0;
	struct ddr_param ddr_param;

	/* os_reg2 */
	os_reg = (params_priv->ddr_config_t.ddr_type & SYS_REG_DDRTYPE_MASK) <<
		 SYS_REG_DDRTYPE_SHIFT |
		 (chn_cnt & SYS_REG_NUM_CH_MASK) <<
		 SYS_REG_NUM_CH_SHIFT |
		 ((rank - 1) & SYS_REG_RANK_MASK) <<
		 SYS_REG_RANK_SHIFT(0) |
		 ((params_priv->ddr_config_t.col - 9) & SYS_REG_COL_MASK) <<
		 SYS_REG_COL_SHIFT(0) |
		 ((params_priv->ddr_config_t.bank == 3 ? 0 : 1) &
		  SYS_REG_BK_MASK) << SYS_REG_BK_SHIFT(0) |
		 ((params_priv->ddr_config_t.cs0_row - 13) &
		  SYS_REG_CS0_ROW_MASK) << SYS_REG_CS0_ROW_SHIFT(0) |
		 (bw & SYS_REG_BW_MASK) <<
		 SYS_REG_BW_SHIFT(0) |
		 (dbw & SYS_REG_DBW_MASK) <<
		 SYS_REG_DBW_SHIFT(0);

	writel(os_reg, &priv->grf->os_reg2);

	/* os_reg3: version plus the 12-row extension flag */
	if (params_priv->ddr_config_t.cs0_row == 12)
		row_12 = 1;
	os_reg = (version & SYS_REG1_VERSION_MASK) <<
		 SYS_REG1_VERSION_SHIFT | (row_12 &
		 SYS_REG1_EXTEND_CS0_ROW_MASK) <<
		 SYS_REG1_EXTEND_CS0_ROW_SHIFT(0);
	writel(os_reg, &priv->grf->os_reg3);

	printascii("In\n");
	printdec(params_priv->ddr_timing_t.freq);
	printascii("MHz\n");
	switch (params_priv->ddr_config_t.ddr_type & SYS_REG_DDRTYPE_MASK) {
	case 2:
		printascii("DDR2\n");
		break;
	case 5:
		printascii("LPDDR2\n");
		break;
	case 3:
	default:
		printascii("DDR3\n");
		break;
	}
	printascii(" Col=");
	printdec(params_priv->ddr_config_t.col);
	printascii(" Bank=");
	printdec(params_priv->ddr_config_t.bank);
	printascii(" Row=");
	printdec(params_priv->ddr_config_t.cs0_row);

	/* total bytes = 2^(bw + col + row + bank) */
	size = 1llu << (bw +
			params_priv->ddr_config_t.col +
			params_priv->ddr_config_t.cs0_row +
			params_priv->ddr_config_t.bank);
	ddr_info = size >> 20;
	printascii(" Size=");
	printdec(ddr_info);
	printascii("MB\n");
	printascii("msch:");
	ddr_info = readl(&priv->service_msch->ddrconf);
	printdec(ddr_info);
	printascii("\n");

	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = size;
	ddr_param.count = 1;
	ddr_param.para[0] = priv->info.base;
	ddr_param.para[1] = priv->info.size;
	rockchip_setup_ddr_param(&ddr_param);
}

/*
 * Full SDRAM bring-up entry point: clock/reset setup, PCTL + PHY
 * configuration, power-up, mode-register init, DQS training (retried
 * until the read gate checks out), geometry detect, address mapping,
 * and low-power enable.
 *
 * Hangs forever if sdram_detect() fails — there is no usable DRAM to
 * continue boot with.
 *
 * Return: 0 on success (never returns on detect failure).
 */
int rv1108_sdram_init(struct dram_info *sdram_priv,
		      struct sdram_params *params_priv)
{
	/* pmu enable ddr io retention */
	enable_ddr_io_ret(sdram_priv);
	rkdclk_init(sdram_priv, params_priv);
	phy_pctrl_reset(sdram_priv);
	phy_dll_bypass_set(sdram_priv, params_priv->ddr_timing_t.freq);
	pctl_cfg(sdram_priv, params_priv);
	phy_cfg(sdram_priv, params_priv);
	writel(POWER_UP_START, &sdram_priv->pctl->powctl);
	while (!(readl(&sdram_priv->pctl->powstat) & POWER_UP_DONE))
		;

	memory_init(sdram_priv, params_priv);
re_training:
	move_to_config_state(sdram_priv);
	data_training(sdram_priv);
	move_to_access_state(sdram_priv);
	if (sdram_detect(sdram_priv, params_priv)) {
		while (1)
			;
	}
	if (check_rd_gate(sdram_priv))
		goto re_training;

	/* workaround data training not in middle */
	modify_data_training(sdram_priv, params_priv);

	dram_cfg_rbc(sdram_priv, params_priv);
	sdram_all_config(sdram_priv, params_priv);
	enable_low_power(sdram_priv, params_priv);

	return 0;
}