/*
 * Copyright (C) 2018-2019, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <errno.h>
#include <stddef.h>

#include <platform_def.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp_pmic.h>
#include <drivers/st/stm32mp1_ddr.h>
#include <drivers/st/stm32mp1_ddr_regs.h>
#include <drivers/st/stm32mp1_pwr.h>
#include <drivers/st/stm32mp1_ram.h>
#include <lib/mmio.h>
#include <plat/common/platform.h>

struct reg_desc {
	const char *name;
	uint16_t offset;	/* Offset from base address */
	uint8_t par_offset;	/* Offset in parameter structure */
};

#define INVALID_OFFSET	0xFFU

#define TIMESLOT_1US	(plat_get_syscnt_freq2() / 1000000U)

#define DDRCTL_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrctl, x), \
		.par_offset = offsetof(struct y, x) \
	}

#define DDRPHY_REG(x, y) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrphy, x), \
		.par_offset = offsetof(struct y, x) \
	}

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};
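/*
 * For illustration, DDRCTL_REG_REG(mstr) expands to:
 *
 *	{ .name = "mstr",
 *	  .offset = offsetof(struct stm32mp1_ddrctl, mstr),
 *	  .par_offset = offsetof(struct stm32mp1_ddrctrl_reg, mstr) }
 *
 * Each descriptor thus pairs a controller (or PHY) register offset with
 * the offset of the matching field in a parameter structure, so that
 * set_reg() below can program whole register classes generically.
 */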
#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

#define DDR_REG_DYN(x) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrctl, x), \
		.par_offset = INVALID_OFFSET \
	}

static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};

#define DDRPHY_REG_DYN(x) \
	{ \
		.name = #x, \
		.offset = offsetof(struct stm32mp1_ddrphy, x), \
		.par_offset = INVALID_OFFSET \
	}

static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};

enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	/*
	 * Dynamic registers => managed in driver or not changed,
	 * can be dumped in interactive mode.
	 */
	REG_DYN,
	REGPHY_DYN,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	uint8_t size;
	enum base_type base;
};

static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = ARRAY_SIZE(ddr_reg),
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = ARRAY_SIZE(ddr_timing),
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = ARRAY_SIZE(ddr_perf),
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = ARRAY_SIZE(ddr_map),
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = ARRAY_SIZE(ddrphy_reg),
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = ARRAY_SIZE(ddrphy_timing),
		.base = DDRPHY_BASE
	},
	[REGPHY_CAL] = {
		.name = "cal",
		.desc = ddrphy_cal,
		.size = ARRAY_SIZE(ddrphy_cal),
		.base = DDRPHY_BASE
	},
	[REG_DYN] = {
		.name = "dyn",
		.desc = ddr_dyn,
		.size = ARRAY_SIZE(ddr_dyn),
		.base = DDR_BASE
	},
	[REGPHY_DYN] = {
		.name = "dyn",
		.desc = ddrphy_dyn,
		.size = ARRAY_SIZE(ddrphy_dyn),
		.base = DDRPHY_BASE
	},
};
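/*
 * ddr_registers[] is the driver's single dispatch table: a register
 * class (enum reg_type) selects a descriptor array, its element count
 * and the IP (controller or PHY) whose base address the offsets apply
 * to. Note that the "dyn" classes only carry INVALID_OFFSET parameter
 * offsets: they are managed at run time and must not be passed to
 * set_reg(), which panics on such entries.
 */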
static uintptr_t get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE) {
		return (uintptr_t)priv->phy;
	} else {
		return (uintptr_t)priv->ctl;
	}
}

/* Program all registers of a class from the matching parameter structure */
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int value;
	enum base_type base = ddr_registers[type].base;
	uintptr_t base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	VERBOSE("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		uintptr_t ptr = base_addr + desc[i].offset;

		if (desc[i].par_offset == INVALID_OFFSET) {
			ERROR("invalid parameter offset for %s\n",
			      desc[i].name);
			panic();
		} else {
			value = *((uint32_t *)((uintptr_t)param +
					       desc[i].par_offset));
			mmio_write_32(ptr, value);
		}
	}
}
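/*
 * Illustrative use (matching the calls in stm32mp1_ddr_init() below):
 *
 *	set_reg(priv, REG_TIMING, &config->c_timing);
 *
 * copies every field of struct stm32mp1_ddrctrl_timing into the
 * corresponding controller timing register listed in ddr_timing[].
 */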
static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	uint32_t pgsr;
	int error = 0;
	unsigned long start;
	unsigned long time0, time;

	start = get_timer(0);
	time0 = start;

	do {
		pgsr = mmio_read_32((uintptr_t)&phy->pgsr);
		time = get_timer(start);
		if (time != time0) {
			VERBOSE(" > [0x%lx] pgsr = 0x%x\n",
				(uintptr_t)&phy->pgsr, pgsr);
			VERBOSE("   [0x%lx] pir = 0x%x (time=%lx)\n",
				(uintptr_t)&phy->pir,
				mmio_read_32((uintptr_t)&phy->pir),
				time);
		}

		time0 = time;
		if (time > plat_get_syscnt_freq2()) {
			panic();
		}
		if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
			VERBOSE("DQS Gate Training Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
			VERBOSE("DQS Gate Training Intermittent Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
			VERBOSE("DQS Drift Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
			VERBOSE("Read Valid Training Error\n");
			error++;
		}
		if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
			VERBOSE("Read Valid Training Intermittent Error\n");
			error++;
		}
	} while ((pgsr & DDRPHYC_PGSR_IDONE) == 0U && error == 0);
	VERBOSE("\n[0x%lx] pgsr = 0x%x\n",
		(uintptr_t)&phy->pgsr, pgsr);
}

static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
{
	uint32_t pir_init = pir | DDRPHYC_PIR_INIT;

	mmio_write_32((uintptr_t)&phy->pir, pir_init);
	VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n",
		(uintptr_t)&phy->pir, pir_init,
		mmio_read_32((uintptr_t)&phy->pir));

	/* Wait 10 configuration clock cycles before starting to poll */
	udelay(10);

	/* Wait for DRAM initialization and gate training evaluation to complete */
	stm32mp1_ddrphy_idone_wait(phy);
}

/* Start quasi-dynamic register update */
static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	mmio_clrbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));
}

/* Wait for quasi-dynamic register update to be acknowledged */
static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	unsigned long start;
	uint32_t swstat;

	mmio_setbits_32((uintptr_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%lx] swctl = 0x%x\n",
		(uintptr_t)&ctl->swctl, mmio_read_32((uintptr_t)&ctl->swctl));

	start = get_timer(0);
	do {
		swstat = mmio_read_32((uintptr_t)&ctl->swstat);
		VERBOSE("[0x%lx] swstat = 0x%x ",
			(uintptr_t)&ctl->swstat, swstat);
		VERBOSE("timer in ms 0x%lx = start 0x%lx\r",
			get_timer(0), start);
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}
	} while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);

	VERBOSE("[0x%lx] swstat = 0x%x\n",
		(uintptr_t)&ctl->swstat, swstat);
}
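/*
 * Illustrative quasi-dynamic write sequence (this exact pattern is used
 * for MSTR in stm32mp1_ddr3_dll_off() below):
 *
 *	stm32mp1_start_sw_done(ctl);
 *	mmio_setbits_32((uintptr_t)&ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
 *	stm32mp1_wait_sw_done_ack(ctl);
 *
 * Quasi-dynamic fields may only be changed while SWCTL.sw_done is
 * cleared; the update is committed by setting it back and polling
 * SWSTAT.sw_done_ack.
 */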
/* Wait for the controller to reach the requested operating mode */
static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
{
	unsigned long start;
	uint32_t stat;
	uint32_t operating_mode;
	uint32_t selref_type;
	int break_loop = 0;

	start = get_timer(0);
	for ( ; ; ) {
		stat = mmio_read_32((uintptr_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%lx] stat = 0x%x\n",
			(uintptr_t)&priv->ctl->stat, stat);
		VERBOSE("timer in ms 0x%lx = start 0x%lx\r",
			get_timer(0), start);
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => also check STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: also accept automatic self-refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%lx] stat = 0x%x\n",
		(uintptr_t)&priv->ctl->stat, stat);
}

/* Mode Register Writes (MRW or MRS) */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write MRCTRL0.mr_wr to 1.
	 *    This bit is self-clearing and triggers the MR transaction.
	 *    The uMCTL2 then asserts MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}
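/*
 * Example (mirroring step 6 of the DLL-off sequence below): disabling
 * the DDR3 DLL via mode register MR1 is a single call,
 *
 *	stm32mp1_mode_register_write(priv, 1, mr1 | BIT(0));
 *
 * which addresses all ranks and returns once MRSTAT.mr_wr_busy clears.
 */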
/* Switch DDR3 from DLL-on to DLL-off */
static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
{
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while (((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		  DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b. DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    Warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers,
	 *     both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (stm32mp_clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
}
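/*
 * The two helpers below bracket the DQS training step in
 * stm32mp1_ddr_init(): auto-refresh and power-down are disabled before
 * training and conditionally restored afterwards, e.g. (illustrative):
 *
 *	stm32mp1_refresh_disable(priv->ctl);
 *	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);
 *	stm32mp1_ddrphy_idone_wait(priv->phy);
 *	stm32mp1_refresh_restore(priv->ctl, rfshctl3, pwrctl);
 */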
static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp1_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}

/* Let the PMIC, if any, set up the DDR power rails */
static int board_ddr_power_init(enum ddr_type ddr_type)
{
	if (dt_check_pmic()) {
		return pmic_ddr_power_init(ddr_type);
	}

	return 0;
}
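/*
 * Main DDR initialization entry point. In this code base it is expected
 * to be called from the BL2 RAM driver (see stm32mp1_ram.c), with a
 * struct stm32mp1_ddr_config filled from the board device tree.
 */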
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %d kHz\n", config->info.speed);
	VERBOSE("size = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 *    note: check that DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable, via the DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/*
	 * 1.4. wait 128 cycles to permit initialization of end logic.
	 * For PCLK = 133MHz, 1 us is enough; use 2 us to allow for lower
	 * frequencies.
	 */
	udelay(2);

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3: do not set DLL-off for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* DDR3: do not set DLL-off for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);
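	/*
	 * Informal gloss of the PIR flags requested in step 5 below
	 * (names as in the DesignWare PUBL databook; meanings summarized,
	 * not normative): DLLSRST/DLLLOCK reset the DLLs and wait for
	 * lock, ZCAL runs impedance calibration, ITMSRST resets the
	 * interface timing modules, DRAMINIT executes the SDRAM
	 * initialization sequence, ICPC configures initialization-complete
	 * signalling towards the controller, and DRAMRST (added for DDR3
	 * only) asserts SDRAM reset.
	 */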
	/*
	 * 5. Indicate to PUBL that the controller performs SDRAM
	 *    initialization by setting PIR.INIT and PIR.CTLDINIT,
	 *    and poll PGSR.IDONE.
	 *    DRAM init is done by the PHY, INIT0.skip_dram_init = 1
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. Set DFIMISC.dfi_init_complete_en to 1.
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operating mode
	 *    by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL-off mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable auto-refresh and power-down by setting
	 *    - RFSHCTL3.dis_auto_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfi_init_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and rank to train
	 *    not done here => keep the programmed value in PGCR
	 */

	/*
	 * 10. Configure PUBL PIR register to specify which training step
	 *     to run
	 *     Warning: RVTRN is not supported by this PUBL
	 */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. Monitor PUBL PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. Restore the registers changed in step 8 to their original
	 *     values, if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_0,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_0 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_0,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pctrl_1,
			DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%lx] pctrl_1 = 0x%x\n",
		(uintptr_t)&priv->ctl->pctrl_1,
		mmio_read_32((uintptr_t)&priv->ctl->pctrl_1));
}